1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #if V8_TARGET_ARCH_IA32
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/codegen.h"
10 #include "src/compiler.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ia32/frames-ia32.h"
14 #include "src/ic/ic.h"
15 #include "src/parser.h"
16 #include "src/scopes.h"
21 #define __ ACCESS_MASM(masm_)
24 class JumpPatchSite BASE_EMBEDDED {
// Records the position of an inlined smi-check jump so the IC machinery can
// later patch it in place (jc will be patched with jz, jnc will become jnz).
// NOTE(review): this listing is elided (embedded original line numbers skip),
// so access specifiers, some members and closing braces are not visible here.
26 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
28 info_emitted_ = false;
// Presumably the destructor's sanity check: patch info must have been
// emitted if and only if a patch site was bound -- TODO confirm, the
// surrounding lines are elided.
33 DCHECK(patch_site_.is_bound() == info_emitted_);
// Jump to |target| when |reg| is not a smi. 'test' always clears CF, so the
// not_carry branch is unconditionally taken until the IC patches it.
36 void EmitJumpIfNotSmi(Register reg,
38 Label::Distance distance = Label::kFar) {
39 __ test(reg, Immediate(kSmiTagMask));
40 EmitJump(not_carry, target, distance); // Always taken before patched.
// Jump to |target| when |reg| is a smi. The carry branch is never taken
// before patching (CF is cleared by 'test').
43 void EmitJumpIfSmi(Register reg,
45 Label::Distance distance = Label::kFar) {
46 __ test(reg, Immediate(kSmiTagMask));
47 EmitJump(carry, target, distance); // Never taken before patched.
50 void EmitPatchInfo() {
51 if (patch_site_.is_bound()) {
// Encode the (uint8) distance back to the patch site into a
// 'test eax, imm8' instruction so the patcher can locate the jump.
52 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
53 DCHECK(is_uint8(delta_to_patch_site));
54 __ test(eax, Immediate(delta_to_patch_site));
59 __ nop(); // Signals no inlined code.
64 // jc will be patched with jz, jnc will become jnz.
65 void EmitJump(Condition cc, Label* target, Label::Distance distance) {
66 DCHECK(!patch_site_.is_bound() && !info_emitted_);
67 DCHECK(cc == carry || cc == not_carry);
68 __ bind(&patch_site_);
69 __ j(cc, target, distance);
72 MacroAssembler* masm_;
80 // Generate code for a JS function. On entry to the function the receiver
81 // and arguments have been pushed on the stack left to right, with the
82 // return address on top of them. The actual argument count matches the
83 // formal parameter count expected by the function.
85 // The live registers are:
86 // o edi: the JS function object being called (i.e. ourselves)
88 // o ebp: our caller's frame pointer
89 // o esp: stack pointer (pointing to return address)
91 // The function builds a JS frame. Please see JavaScriptFrameConstants in
92 // frames-ia32.h for its layout.
93 void FullCodeGenerator::Generate() {
// Emits the whole unoptimized code object for one JS function: receiver
// fixup, frame prologue, locals, context, new.target/rest/arguments
// materialization, declarations, stack check, body, and implicit return.
// NOTE(review): this listing is elided (embedded line numbers skip), so
// labels, else-arms and closing braces referenced below are not all visible.
94 CompilationInfo* info = info_;
95 profiling_counter_ = isolate()->factory()->NewCell(
96 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
97 SetFunctionPosition(literal());
98 Comment cmnt(masm_, "[ function compiled by full code generator");
100 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
// Debugging aid: break on entry when --stop-at names this function.
103 if (strlen(FLAG_stop_at) > 0 &&
104 literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
109 // Sloppy mode functions and builtins need to replace the receiver with the
110 // global proxy when called as functions (without an explicit receiver
112 if (is_sloppy(info->language_mode()) && !info->is_native() &&
113 info->MayUseThis()) {
115 // +1 for return address.
116 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
117 __ mov(ecx, Operand(esp, receiver_offset));
119 __ cmp(ecx, isolate()->factory()->undefined_value());
120 __ j(not_equal, &ok, Label::kNear);
122 __ mov(ecx, GlobalObjectOperand());
123 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));
125 __ mov(Operand(esp, receiver_offset), ecx);
130 // Open a frame scope to indicate that there is a frame on the stack. The
131 // MANUAL indicates that the scope shouldn't actually generate code to set up
132 // the frame (that is done below).
133 FrameScope frame_scope(masm_, StackFrame::MANUAL);
135 info->set_prologue_offset(masm_->pc_offset());
136 __ Prologue(info->IsCodePreAgingActive());
137 info->AddNoFrameRange(0, masm_->pc_offset());
139 { Comment cmnt(masm_, "[ Allocate locals");
140 int locals_count = info->scope()->num_stack_slots();
141 // Generators allocate locals, if any, in context slots.
142 DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
143 if (locals_count == 1) {
144 __ push(Immediate(isolate()->factory()->undefined_value()));
145 } else if (locals_count > 1) {
// For many locals, first verify the frame would not overflow the real
// stack limit before pushing anything.
146 if (locals_count >= 128) {
149 __ sub(ecx, Immediate(locals_count * kPointerSize));
150 ExternalReference stack_limit =
151 ExternalReference::address_of_real_stack_limit(isolate());
152 __ cmp(ecx, Operand::StaticVariable(stack_limit));
153 __ j(above_equal, &ok, Label::kNear);
154 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// Initialize locals to undefined, pushing kMaxPushes at a time in an
// unrolled loop, then the remainder straight-line.
157 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
158 const int kMaxPushes = 32;
159 if (locals_count >= kMaxPushes) {
160 int loop_iterations = locals_count / kMaxPushes;
161 __ mov(ecx, loop_iterations);
163 __ bind(&loop_header);
165 for (int i = 0; i < kMaxPushes; i++) {
169 __ j(not_zero, &loop_header, Label::kNear);
171 int remaining = locals_count % kMaxPushes;
172 // Emit the remaining pushes.
173 for (int i = 0; i < remaining; i++) {
// Tracks whether edi still holds the closure (clobbered by runtime calls).
179 bool function_in_register = true;
181 // Possibly allocate a local context.
182 if (info->scope()->num_heap_slots() > 0) {
183 Comment cmnt(masm_, "[ Allocate context");
184 bool need_write_barrier = true;
185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186 // Argument to NewContext is the function, which is still in edi.
187 if (info->scope()->is_script_scope()) {
189 __ Push(info->scope()->GetScopeInfo(info->isolate()));
190 __ CallRuntime(Runtime::kNewScriptContext, 2);
191 } else if (slots <= FastNewContextStub::kMaximumSlots) {
192 FastNewContextStub stub(isolate(), slots);
194 // Result of FastNewContextStub is always in new space.
195 need_write_barrier = false;
198 __ CallRuntime(Runtime::kNewFunctionContext, 1);
200 function_in_register = false;
201 // Context is returned in eax. It replaces the context passed to us.
202 // It's saved in the stack and kept live in esi.
204 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
206 // Copy parameters into context if necessary.
207 int num_parameters = info->scope()->num_parameters();
// Index -1 stands for the receiver ('this') when it has a declaration.
208 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
209 for (int i = first_parameter; i < num_parameters; i++) {
210 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
211 if (var->IsContextSlot()) {
212 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
213 (num_parameters - 1 - i) * kPointerSize;
214 // Load parameter from stack.
215 __ mov(eax, Operand(ebp, parameter_offset));
216 // Store it in the context.
217 int context_offset = Context::SlotOffset(var->index());
218 __ mov(Operand(esi, context_offset), eax);
219 // Update the write barrier. This clobbers eax and ebx.
220 if (need_write_barrier) {
221 __ RecordWriteContextSlot(esi,
226 } else if (FLAG_debug_code) {
228 __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
229 __ Abort(kExpectedNewSpaceObject);
236 // Possibly set up a local binding to the this function which is used in
237 // derived constructors with super calls.
238 Variable* this_function_var = scope()->this_function_var();
239 if (this_function_var != nullptr) {
240 Comment cmnt(masm_, "[ This function");
241 if (!function_in_register) {
242 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
243 // The write barrier clobbers register again, keep it marked as such.
245 SetVar(this_function_var, edi, ebx, edx);
// Materialize new.target: walk past an arguments-adaptor frame, then read
// the original constructor from a construct frame, else use undefined.
248 Variable* new_target_var = scope()->new_target_var();
249 if (new_target_var != nullptr) {
250 Comment cmnt(masm_, "[ new.target");
251 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
252 Label non_adaptor_frame;
253 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
254 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
255 __ j(not_equal, &non_adaptor_frame);
256 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
258 __ bind(&non_adaptor_frame);
259 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
260 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
262 Label non_construct_frame, done;
263 __ j(not_equal, &non_construct_frame);
267 Operand(eax, ConstructFrameConstants::kOriginalConstructorOffset));
270 // Non-construct frame
271 __ bind(&non_construct_frame);
272 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
275 SetVar(new_target_var, eax, ebx, edx);
279 // Possibly allocate RestParameters
281 Variable* rest_param = scope()->rest_parameter(&rest_index);
283 Comment cmnt(masm_, "[ Allocate rest parameter array");
285 int num_parameters = info->scope()->num_parameters();
286 int offset = num_parameters * kPointerSize;
289 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
291 __ push(Immediate(Smi::FromInt(num_parameters)));
292 __ push(Immediate(Smi::FromInt(rest_index)));
293 __ push(Immediate(Smi::FromInt(language_mode())));
295 RestParamAccessStub stub(isolate());
298 SetVar(rest_param, eax, ebx, edx);
301 Variable* arguments = scope()->arguments();
302 if (arguments != NULL) {
303 // Function uses arguments object.
304 Comment cmnt(masm_, "[ Allocate arguments object");
305 if (function_in_register) {
308 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
310 // Receiver is just before the parameters on the caller's stack.
311 int num_parameters = info->scope()->num_parameters();
312 int offset = num_parameters * kPointerSize;
314 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
316 __ push(Immediate(Smi::FromInt(num_parameters)));
317 // Arguments to ArgumentsAccessStub:
318 // function, receiver address, parameter count.
319 // The stub will rewrite receiver and parameter count if the previous
320 // stack frame was an arguments adapter frame.
321 ArgumentsAccessStub::Type type;
322 if (is_strict(language_mode()) || !has_simple_parameters()) {
323 type = ArgumentsAccessStub::NEW_STRICT;
324 } else if (literal()->has_duplicate_parameters()) {
325 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
327 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
330 ArgumentsAccessStub stub(isolate(), type);
333 SetVar(arguments, eax, ebx, edx);
337 __ CallRuntime(Runtime::kTraceEnter, 0);
340 // Visit the declarations and body unless there is an illegal
342 if (scope()->HasIllegalRedeclaration()) {
343 Comment cmnt(masm_, "[ Declarations");
344 scope()->VisitIllegalRedeclaration(this);
347 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
348 { Comment cmnt(masm_, "[ Declarations");
349 VisitDeclarations(scope()->declarations());
352 // Assert that the declarations do not use ICs. Otherwise the debugger
353 // won't be able to redirect a PC at an IC to the correct IC in newly
355 DCHECK_EQ(0, ic_total_count_);
357 { Comment cmnt(masm_, "[ Stack check");
358 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
360 ExternalReference stack_limit
361 = ExternalReference::address_of_stack_limit(isolate());
362 __ cmp(esp, Operand::StaticVariable(stack_limit));
363 __ j(above_equal, &ok, Label::kNear);
364 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
368 { Comment cmnt(masm_, "[ Body");
369 DCHECK(loop_depth() == 0);
370 VisitStatements(literal()->body());
371 DCHECK(loop_depth() == 0);
375 // Always emit a 'return undefined' in case control fell off the end of
377 { Comment cmnt(masm_, "[ return <undefined>;");
378 __ mov(eax, isolate()->factory()->undefined_value());
379 EmitReturnSequence();
384 void FullCodeGenerator::ClearAccumulator() {
// Reset the accumulator register (eax) to the Smi zero.
385 __ Move(eax, Immediate(Smi::FromInt(0)));
389 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
// Subtract Smi(delta) from the profiling counter cell's value. Clobbers ebx.
390 __ mov(ebx, Immediate(profiling_counter_));
391 __ sub(FieldOperand(ebx, Cell::kValueOffset),
392 Immediate(Smi::FromInt(delta)));
396 void FullCodeGenerator::EmitProfilingCounterReset() {
// Reload the profiling counter cell with the full interrupt budget.
// Clobbers ebx. NOTE(review): lines are elided here; any adjustment of
// reset_value (e.g. for debug builds) is not visible -- confirm upstream.
397 int reset_value = FLAG_interrupt_budget;
398 __ mov(ebx, Immediate(profiling_counter_));
399 __ mov(FieldOperand(ebx, Cell::kValueOffset),
400 Immediate(Smi::FromInt(reset_value)));
404 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
405 Label* back_edge_target) {
// Emitted at every loop back edge: decrement the profiling counter by a
// weight proportional to the loop's code size and call the InterruptCheck
// builtin when the counter underflows, then reset the counter.
406 Comment cmnt(masm_, "[ Back edge bookkeeping");
409 DCHECK(back_edge_target->is_bound());
410 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
411 int weight = Min(kMaxBackEdgeWeight,
412 Max(1, distance / kCodeSizeMultiplier));
413 EmitProfilingCounterDecrement(weight);
414 __ j(positive, &ok, Label::kNear);
415 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
417 // Record a mapping of this PC offset to the OSR id. This is used to find
418 // the AST id from the unoptimized code in order to use it as a key into
419 // the deoptimization input data found in the optimized code.
420 RecordBackEdge(stmt->OsrEntryId());
422 EmitProfilingCounterReset();
425 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
426 // Record a mapping of the OSR id to this PC. This is used if the OSR
427 // entry becomes the target of a bailout. We don't expect it to be, but
428 // we want it to work if it is.
429 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
433 void FullCodeGenerator::EmitReturnSequence() {
// Emit (or jump to) the function's common return sequence: profiling
// counter bookkeeping, then Ret dropping receiver + arguments.
// NOTE(review): listing elided -- 'int weight', labels and frame teardown
// lines are not visible here.
434 Comment cmnt(masm_, "[ Return sequence");
435 if (return_label_.is_bound()) {
436 __ jmp(&return_label_);
438 // Common return label
439 __ bind(&return_label_);
442 __ CallRuntime(Runtime::kTraceExit, 1);
444 // Pretend that the exit is a backwards jump to the entry.
446 if (info_->ShouldSelfOptimize()) {
447 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
449 int distance = masm_->pc_offset();
450 weight = Min(kMaxBackEdgeWeight,
451 Max(1, distance / kCodeSizeMultiplier));
453 EmitProfilingCounterDecrement(weight);
455 __ j(positive, &ok, Label::kNear);
457 __ call(isolate()->builtins()->InterruptCheck(),
458 RelocInfo::CODE_TARGET);
460 EmitProfilingCounterReset();
463 SetReturnPosition(literal());
464 int no_frame_start = masm_->pc_offset();
// Drop receiver + arguments from the caller's stack on return.
467 int arg_count = info_->scope()->num_parameters() + 1;
468 int arguments_bytes = arg_count * kPointerSize;
469 __ Ret(arguments_bytes, ecx);
470 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
475 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
// Push the variable's current value onto the stack (stack-value context).
476 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
477 MemOperand operand = codegen()->VarOperand(var, result_register());
478 // Memory operands can be pushed directly.
// Root-list plugging is never generated on IA32 (no root register).
483 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
484 UNREACHABLE(); // Not used on IA32.
// Root-list plugging is never generated on IA32 (no root register).
488 void FullCodeGenerator::AccumulatorValueContext::Plug(
489 Heap::RootListIndex index) const {
490 UNREACHABLE(); // Not used on IA32.
// Root-list plugging is never generated on IA32 (no root register).
494 void FullCodeGenerator::StackValueContext::Plug(
495 Heap::RootListIndex index) const {
496 UNREACHABLE(); // Not used on IA32.
// Root-list plugging is never generated on IA32 (no root register).
500 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
501 UNREACHABLE(); // Not used on IA32.
505 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
509 void FullCodeGenerator::AccumulatorValueContext::Plug(
510 Handle<Object> lit) const {
// Load the literal into the accumulator (eax). NOTE(review): the guard
// choosing SafeMove vs. Move is elided here -- presumably SafeMove handles
// values that need safe immediate encoding; confirm against the original.
512 __ SafeMove(result_register(), Immediate(lit));
514 __ Move(result_register(), Immediate(lit));
519 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
// Push the literal onto the stack. NOTE(review): the guard choosing
// SafePush vs. push is elided in this listing -- confirm.
521 __ SafePush(Immediate(lit));
523 __ push(Immediate(lit));
528 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
// Branch directly on the statically-known truthiness of the literal;
// falls back to a dynamic DoTest for all other literal values.
529 codegen()->PrepareForBailoutBeforeSplit(condition(),
533 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
534 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
535 if (false_label_ != fall_through_) __ jmp(false_label_);
536 } else if (lit->IsTrue() || lit->IsJSObject()) {
537 if (true_label_ != fall_through_) __ jmp(true_label_);
538 } else if (lit->IsString()) {
// Only the empty string is falsy.
539 if (String::cast(*lit)->length() == 0) {
540 if (false_label_ != fall_through_) __ jmp(false_label_);
542 if (true_label_ != fall_through_) __ jmp(true_label_);
544 } else if (lit->IsSmi()) {
// Only the Smi zero is falsy.
545 if (Smi::cast(*lit)->value() == 0) {
546 if (false_label_ != fall_through_) __ jmp(false_label_);
548 if (true_label_ != fall_through_) __ jmp(true_label_);
551 // For simplicity we always test the accumulator register.
552 __ mov(result_register(), lit);
553 codegen()->DoTest(this);
558 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
559 Register reg) const {
// Effect context: the value in |reg| is discarded. NOTE(review): the body
// (presumably a plain Drop of |count| stack slots) is elided -- confirm.
565 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
567 Register reg) const {
// Drop |count| stack slots (drop line elided in this listing) and leave the
// value in the accumulator.
570 __ Move(result_register(), reg);
574 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
575 Register reg) const {
// Replace the top |count| stack slots with the single value in |reg|:
// drop all but one slot, then overwrite the remaining top-of-stack.
577 if (count > 1) __ Drop(count - 1);
578 __ mov(Operand(esp, 0), reg);
582 void FullCodeGenerator::TestContext::DropAndPlug(int count,
583 Register reg) const {
// Drop |count| stack slots (drop line elided in this listing), then branch
// on the truthiness of the value.
585 // For simplicity we always test the accumulator register.
587 __ Move(result_register(), reg);
588 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
589 codegen()->DoTest(this);
593 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
594 Label* materialize_false) const {
// In an effect context nothing is materialized, so both labels must be the
// same point; just bind it.
595 DCHECK(materialize_true == materialize_false);
596 __ bind(materialize_true);
600 void FullCodeGenerator::AccumulatorValueContext::Plug(
601 Label* materialize_true,
602 Label* materialize_false) const {
// Materialize the test result as true_value/false_value in the accumulator.
604 __ bind(materialize_true);
605 __ mov(result_register(), isolate()->factory()->true_value());
606 __ jmp(&done, Label::kNear);
607 __ bind(materialize_false);
608 __ mov(result_register(), isolate()->factory()->false_value());
613 void FullCodeGenerator::StackValueContext::Plug(
614 Label* materialize_true,
615 Label* materialize_false) const {
// Materialize the test result as true_value/false_value on the stack.
617 __ bind(materialize_true);
618 __ push(Immediate(isolate()->factory()->true_value()));
619 __ jmp(&done, Label::kNear);
620 __ bind(materialize_false);
621 __ push(Immediate(isolate()->factory()->false_value()));
626 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
627 Label* materialize_false) const {
// In a test context the branch targets are already the context's own
// labels; nothing to materialize.
628 DCHECK(materialize_true == true_label_);
629 DCHECK(materialize_false == false_label_);
633 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
// Load the boolean constant into the accumulator.
634 Handle<Object> value = flag
635 ? isolate()->factory()->true_value()
636 : isolate()->factory()->false_value();
637 __ mov(result_register(), value);
641 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
// Push the boolean constant onto the stack.
642 Handle<Object> value = flag
643 ? isolate()->factory()->true_value()
644 : isolate()->factory()->false_value();
645 __ push(Immediate(value));
649 void FullCodeGenerator::TestContext::Plug(bool flag) const {
// Jump straight to the true or false label for a statically-known boolean.
650 codegen()->PrepareForBailoutBeforeSplit(condition(),
655 if (true_label_ != fall_through_) __ jmp(true_label_);
657 if (false_label_ != fall_through_) __ jmp(false_label_);
662 void FullCodeGenerator::DoTest(Expression* condition,
665 Label* fall_through) {
// Convert the accumulator to a boolean via the ToBoolean IC, then branch:
// the stub returns a nonzero value for true.
666 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
667 CallIC(ic, condition->test_id());
668 __ test(result_register(), result_register());
669 // The stub returns nonzero for true.
670 Split(not_zero, if_true, if_false, fall_through);
674 void FullCodeGenerator::Split(Condition cc,
677 Label* fall_through) {
// Emit the minimal branch sequence for condition |cc|, letting whichever
// label equals |fall_through| be reached by falling through.
// NOTE(review): the if_false==fall_through arm and the final else are
// elided in this listing.
678 if (if_false == fall_through) {
680 } else if (if_true == fall_through) {
681 __ j(NegateCondition(cc), if_false);
689 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
// Compute the ebp-relative operand for a stack-allocated variable
// (parameter or local).
690 DCHECK(var->IsStackAllocated());
691 // Offset is negative because higher indexes are at lower addresses.
692 int offset = -var->index() * kPointerSize;
693 // Adjust by a (parameter or local) base offset.
694 if (var->IsParameter()) {
695 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
697 offset += JavaScriptFrameConstants::kLocal0Offset;
699 return Operand(ebp, offset);
703 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
// Return an operand for |var|: a context slot (loading the owning context
// into |scratch|) or a stack slot.
704 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
705 if (var->IsContextSlot()) {
706 int context_chain_length = scope()->ContextChainLength(var->scope());
707 __ LoadContext(scratch, context_chain_length);
708 return ContextOperand(scratch, var->index());
710 return StackOperand(var);
715 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
// Load the variable's value into |dest|; |dest| doubles as the scratch
// register for locating a context slot.
716 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
717 MemOperand location = VarOperand(var, dest);
718 __ mov(dest, location);
722 void FullCodeGenerator::SetVar(Variable* var,
726 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
// Store |src| into the variable's slot; scratch0 locates a context slot and
// both scratches are used by the write barrier. All three must be distinct
// and must not alias esi (the context register).
727 DCHECK(!scratch0.is(src));
728 DCHECK(!scratch0.is(scratch1));
729 DCHECK(!scratch1.is(src));
730 MemOperand location = VarOperand(var, scratch0);
731 __ mov(location, src);
733 // Emit the write barrier code if the location is in the heap.
734 if (var->IsContextSlot()) {
735 int offset = Context::SlotOffset(var->index());
736 DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
737 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
742 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
743 bool should_normalize,
// Record a bailout point just before a Split so deopt resumes correctly in
// a test context; optionally normalize the TOS value to a branch on eax.
746 // Only prepare for bailouts before splits if we're in a test
747 // context. Otherwise, we let the Visit function deal with the
748 // preparation to avoid preparing with the same AST id twice.
749 if (!context()->IsTest()) return;
752 if (should_normalize) __ jmp(&skip, Label::kNear);
753 PrepareForBailout(expr, TOS_REG);
754 if (should_normalize) {
755 __ cmp(eax, isolate()->factory()->true_value());
756 Split(equal, if_true, if_false, NULL);
762 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
763 // The variable in the declaration always resides in the current context.
764 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
765 if (generate_debug_code_) {
766 // Check that we're not inside a with or catch context.
767 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
768 __ cmp(ebx, isolate()->factory()->with_context_map());
769 __ Check(not_equal, kDeclarationInWithContext);
770 __ cmp(ebx, isolate()->factory()->catch_context_map());
771 __ Check(not_equal, kDeclarationInCatchContext);
776 void FullCodeGenerator::VisitVariableDeclaration(
777 VariableDeclaration* declaration) {
778 // If it was not possible to allocate the variable at compile time, we
779 // need to "declare" it at runtime to make sure it actually exists in the
// NOTE(review): this listing is elided; the 'if (hole_init)' guards around
// the hole stores, case breaks and closing braces are not all visible.
781 VariableProxy* proxy = declaration->proxy();
782 VariableMode mode = declaration->mode();
783 Variable* variable = proxy->var();
// LET/CONST/CONST_LEGACY bindings start out holding the hole (TDZ).
784 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
785 switch (variable->location()) {
786 case VariableLocation::GLOBAL:
787 case VariableLocation::UNALLOCATED:
// Record name/initial-value pairs; DeclareGlobals flushes them later.
788 globals_->Add(variable->name(), zone());
789 globals_->Add(variable->binding_needs_init()
790 ? isolate()->factory()->the_hole_value()
791 : isolate()->factory()->undefined_value(), zone());
794 case VariableLocation::PARAMETER:
795 case VariableLocation::LOCAL:
797 Comment cmnt(masm_, "[ VariableDeclaration");
798 __ mov(StackOperand(variable),
799 Immediate(isolate()->factory()->the_hole_value()));
803 case VariableLocation::CONTEXT:
805 Comment cmnt(masm_, "[ VariableDeclaration");
806 EmitDebugCheckDeclarationContext(variable);
807 __ mov(ContextOperand(esi, variable->index()),
808 Immediate(isolate()->factory()->the_hole_value()));
809 // No write barrier since the hole value is in old space.
810 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
814 case VariableLocation::LOOKUP: {
815 Comment cmnt(masm_, "[ VariableDeclaration");
816 __ push(Immediate(variable->name()));
817 // VariableDeclaration nodes are always introduced in one of four modes.
818 DCHECK(IsDeclaredVariableMode(mode));
819 // Push initial value, if any.
820 // Note: For variables we must not push an initial value (such as
821 // 'undefined') because we may have a (legal) redeclaration and we
822 // must not destroy the current value.
824 __ push(Immediate(isolate()->factory()->the_hole_value()));
826 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
828 __ CallRuntime(IsImmutableVariableMode(mode)
829 ? Runtime::kDeclareReadOnlyLookupSlot
830 : Runtime::kDeclareLookupSlot,
838 void FullCodeGenerator::VisitFunctionDeclaration(
839 FunctionDeclaration* declaration) {
// Declare a function binding; unlike plain variables the initial closure
// value is always computed and stored. NOTE(review): listing elided --
// case breaks and closing braces are not all visible.
840 VariableProxy* proxy = declaration->proxy();
841 Variable* variable = proxy->var();
842 switch (variable->location()) {
843 case VariableLocation::GLOBAL:
844 case VariableLocation::UNALLOCATED: {
// Record name/SharedFunctionInfo pairs for DeclareGlobals.
845 globals_->Add(variable->name(), zone());
846 Handle<SharedFunctionInfo> function =
847 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
848 // Check for stack-overflow exception.
849 if (function.is_null()) return SetStackOverflow();
850 globals_->Add(function, zone());
854 case VariableLocation::PARAMETER:
855 case VariableLocation::LOCAL: {
856 Comment cmnt(masm_, "[ FunctionDeclaration");
857 VisitForAccumulatorValue(declaration->fun());
858 __ mov(StackOperand(variable), result_register());
862 case VariableLocation::CONTEXT: {
863 Comment cmnt(masm_, "[ FunctionDeclaration");
864 EmitDebugCheckDeclarationContext(variable);
865 VisitForAccumulatorValue(declaration->fun());
866 __ mov(ContextOperand(esi, variable->index()), result_register());
867 // We know that we have written a function, which is not a smi.
868 __ RecordWriteContextSlot(esi,
869 Context::SlotOffset(variable->index()),
875 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
879 case VariableLocation::LOOKUP: {
880 Comment cmnt(masm_, "[ FunctionDeclaration");
881 __ push(Immediate(variable->name()));
882 VisitForStackValue(declaration->fun());
883 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
890 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
891 // Call the runtime to declare the globals.
// NOTE(review): the push of |pairs| itself is elided in this listing; the
// runtime call takes 2 arguments, so it is presumably pushed first.
893 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
894 __ CallRuntime(Runtime::kDeclareGlobals, 2);
895 // Return value is ignored.
899 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
900 // Call the runtime to declare the modules.
901 __ Push(descriptions);
902 __ CallRuntime(Runtime::kDeclareModules, 1);
903 // Return value is ignored.
907 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Two passes: first compile every case test (with an inlined smi fast path
// and a CompareIC slow path) branching to its body target, then compile
// all case bodies. NOTE(review): this listing is elided -- labels
// (slow_case, skip), the smi-case compare, and closing braces are not all
// visible.
908 Comment cmnt(masm_, "[ SwitchStatement");
909 Breakable nested_statement(this, stmt);
910 SetStatementPosition(stmt);
912 // Keep the switch value on the stack until a case matches.
913 VisitForStackValue(stmt->tag());
914 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
916 ZoneList<CaseClause*>* clauses = stmt->cases();
917 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
919 Label next_test; // Recycled for each test.
920 // Compile all the tests with branches to their bodies.
921 for (int i = 0; i < clauses->length(); i++) {
922 CaseClause* clause = clauses->at(i);
923 clause->body_target()->Unuse();
925 // The default is not a test, but remember it as final fall through.
926 if (clause->is_default()) {
927 default_clause = clause;
931 Comment cmnt(masm_, "[ Case comparison");
935 // Compile the label expression.
936 VisitForAccumulatorValue(clause->label());
938 // Perform the comparison as if via '==='.
939 __ mov(edx, Operand(esp, 0)); // Switch value.
940 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
941 JumpPatchSite patch_site(masm_);
942 if (inline_smi_code) {
946 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
949 __ j(not_equal, &next_test);
950 __ Drop(1); // Switch value is no longer needed.
951 __ jmp(clause->body_target());
// Slow path: record the comparison through the CompareIC.
955 SetExpressionPosition(clause);
956 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
957 strength(language_mode())).code();
958 CallIC(ic, clause->CompareId());
959 patch_site.EmitPatchInfo();
962 __ jmp(&skip, Label::kNear);
963 PrepareForBailout(clause, TOS_REG);
964 __ cmp(eax, isolate()->factory()->true_value());
965 __ j(not_equal, &next_test);
967 __ jmp(clause->body_target());
971 __ j(not_equal, &next_test);
972 __ Drop(1); // Switch value is no longer needed.
973 __ jmp(clause->body_target());
976 // Discard the test value and jump to the default if present, otherwise to
977 // the end of the statement.
979 __ Drop(1); // Switch value is no longer needed.
980 if (default_clause == NULL) {
981 __ jmp(nested_statement.break_label());
983 __ jmp(default_clause->body_target());
986 // Compile all the case bodies.
987 for (int i = 0; i < clauses->length(); i++) {
988 Comment cmnt(masm_, "[ Case body");
989 CaseClause* clause = clauses->at(i);
990 __ bind(clause->body_target());
991 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
992 VisitStatements(clause->statements());
995 __ bind(nested_statement.break_label());
996 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1000 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1001 Comment cmnt(masm_, "[ ForInStatement");
1002 SetStatementPosition(stmt, SKIP_BREAK);
1004 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1007 ForIn loop_statement(this, stmt);
1008 increment_loop_depth();
1010 // Get the object to enumerate over. If the object is null or undefined, skip
1011 // over the loop. See ECMA-262 version 5, section 12.6.4.
1012 SetExpressionAsStatementPosition(stmt->enumerable());
1013 VisitForAccumulatorValue(stmt->enumerable());
1014 __ cmp(eax, isolate()->factory()->undefined_value());
1016 __ cmp(eax, isolate()->factory()->null_value());
1019 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1021 // Convert the object to a JS object.
1022 Label convert, done_convert;
1023 __ JumpIfSmi(eax, &convert, Label::kNear);
1024 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1025 __ j(above_equal, &done_convert, Label::kNear);
1027 ToObjectStub stub(isolate());
1029 __ bind(&done_convert);
1030 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1033 // Check for proxies.
1034 Label call_runtime, use_cache, fixed_array;
1035 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1036 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1037 __ j(below_equal, &call_runtime);
1039 // Check cache validity in generated code. This is a fast case for
1040 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1041 // guarantee cache validity, call the runtime system to check cache
1042 // validity or get the property names in a fixed array.
1043 __ CheckEnumCache(&call_runtime);
1045 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1046 __ jmp(&use_cache, Label::kNear);
1048 // Get the set of properties to enumerate.
1049 __ bind(&call_runtime);
1051 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1052 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1053 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1054 isolate()->factory()->meta_map());
1055 __ j(not_equal, &fixed_array);
1058 // We got a map in register eax. Get the enumeration cache from it.
1059 Label no_descriptors;
1060 __ bind(&use_cache);
1062 __ EnumLength(edx, eax);
1063 __ cmp(edx, Immediate(Smi::FromInt(0)));
1064 __ j(equal, &no_descriptors);
1066 __ LoadInstanceDescriptors(eax, ecx);
1067 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1068 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1070 // Set up the four remaining stack slots.
1071 __ push(eax); // Map.
1072 __ push(ecx); // Enumeration cache.
1073 __ push(edx); // Number of valid entries for the map in the enum cache.
1074 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1077 __ bind(&no_descriptors);
1078 __ add(esp, Immediate(kPointerSize));
1081 // We got a fixed array in register eax. Iterate through that.
1083 __ bind(&fixed_array);
1085 // No need for a write barrier, we are storing a Smi in the feedback vector.
1086 __ LoadHeapObject(ebx, FeedbackVector());
1087 int vector_index = FeedbackVector()->GetIndex(slot);
1088 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
1089 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1091 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1092 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1093 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1094 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1095 __ j(above, &non_proxy);
1096 __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1097 __ bind(&non_proxy);
1098 __ push(ebx); // Smi
1099 __ push(eax); // Array
1100 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1101 __ push(eax); // Fixed array length (as smi).
1102 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1104 // Generate code for doing the condition check.
1105 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1107 SetExpressionAsStatementPosition(stmt->each());
1109 __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1110 __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1111 __ j(above_equal, loop_statement.break_label());
1113 // Get the current entry of the array into register ebx.
1114 __ mov(ebx, Operand(esp, 2 * kPointerSize));
1115 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1117 // Get the expected map from the stack or a smi in the
1118 // permanent slow case into register edx.
1119 __ mov(edx, Operand(esp, 3 * kPointerSize));
1121 // Check if the expected map still matches that of the enumerable.
1122 // If not, we may have to filter the key.
1124 __ mov(ecx, Operand(esp, 4 * kPointerSize));
1125 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1126 __ j(equal, &update_each, Label::kNear);
1128 // For proxies, no filtering is done.
1129 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1130 DCHECK(Smi::FromInt(0) == 0);
1132 __ j(zero, &update_each);
1134 // Convert the entry to a string or null if it isn't a property
1135 // anymore. If the property has been removed while iterating, we
1137 __ push(ecx); // Enumerable.
1138 __ push(ebx); // Current entry.
1139 __ CallRuntime(Runtime::kForInFilter, 2);
1140 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1141 __ cmp(eax, isolate()->factory()->undefined_value());
1142 __ j(equal, loop_statement.continue_label());
1145 // Update the 'each' property or variable from the possibly filtered
1146 // entry in register ebx.
1147 __ bind(&update_each);
1148 __ mov(result_register(), ebx);
1149 // Perform the assignment as if via '='.
1150 { EffectContext context(this);
1151 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1152 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1155 // Generate code for the body of the loop.
1156 Visit(stmt->body());
1158 // Generate code for going to the next element by incrementing the
1159 // index (smi) stored on top of the stack.
1160 __ bind(loop_statement.continue_label());
1161 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1163 EmitBackEdgeBookkeeping(stmt, &loop);
1166 // Remove the pointers stored on the stack.
1167 __ bind(loop_statement.break_label());
1168 __ add(esp, Immediate(5 * kPointerSize));
1170 // Exit and decrement the loop depth.
1171 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1173 decrement_loop_depth();
// Materializes a JSFunction (closure) for |info| and plugs the result (eax)
// into the current expression context.
// NOTE(review): the parameter list is truncated in this excerpt; the code
// below reads a |pretenure| flag that is presumably a parameter — confirm.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    // Fast path: FastNewClosureStub allocates the closure in new space;
    // ebx carries the SharedFunctionInfo the stub expects.
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(ebx, Immediate(info));
    // Slow path: call the runtime with the SharedFunctionInfo and the
    // pretenure decision encoded as a true/false heap value.
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                          ? isolate()->factory()->true_value()
                          : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  // Both paths leave the new closure in eax.
  context()->Plug(eax);
// If |initializer| needs a [[HomeObject]] (e.g. it contains super property
// references), emits a store IC that writes the receiver currently at the
// top of the stack into the home_object_symbol property of the value stored
// |offset| slots down the stack.
// NOTE(review): the |offset| parameter line is elided in this excerpt —
// confirm against the full source.
void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    // Receiver for the store is the object on top of the stack.
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(isolate()->factory()->home_object_symbol()));
    // The value whose home object is being set lives |offset| slots down.
    __ mov(StoreDescriptor::ValueRegister(),
           Operand(esp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
// Loads a global variable for |proxy| after verifying that no
// eval-introduced context extension object along the context chain could
// shadow it. Jumps to |slow| as soon as a non-NULL extension is found.
// Clobbers edx (temp); deliberately preserves esi (the current context).
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
  Register context = esi;
  Register temp = edx;
    // Statically walk the scopes we know about, checking each context that
    // could carry an extension created by a sloppy-mode eval.
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
        __ j(not_equal, slow);
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  if (s != NULL && s->is_eval_scope()) {
    // The remaining chain length is unknown statically, so check every
    // context dynamically up to the native context.
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    if (!context.is(temp)) {
      __ mov(temp, context);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
  // All extension objects were empty and it is safe to use a normal global
  EmitGlobalVariableLoad(proxy, typeof_mode);
// Returns an operand for reading the context slot of |var|, after emitting
// checks that no eval-introduced extension object between the current scope
// and the variable's scope could shadow the binding. Jumps to the (elided
// in this excerpt) |slow| label when an extension is present. Clobbers ebx.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
        __ j(not_equal, slow);
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);
  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
// Emits the fast path for loading a dynamically-scoped variable: falls back
// to |slow| when eval-introduced bindings might shadow it, and jumps to
// |done| with the value in eax when the fast path succeeds.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      // Hole-checked bindings: the hole value marks "not yet initialized".
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        // Legacy const reads as undefined before initialization.
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        // let/const in the temporal dead zone throw a ReferenceError.
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
// Loads the value of a global variable into eax. Static global object
// properties go through the LoadGlobalViaContext machinery (stub when the
// context chain is shallow enough, runtime otherwise); ordinary globals go
// through a load IC keyed by the proxy's feedback vector slot.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      // Shallow enough for the stub to walk the context chain itself.
      __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);
      // Too deep for the stub; go through the runtime instead.
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    // Ordinary global: load through an IC with the proxy's feedback slot.
    __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), var->name());
    __ mov(LoadDescriptor::SlotRegister(),
           Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
// Loads the value of |proxy|'s variable and plugs it into the current
// expression context. Dispatches on the variable's location: global,
// parameter/local/context slot (with hole checks for let/const), or
// dynamic lookup.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();
  // Three cases: global variables, lookup variables, and all other types of
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP.
        DCHECK(var->scope() != NULL);
        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(literal() != nullptr &&
                (literal()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          // Safe to skip when the read provably happens after initialization.
          skip_init_check = var->mode() != CONST_LEGACY &&
                            var->initializer_position() < proxy->position();
        if (!skip_init_check) {
          // Let and const need a read barrier.
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            // Uninitalized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          context()->Plug(eax);
      context()->Plug(var);
    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      // Slow path: full runtime lookup by name in the current context.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      context()->Plug(eax);
// Emits code for a RegExp literal: reuses the boilerplate cached in the
// function's literals array when present (materializing it via the runtime
// otherwise), then makes a shallow field-by-field copy of the regexp object
// and plugs the clone (eax) into the expression context.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined slot means the boilerplate has not been created yet.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);
  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  // Inline allocation failed; allocate via the runtime instead.
  __ bind(&runtime_allocate);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  if ((size % (2 * kPointerSize)) != 0) {
    // Odd number of pointer-sized words: copy the trailing one.
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  context()->Plug(eax);
1528 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1529 if (expression == NULL) {
1530 __ push(Immediate(isolate()->factory()->null_value()));
1532 VisitForStackValue(expression);
// Emits code for an object literal. First the boilerplate object is created
// (via runtime call or FastCloneShallowObjectStub), then the properties up
// to the first computed name are initialized with store ICs / runtime calls,
// accessors are defined in getter/setter pairs, and finally any properties
// from the first computed name onward are defined one by one to preserve
// insertion order.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    // Slow path: let the runtime build the literal.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    // Fast path: clone the boilerplate with a stub.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;
  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    // The first computed name ends the "static" part of the literal.
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;
    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              // Set the [[HomeObject]] of the just-stored value.
              __ mov(StoreDescriptor::ReceiverRegister(), eax);
              __ mov(StoreDescriptor::NameRegister(),
                     Immediate(isolate()->factory()->home_object_symbol()));
              __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
            VisitForEffect(value);
        // Non-internalized-string key: store via Runtime::kSetProperty.
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
      case ObjectLiteral::Property::GETTER:
        // Defer accessors so getter/setter pairs can be defined together.
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    __ push(Operand(esp, 0));  // Duplicate receiver.
    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));
      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ push(Immediate(Smi::FromInt(NONE)));
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
        case ObjectLiteral::Property::PROTOTYPE:
        case ObjectLiteral::Property::GETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        case ObjectLiteral::Property::SETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
    context()->PlugTOS();
    context()->Plug(eax);
  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
// Emits code for an array literal. The boilerplate array is created (via
// runtime call or FastCloneShallowArrayStub), non-constant elements before
// the first spread are stored directly into the cloned elements array (or
// via a stub when a transition is possible), and elements from the first
// spread onward are appended via runtime/builtin calls.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");
  expr->BuildConstantElements(isolate());
  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());
  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    // Slow path: build the literal in the runtime.
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
    // Fast path: clone the boilerplate with a stub.
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    VisitForAccumulatorValue(subexpr);
    if (has_constant_fast_elements) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          EMIT_REMEMBERED_SET,
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(array_index)));
      StoreArrayLiteralElementStub stub(isolate());
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array which has a literal index is handled
  // above. The second part is the part after the first spread expression
  // (inclusive) and these elements gets appended to the array. Note that the
  // number elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    __ Drop(1);  // literal index
    result_saved = false;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
    __ Drop(1);  // literal index
    context()->PlugTOS();
    context()->Plug(eax);
// Emits code for an assignment expression. Evaluates the LHS (leaving
// receiver/key material on the stack as required by the assignment type),
// performs the binary operation for compound assignments, and then stores
// via the appropriate variable/property/super-property path.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);
  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);
  // Evaluate LHS expression.
  switch (assign_type) {
      // Nothing to do here.
    case NAMED_SUPER_PROPERTY:
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ push(result_register());
      if (expr->is_compound()) {
        // Compound assignment needs this_var/home_object twice: once for
        // the load below and once for the store.
        __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        VisitForStackValue(property->obj());
    case KEYED_SUPER_PROPERTY:
          property->obj()->AsSuperPropertyReference()->this_var());
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        // Duplicate this_var/home_object/key for the load below.
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(result_register());
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op);
    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
    // Simple (non-compound) assignment: just evaluate the RHS.
    VisitForAccumulatorValue(expr->value());
  SetExpressionPosition(expr);
  // Store the value (in eax) through the appropriate path.
  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
// Emits code for a 'yield' expression inside a generator function. The
// yielded value is evaluated first and kept on the stack while the
// generator object is updated. Dispatch is on the yield kind:
//  - kSuspend/kInitial: record the continuation code offset and current
//    context in the generator object, suspend via
//    Runtime::kSuspendJSGeneratorObject, and emit the return sequence.
//  - kFinal: mark the generator closed and return a done iterator result.
//  - kDelegating: implement yield* as a next()/throw() loop over the
//    delegated iterator, re-yielding results until result.done is true.
// NOTE(review): this listing is elided (gaps in the embedded numbering);
// some statements, 'break's and closing braces are not visible here.
1999 void FullCodeGenerator::VisitYield(Yield* expr) {
2000 Comment cmnt(masm_, "[ Yield");
2001 SetExpressionPosition(expr);
2003 // Evaluate yielded value first; the initial iterator definition depends on
2004 // this. It stays on the stack while we update the iterator.
2005 VisitForStackValue(expr->expression());
2007 switch (expr->yield_kind()) {
2008 case Yield::kSuspend:
2009 // Pop value from top-of-stack slot; box result into result register.
2010 EmitCreateIteratorResult(false);
2011 __ push(result_register());
// NOTE(review): kSuspend appears to fall through into kInitial's suspend
// logic below; the elided lines would confirm this — verify.
2013 case Yield::kInitial: {
2014 Label suspend, continuation, post_runtime, resume;
2017 __ bind(&continuation);
2018 __ RecordGeneratorContinuation();
// Store the resume point (code offset of 'continuation', as a Smi) and
// the current context into the generator object (in eax).
2022 VisitForAccumulatorValue(expr->generator_object());
2023 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2024 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2025 Immediate(Smi::FromInt(continuation.pos())));
2026 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2028 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2030 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
2032 __ j(equal, &post_runtime);
2033 __ push(eax); // generator object
2034 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2035 __ mov(context_register(),
2036 Operand(ebp, StandardFrameConstants::kContextOffset));
2037 __ bind(&post_runtime);
2038 __ pop(result_register());
2039 EmitReturnSequence();
2042 context()->Plug(result_register());
2046 case Yield::kFinal: {
// 'return' inside a generator: close the generator permanently and
// produce {value, done: true}.
2047 VisitForAccumulatorValue(expr->generator_object());
2048 __ mov(FieldOperand(result_register(),
2049 JSGeneratorObject::kContinuationOffset),
2050 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2051 // Pop value from top-of-stack slot, box result into result register.
2052 EmitCreateIteratorResult(true);
2053 EmitUnwindBeforeReturn();
2054 EmitReturnSequence();
2058 case Yield::kDelegating: {
2059 VisitForStackValue(expr->generator_object());
2061 // Initial stack layout is as follows:
2062 // [sp + 1 * kPointerSize] iter
2063 // [sp + 0 * kPointerSize] g
2065 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2066 Label l_next, l_call, l_loop;
2067 Register load_receiver = LoadDescriptor::ReceiverRegister();
2068 Register load_name = LoadDescriptor::NameRegister();
2070 // Initial send value is undefined.
2071 __ mov(eax, isolate()->factory()->undefined_value());
2074 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2076 __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2077 __ push(load_name); // "throw"
2078 __ push(Operand(esp, 2 * kPointerSize)); // iter
2079 __ push(eax); // exception
2082 // try { received = %yield result }
2083 // Shuffle the received result above a try handler and yield it without
2086 __ pop(eax); // result
2087 int handler_index = NewHandlerTableEntry();
2088 EnterTryBlock(handler_index, &l_catch);
2089 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2090 __ push(eax); // result
2093 __ bind(&l_continuation);
2094 __ RecordGeneratorContinuation();
2097 __ bind(&l_suspend);
// Suspend inside the delegating loop: the generator object lives above
// the try-handler's stack slots.
2098 const int generator_object_depth = kPointerSize + try_block_size;
2099 __ mov(eax, Operand(esp, generator_object_depth));
2101 __ push(Immediate(Smi::FromInt(handler_index))); // handler-index
2102 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2103 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2104 Immediate(Smi::FromInt(l_continuation.pos())));
2105 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2107 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2109 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2110 __ mov(context_register(),
2111 Operand(ebp, StandardFrameConstants::kContextOffset));
2112 __ pop(eax); // result
2113 EmitReturnSequence();
2114 __ bind(&l_resume); // received in eax
2115 ExitTryBlock(handler_index);
2117 // receiver = iter; f = iter.next; arg = received;
2120 __ mov(load_name, isolate()->factory()->next_string());
2121 __ push(load_name); // "next"
2122 __ push(Operand(esp, 2 * kPointerSize)); // iter
2123 __ push(eax); // received
2125 // result = receiver[f](arg);
2127 __ mov(load_receiver, Operand(esp, kPointerSize));
2128 __ mov(LoadDescriptor::SlotRegister(),
2129 Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2130 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2131 CallIC(ic, TypeFeedbackId::None());
2133 __ mov(Operand(esp, 2 * kPointerSize), edi);
2134 SetCallPosition(expr, 1);
2135 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2138 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2139 __ Drop(1); // The function is still on the stack; drop it.
2141 // if (!result.done) goto l_try;
2143 __ push(eax); // save result
2144 __ Move(load_receiver, eax); // result
2146 isolate()->factory()->done_string()); // "done"
2147 __ mov(LoadDescriptor::SlotRegister(),
2148 Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
2149 CallLoadIC(NOT_INSIDE_TYPEOF); // result.done in eax
2150 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
// Loop is done: extract result.value and drop iter and g.
2156 __ pop(load_receiver); // result
2158 isolate()->factory()->value_string()); // "value"
2159 __ mov(LoadDescriptor::SlotRegister(),
2160 Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
2161 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in eax
2162 context()->DropAndPlug(2, eax); // drop iter and g
// Resumes a suspended generator activation. Rebuilds the generator's
// JavaScript frame (receiver, hole-filled formal parameters, saved context
// and function), then either jumps straight back to the recorded
// continuation offset (fast path: NEXT resume with an empty operand
// stack) or calls Runtime::kResumeJSGeneratorObject to restore the
// operand stack and handlers (slow path, which does not return here).
// NOTE(review): the listing is elided; e.g. the load of the generator
// object into ebx and several closing braces are not visible — the
// comment below documents the ebx convention.
2169 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2171 JSGeneratorObject::ResumeMode resume_mode) {
2172 // The value stays in eax, and is ultimately read by the resumed generator, as
2173 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2174 // is read to throw the value when the resumed generator is already closed.
2175 // ebx will hold the generator object until the activation has been resumed.
2176 VisitForStackValue(generator);
2177 VisitForAccumulatorValue(value);
2180 // Load suspended function and context.
2181 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2182 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2185 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2187 // Push holes for arguments to generator function.
2188 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2190 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2191 __ mov(ecx, isolate()->factory()->the_hole_value());
2192 Label push_argument_holes, push_frame;
2193 __ bind(&push_argument_holes);
// Count down the (Smi) formal parameter count; 'carry' after sub means
// the counter went below zero, i.e. all holes have been pushed.
2194 __ sub(edx, Immediate(Smi::FromInt(1)));
2195 __ j(carry, &push_frame);
2197 __ jmp(&push_argument_holes);
2199 // Enter a new JavaScript frame, and initialize its slots as they were when
2200 // the generator was suspended.
2201 Label resume_frame, done;
2202 __ bind(&push_frame);
// The call pushes a return address so the resumed frame can return to
// the code following this sequence.
2203 __ call(&resume_frame);
2205 __ bind(&resume_frame);
2206 __ push(ebp); // Caller's frame pointer.
2208 __ push(esi); // Callee's context.
2209 __ push(edi); // Callee's JS Function.
2211 // Load the operand stack size.
2212 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2213 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2216 // If we are sending a value and there is no operand stack, we can jump back
2218 if (resume_mode == JSGeneratorObject::NEXT) {
2220 __ cmp(edx, Immediate(0));
2221 __ j(not_zero, &slow_resume);
2222 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2223 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
// Mark the generator as executing before jumping back into its code.
2226 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2227 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2229 __ bind(&slow_resume);
2232 // Otherwise, we push holes for the operand stack and call the runtime to fix
2233 // up the stack and the handlers.
2234 Label push_operand_holes, call_resume;
2235 __ bind(&push_operand_holes);
2236 __ sub(edx, Immediate(1));
2237 __ j(carry, &call_resume);
2239 __ jmp(&push_operand_holes);
2240 __ bind(&call_resume);
2242 __ push(result_register());
2243 __ Push(Smi::FromInt(resume_mode));
2244 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2245 // Not reached: the runtime call returns elsewhere.
2246 __ Abort(kGeneratorFailedToResume);
2249 context()->Plug(result_register());
// Allocates an iterator result object ({value, done}) in new space,
// falling back to Runtime::kAllocateInNewSpace when allocation fails.
// The map is fetched from the native context's ITERATOR_RESULT_MAP_INDEX
// slot; 'done' is baked in as a boolean constant. The value field is
// written from ecx and gets the only write barrier (value may be a heap
// object; map/done/empty arrays need none).
// NOTE(review): elided lines likely pop the value into ecx before the
// field stores — confirm against the full source.
2253 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2257 const int instance_size = 5 * kPointerSize;
2258 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2261 __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
2264 __ bind(&gc_required);
2265 __ Push(Smi::FromInt(instance_size));
2266 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2267 __ mov(context_register(),
2268 Operand(ebp, StandardFrameConstants::kContextOffset));
2270 __ bind(&allocated);
2271 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2272 __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2273 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2275 __ mov(edx, isolate()->factory()->ToBoolean(done));
2276 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2277 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2278 isolate()->factory()->empty_fixed_array());
2279 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2280 isolate()->factory()->empty_fixed_array());
2281 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2282 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2284 // Only the value field needs a write barrier, as the other values are in the
2286 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2287 ecx, edx, kDontSaveFPRegs);
// Loads a named (non-super) property: puts the property name and the
// feedback-vector slot into the LoadIC's descriptor registers and calls
// the load IC. The receiver is expected in the IC's receiver register
// (set up by the caller).
2291 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2292 SetExpressionPosition(prop);
2293 Literal* key = prop->key()->AsLiteral();
2294 DCHECK(!key->value()->IsSmi());
2295 DCHECK(!prop->IsSuperAccess());
2297 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2298 __ mov(LoadDescriptor::SlotRegister(),
2299 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2300 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
// Loads a named property from 'super': pushes the key and language mode
// on top of the already-pushed receiver and home_object, then calls
// Runtime::kLoadFromSuper (4 args total).
2304 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2305 // Stack: receiver, home_object.
2306 SetExpressionPosition(prop);
2307 Literal* key = prop->key()->AsLiteral();
2308 DCHECK(!key->value()->IsSmi());
2309 DCHECK(prop->IsSuperAccess());
2311 __ push(Immediate(key->value()));
2312 __ push(Immediate(Smi::FromInt(language_mode())));
2313 __ CallRuntime(Runtime::kLoadFromSuper, 4);
// Loads a keyed (non-super) property via the keyed load IC. Receiver and
// key are expected in the IC's descriptor registers; only the feedback
// slot is set here. NOTE(review): the CallIC invocation itself is on an
// elided line.
2317 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2318 SetExpressionPosition(prop);
2319 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2320 __ mov(LoadDescriptor::SlotRegister(),
2321 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
// Loads a keyed property from 'super': receiver, home_object and key are
// already on the stack; push the language mode and call
// Runtime::kLoadKeyedFromSuper (4 args total).
2326 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2327 // Stack: receiver, home_object, key.
2328 SetExpressionPosition(prop);
2329 __ push(Immediate(Smi::FromInt(language_mode())));
2330 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
// Emits the inline smi fast path for a binary operation, with a
// patchable jump (JumpPatchSite) to a BinaryOpIC stub call for the
// non-smi/overflow cases. Left operand comes from the stack, right
// operand is in eax; the result is left in eax.
// NOTE(review): this listing is heavily elided — the per-operator switch
// (SHL/SHR/SAR, ADD/SUB/MUL, bitwise ops) is only partially visible, so
// the comments below describe just the visible fragments.
2334 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2337 Expression* right) {
2338 // Do combined smi check of the operands. Left operand is on the
2339 // stack. Right operand is in eax.
2340 Label smi_case, done, stub_call;
2344 JumpPatchSite patch_site(masm_);
2345 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2347 __ bind(&stub_call);
2350 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2351 CallIC(code, expr->BinaryOperationFeedbackId());
2352 patch_site.EmitPatchInfo();
2353 __ jmp(&done, Label::kNear);
2357 __ mov(eax, edx); // Copy left operand in case of a stub call.
// Arithmetic shift right keeps the value a smi once the tag bits are
// cleared; no overflow check needed.
2362 __ sar_cl(eax); // No checks of result necessary
2363 __ and_(eax, Immediate(~kSmiTagMask));
2370 // Check that the *signed* result fits in a smi.
2371 __ cmp(eax, 0xc0000000);
2372 __ j(positive, &result_ok);
2375 __ bind(&result_ok);
// Unsigned result: the top two bits must be clear for the value to be
// representable as a (tagged, signed) smi.
2384 __ test(eax, Immediate(0xc0000000));
2385 __ j(zero, &result_ok);
2388 __ bind(&result_ok);
2394 __ j(overflow, &stub_call);
2398 __ j(overflow, &stub_call);
2403 __ j(overflow, &stub_call);
2405 __ j(not_zero, &done, Label::kNear);
2408 __ j(negative, &stub_call);
2414 case Token::BIT_AND:
2417 case Token::BIT_XOR:
2425 context()->Plug(eax);
// Defines the methods and accessors of a class literal on either the
// prototype (instance members) or the constructor (static members).
// For each property: push the target object, the key, and the value,
// then call the appropriate runtime function by property kind. Finally
// calls Runtime::kFinalizeClassDefinition.
// NOTE(review): elided lines include some pushes, 'break's and the
// UNREACHABLE/closing braces of the switch.
2429 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2430 int* used_store_slots) {
2431 // Constructor is in eax.
2432 DCHECK(lit != NULL);
2435 // No access check is needed here since the constructor is created by the
2437 Register scratch = ebx;
2438 __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2441 for (int i = 0; i < lit->properties()->length(); i++) {
2442 ObjectLiteral::Property* property = lit->properties()->at(i);
2443 Expression* value = property->value();
// Select the object the member is defined on: constructor for statics,
// prototype otherwise.
2445 if (property->is_static()) {
2446 __ push(Operand(esp, kPointerSize)); // constructor
2448 __ push(Operand(esp, 0)); // prototype
2450 EmitPropertyKey(property, lit->GetIdForProperty(i));
2452 // The static prototype property is read only. We handle the non computed
2453 // property name case in the parser. Since this is the only case where we
2454 // need to check for an own read only property we special case this so we do
2455 // not need to do this for every property.
2456 if (property->is_static() && property->is_computed_name()) {
2457 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2461 VisitForStackValue(value);
2462 EmitSetHomeObjectIfNeeded(value, 2,
2463 lit->SlotForHomeObject(value, used_store_slots));
2465 switch (property->kind()) {
2466 case ObjectLiteral::Property::CONSTANT:
2467 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2468 case ObjectLiteral::Property::PROTOTYPE:
2470 case ObjectLiteral::Property::COMPUTED:
2471 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2474 case ObjectLiteral::Property::GETTER:
2475 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2476 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2479 case ObjectLiteral::Property::SETTER:
2480 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2481 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2486 // Set both the prototype and constructor to have fast properties, and also
2487 // freeze them in strong mode.
2488 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
// Emits a generic (non-inlined) binary operation through the BinaryOpIC
// stub. The unbound JumpPatchSite signals "no inlined smi code" to the
// IC patching machinery (see JumpPatchSite::EmitPatchInfo). Result in eax.
2492 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2495 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2496 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2497 CallIC(code, expr->BinaryOperationFeedbackId());
2498 patch_site.EmitPatchInfo();
2499 context()->Plug(eax);
// Stores the value in eax into the reference denoted by 'expr'
// (variable, named/keyed property, or super property), preserving eax as
// the final result. For super stores the stack is shuffled so that the
// runtime sees (this, home_object[, key]) with the value in eax.
// NOTE(review): elided lines include several 'break's, CallStoreIC
// calls and closing braces of the switch cases.
2503 void FullCodeGenerator::EmitAssignment(Expression* expr,
2504 FeedbackVectorICSlot slot) {
2505 DCHECK(expr->IsValidReferenceExpressionOrThis());
2507 Property* prop = expr->AsProperty();
2508 LhsKind assign_type = Property::GetAssignType(prop);
2510 switch (assign_type) {
2512 Variable* var = expr->AsVariableProxy()->var();
2513 EffectContext context(this);
2514 EmitVariableAssignment(var, Token::ASSIGN, slot);
2517 case NAMED_PROPERTY: {
2518 __ push(eax); // Preserve value.
2519 VisitForAccumulatorValue(prop->obj());
2520 __ Move(StoreDescriptor::ReceiverRegister(), eax);
2521 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2522 __ mov(StoreDescriptor::NameRegister(),
2523 prop->key()->AsLiteral()->value());
2524 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2528 case NAMED_SUPER_PROPERTY: {
2530 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2531 VisitForAccumulatorValue(
2532 prop->obj()->AsSuperPropertyReference()->home_object());
2533 // stack: value, this; eax: home_object
// Swap the stacked value with 'this' and park home_object on the
// stack so the store sees (this, home_object) with value in eax.
2534 Register scratch = ecx;
2535 Register scratch2 = edx;
2536 __ mov(scratch, result_register()); // home_object
2537 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2538 __ mov(scratch2, MemOperand(esp, 0)); // this
2539 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2540 __ mov(MemOperand(esp, 0), scratch); // home_object
2541 // stack: this, home_object. eax: value
2542 EmitNamedSuperPropertyStore(prop);
2545 case KEYED_SUPER_PROPERTY: {
2547 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2549 prop->obj()->AsSuperPropertyReference()->home_object());
2550 VisitForAccumulatorValue(prop->key());
// Rotate (value, this, home_object) into (this, home_object, key),
// moving the value into eax.
2551 Register scratch = ecx;
2552 Register scratch2 = edx;
2553 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2554 // stack: value, this, home_object; eax: key, edx: value
2555 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2556 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2557 __ mov(scratch, MemOperand(esp, 0)); // home_object
2558 __ mov(MemOperand(esp, kPointerSize), scratch);
2559 __ mov(MemOperand(esp, 0), eax);
2560 __ mov(eax, scratch2);
2561 // stack: this, home_object, key; eax: value.
2562 EmitKeyedSuperPropertyStore(prop);
2565 case KEYED_PROPERTY: {
2566 __ push(eax); // Preserve value.
2567 VisitForStackValue(prop->obj());
2568 VisitForAccumulatorValue(prop->key());
2569 __ Move(StoreDescriptor::NameRegister(), eax);
2570 __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2571 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2572 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2574 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2579 context()->Plug(eax);
// Stores eax into a stack-allocated or context-allocated variable slot.
// Context slots need a write barrier because the value may be a heap
// object stored into an old-space context.
2583 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2584 Variable* var, MemOperand location) {
2585 __ mov(location, eax);
2586 if (var->IsContextSlot()) {
2588 int offset = Context::SlotOffset(var->index());
2589 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
// Stores eax into the variable 'var', dispatching on its allocation and
// mode: global store IC, StoreGlobalViaContext (stub or runtime), let
// with hole check (TDZ), const with reference/assignment errors, plain
// stack/context slots, lookup slots via runtime, and legacy const
// initialization. Sloppy-mode stores to legacy const are silently
// ignored; strict mode throws.
// NOTE(review): elided lines include some CallStoreIC/CallStub calls,
// Label declarations ('assign', 'const_error', 'skip'), bind()s and
// closing braces — comments below describe only visible fragments.
2594 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2595 FeedbackVectorICSlot slot) {
2596 if (var->IsUnallocated()) {
2597 // Global var, const, or let.
2598 __ mov(StoreDescriptor::NameRegister(), var->name());
2599 __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2600 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2603 } else if (var->IsGlobalSlot()) {
2604 // Global var, const, or let.
2605 DCHECK(var->index() > 0);
2606 DCHECK(var->IsStaticGlobalObjectProperty());
2607 int const slot = var->index();
2608 int const depth = scope()->ContextChainLength(var->scope());
2609 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2610 __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
2611 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
2612 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
// Context chain too deep for the stub: go through the runtime.
2615 __ Push(Smi::FromInt(slot));
2617 __ CallRuntime(is_strict(language_mode())
2618 ? Runtime::kStoreGlobalViaContext_Strict
2619 : Runtime::kStoreGlobalViaContext_Sloppy,
2623 } else if (var->mode() == LET && op != Token::INIT_LET) {
2624 // Non-initializing assignment to let variable needs a write barrier.
2625 DCHECK(!var->IsLookupSlot());
2626 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2628 MemOperand location = VarOperand(var, ecx);
// TDZ check: the slot still holds the hole if the let binding has not
// been initialized yet — throw a ReferenceError in that case.
2629 __ mov(edx, location);
2630 __ cmp(edx, isolate()->factory()->the_hole_value());
2631 __ j(not_equal, &assign, Label::kNear);
2632 __ push(Immediate(var->name()));
2633 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2635 EmitStoreToStackLocalOrContextSlot(var, location);
2637 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2638 // Assignment to const variable needs a write barrier.
2639 DCHECK(!var->IsLookupSlot());
2640 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2642 MemOperand location = VarOperand(var, ecx);
2643 __ mov(edx, location);
2644 __ cmp(edx, isolate()->factory()->the_hole_value());
2645 __ j(not_equal, &const_error, Label::kNear);
2646 __ push(Immediate(var->name()));
2647 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2648 __ bind(&const_error);
2649 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2651 } else if (var->is_this() && op == Token::INIT_CONST) {
2652 // Initializing assignment to const {this} needs a write barrier.
2653 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2654 Label uninitialized_this;
2655 MemOperand location = VarOperand(var, ecx);
2656 __ mov(edx, location);
2657 __ cmp(edx, isolate()->factory()->the_hole_value());
2658 __ j(equal, &uninitialized_this);
2659 __ push(Immediate(var->name()));
2660 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2661 __ bind(&uninitialized_this);
2662 EmitStoreToStackLocalOrContextSlot(var, location);
2664 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2665 if (var->IsLookupSlot()) {
2666 // Assignment to var.
2667 __ push(eax); // Value.
2668 __ push(esi); // Context.
2669 __ push(Immediate(var->name()));
2670 __ push(Immediate(Smi::FromInt(language_mode())));
2671 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2673 // Assignment to var or initializing assignment to let/const in harmony
2675 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2676 MemOperand location = VarOperand(var, ecx);
2677 if (generate_debug_code_ && op == Token::INIT_LET) {
2678 // Check for an uninitialized let binding.
2679 __ mov(edx, location);
2680 __ cmp(edx, isolate()->factory()->the_hole_value());
2681 __ Check(equal, kLetBindingReInitialization);
2683 EmitStoreToStackLocalOrContextSlot(var, location);
2686 } else if (op == Token::INIT_CONST_LEGACY) {
2687 // Const initializers need a write barrier.
2688 DCHECK(var->mode() == CONST_LEGACY);
2689 DCHECK(!var->IsParameter()); // No const parameters.
2690 if (var->IsLookupSlot()) {
2693 __ push(Immediate(var->name()));
2694 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2696 DCHECK(var->IsStackLocal() || var->IsContextSlot());
// Only initialize if the slot still holds the hole; a legacy const is
// write-once.
2698 MemOperand location = VarOperand(var, ecx);
2699 __ mov(edx, location);
2700 __ cmp(edx, isolate()->factory()->the_hole_value());
2701 __ j(not_equal, &skip, Label::kNear);
2702 EmitStoreToStackLocalOrContextSlot(var, location);
2707 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2708 if (is_strict(language_mode())) {
2709 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2711 // Silently ignore store in sloppy mode.
// Completes 'obj.name = value': value is in eax, receiver on the stack.
// Sets up the store IC's name/receiver registers (and vector slot when
// vector stores are enabled) and calls the store IC; result stays in eax.
2716 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2717 // Assignment to a property, using a named store IC.
2719 // esp[0] : receiver
2720 Property* prop = expr->target()->AsProperty();
2721 DCHECK(prop != NULL);
2722 DCHECK(prop->key()->IsLiteral());
2724 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2725 __ pop(StoreDescriptor::ReceiverRegister());
2726 if (FLAG_vector_stores) {
2727 EmitLoadStoreICSlot(expr->AssignmentSlot());
2730 CallStoreIC(expr->AssignmentFeedbackId());
2732 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2733 context()->Plug(eax);
// Stores to a named super property: 'this' and home_object are already
// on the stack and the value is in eax (per callers' comments); pushes
// the key and calls the strict/sloppy StoreToSuper runtime function.
2737 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2738 // Assignment to named property of super.
2740 // stack : receiver ('this'), home_object
2741 DCHECK(prop != NULL);
2742 Literal* key = prop->key()->AsLiteral();
2743 DCHECK(key != NULL);
2745 __ push(Immediate(key->value()));
2747 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2748 : Runtime::kStoreToSuper_Sloppy),
// Stores to a keyed super property: 'this', home_object and key are on
// the stack (value in eax per callers' comments); calls the
// strict/sloppy StoreKeyedToSuper runtime function.
2753 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2754 // Assignment to named property of super.
2756 // stack : receiver ('this'), home_object, key
2760 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2761 : Runtime::kStoreKeyedToSuper_Sloppy),
// Completes 'obj[key] = value': pops key and receiver into the store
// IC's registers (value already in eax) and calls the keyed store IC;
// result stays in eax.
2766 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2767 // Assignment to a property, using a keyed store IC.
2770 // esp[kPointerSize] : receiver
2772 __ pop(StoreDescriptor::NameRegister()); // Key.
2773 __ pop(StoreDescriptor::ReceiverRegister());
2774 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2776 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2777 if (FLAG_vector_stores) {
2778 EmitLoadStoreICSlot(expr->AssignmentSlot());
2781 CallIC(ic, expr->AssignmentFeedbackId());
2784 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2785 context()->Plug(eax);
// Emits a property load, dispatching on named vs. keyed access and on
// normal vs. 'super' access, delegating to the matching Emit*Load
// helper. The loaded value ends up in eax.
2789 void FullCodeGenerator::VisitProperty(Property* expr) {
2790 Comment cmnt(masm_, "[ Property");
2791 SetExpressionPosition(expr);
2793 Expression* key = expr->key();
2795 if (key->IsPropertyName()) {
2796 if (!expr->IsSuperAccess()) {
2797 VisitForAccumulatorValue(expr->obj());
2798 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2799 EmitNamedPropertyLoad(expr);
2801 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2803 expr->obj()->AsSuperPropertyReference()->home_object());
2804 EmitNamedSuperPropertyLoad(expr);
2807 if (!expr->IsSuperAccess()) {
2808 VisitForStackValue(expr->obj());
2809 VisitForAccumulatorValue(expr->key());
2810 __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2811 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2812 EmitKeyedPropertyLoad(expr);
2814 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2816 expr->obj()->AsSuperPropertyReference()->home_object());
2817 VisitForStackValue(expr->key());
2818 EmitKeyedSuperPropertyLoad(expr);
2821 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2822 context()->Plug(eax);
// Calls an inline-cache stub, attaching the given AST id to the call's
// relocation info so deoptimization/type feedback can find it.
2826 void FullCodeGenerator::CallIC(Handle<Code> code,
2827 TypeFeedbackId ast_id) {
2829 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2833 // Code common for calls using the IC.
// Sets up the callee and receiver for a call of the form f(...) or
// obj.name(...): for a plain variable callee, pushes the variable's
// value and undefined as the receiver; for a named property callee,
// loads the function via the named load IC and slots it under the
// receiver. Then delegates argument evaluation and the call to EmitCall.
2834 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2835 Expression* callee = expr->expression();
2837 CallICState::CallType call_type =
2838 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2839 // Get the target function.
2840 if (call_type == CallICState::FUNCTION) {
2841 { StackValueContext context(this);
2842 EmitVariableLoad(callee->AsVariableProxy());
2843 PrepareForBailout(callee, NO_REGISTERS);
2845 // Push undefined as receiver. This is patched in the method prologue if it
2846 // is a sloppy mode method.
2847 __ push(Immediate(isolate()->factory()->undefined_value()));
2849 // Load the function from the receiver.
2850 DCHECK(callee->IsProperty());
2851 DCHECK(!callee->AsProperty()->IsSuperAccess());
2852 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2853 EmitNamedPropertyLoad(callee->AsProperty());
2854 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2855 // Push the target function under the receiver.
2856 __ push(Operand(esp, 0));
2857 __ mov(Operand(esp, kPointerSize), eax);
2860 EmitCall(expr, call_type);
// Emits a call of the form super.name(...): loads the target function
// via Runtime::kLoadFromSuper using (this, home_object, key,
// language_mode), then replaces home_object on the stack with the
// function so EmitCall sees (function, receiver, args...).
2864 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2865 SetExpressionPosition(expr);
2866 Expression* callee = expr->expression();
2867 DCHECK(callee->IsProperty());
2868 Property* prop = callee->AsProperty();
2869 DCHECK(prop->IsSuperAccess());
2871 Literal* key = prop->key()->AsLiteral();
2872 DCHECK(!key->value()->IsSmi());
2873 // Load the function from the receiver.
2874 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2875 VisitForStackValue(super_ref->home_object());
2876 VisitForAccumulatorValue(super_ref->this_var());
2879 __ push(Operand(esp, kPointerSize * 2));
2880 __ push(Immediate(key->value()));
2881 __ push(Immediate(Smi::FromInt(language_mode())));
2884 // - this (receiver)
2885 // - this (receiver) <-- LoadFromSuper will pop here and below.
2889 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2891 // Replace home_object with target function.
2892 __ mov(Operand(esp, kPointerSize), eax);
2895 // - target function
2896 // - this (receiver)
2897 EmitCall(expr, CallICState::METHOD);
2901 // Code common for calls using the IC.
// Emits a call of the form obj[key](...): loads the target function via
// the keyed load IC (receiver on top of stack, key in eax), then pushes
// the function under the receiver and delegates to EmitCall.
2902 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2905 VisitForAccumulatorValue(key);
2907 Expression* callee = expr->expression();
2909 // Load the function from the receiver.
2910 DCHECK(callee->IsProperty());
2911 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2912 __ mov(LoadDescriptor::NameRegister(), eax);
2913 EmitKeyedPropertyLoad(callee->AsProperty());
2914 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2916 // Push the target function under the receiver.
2917 __ push(Operand(esp, 0));
2918 __ mov(Operand(esp, kPointerSize), eax);
2920 EmitCall(expr, CallICState::METHOD);
// Emits a call of the form super[key](...): loads the target function
// via Runtime::kLoadKeyedFromSuper using (this, home_object, key,
// language_mode), then replaces home_object on the stack with the
// function so EmitCall sees (function, receiver, args...).
2924 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2925 Expression* callee = expr->expression();
2926 DCHECK(callee->IsProperty());
2927 Property* prop = callee->AsProperty();
2928 DCHECK(prop->IsSuperAccess());
2930 SetExpressionPosition(prop);
2931 // Load the function from the receiver.
2932 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2933 VisitForStackValue(super_ref->home_object());
2934 VisitForAccumulatorValue(super_ref->this_var());
2937 __ push(Operand(esp, kPointerSize * 2));
2938 VisitForStackValue(prop->key());
2939 __ push(Immediate(Smi::FromInt(language_mode())));
2942 // - this (receiver)
2943 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2947 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2949 // Replace home_object with target function.
2950 __ mov(Operand(esp, kPointerSize), eax);
2953 // - target function
2954 // - this (receiver)
2955 EmitCall(expr, CallICState::METHOD);
// Common tail for IC-based calls: evaluates the arguments onto the
// stack (function and receiver already pushed by the caller), loads the
// feedback slot into edx and the function into edi, calls the CallIC,
// records the JS return site, restores esi, and leaves the result in eax.
2959 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2960 // Load the arguments.
2961 ZoneList<Expression*>* args = expr->arguments();
2962 int arg_count = args->length();
2963 for (int i = 0; i < arg_count; i++) {
2964 VisitForStackValue(args->at(i));
2967 SetCallPosition(expr, arg_count);
2968 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2969 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2970 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2971 // Don't assign a type feedback id to the IC, since type feedback is provided
2972 // by the vector above.
2975 RecordJSReturnSite(expr);
2977 // Restore context register.
2978 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2980 context()->DropAndPlug(1, eax);
// Pushes the five arguments for Runtime::kResolvePossiblyDirectEval
// (first eval argument or undefined, enclosing function, language mode,
// scope start position — plus the callee already pushed by the caller)
// and performs the runtime call that resolves a potential direct eval.
2984 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2985 // Push copy of the first argument or undefined if it doesn't exist.
2986 if (arg_count > 0) {
2987 __ push(Operand(esp, arg_count * kPointerSize));
2989 __ push(Immediate(isolate()->factory()->undefined_value()));
2992 // Push the enclosing function.
2993 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2995 // Push the language mode.
2996 __ push(Immediate(Smi::FromInt(language_mode())));
2998 // Push the start position of the scope the calls resides in.
2999 __ push(Immediate(Smi::FromInt(scope()->start_position())));
3001 // Do the runtime call.
3002 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3006 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee and its receiver for a call whose callee is a
// variable. Lookup slots (possibly shadowed by eval-introduced
// bindings) resolve through Runtime::kLoadLookupSlot, which returns the
// function (eax) and its receiver/base object (edx); other variables
// are loaded directly with undefined as the receiver.
3007 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3008 VariableProxy* callee = expr->expression()->AsVariableProxy();
3009 if (callee->var()->IsLookupSlot()) {
3011 SetExpressionPosition(callee);
3012 // Generate code for loading from variables potentially shadowed by
3013 // eval-introduced variables.
3014 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3017 // Call the runtime to find the function to call (returned in eax) and
3018 // the object holding it (returned in edx).
3019 __ push(context_register());
3020 __ push(Immediate(callee->name()));
3021 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3022 __ push(eax); // Function.
3023 __ push(edx); // Receiver.
3024 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3026 // If fast case code has been generated, emit code to push the function
3027 // and receiver and have the slow path jump around this code.
3028 if (done.is_linked()) {
3030 __ jmp(&call, Label::kNear);
3034 // The receiver is implicitly the global receiver. Indicate this by
3035 // passing the hole to the call function stub.
3036 __ push(Immediate(isolate()->factory()->undefined_value()));
3040 VisitForStackValue(callee);
3041 // refEnv.WithBaseObject()
3042 __ push(Immediate(isolate()->factory()->undefined_value()));
// Top-level dispatch for call expressions. Classifies the call
// (possibly-direct eval, global, lookup slot, property, super, other)
// and emits the matching call sequence; the eval path resolves the
// callee at runtime via EmitResolvePossiblyDirectEval before calling.
// In debug builds, verifies that every path records the JS return site.
3047 void FullCodeGenerator::VisitCall(Call* expr) {
3049 // We want to verify that RecordJSReturnSite gets called on all paths
3050 // through this function. Avoid early returns.
3051 expr->return_is_recorded_ = false;
3054 Comment cmnt(masm_, "[ Call");
3055 Expression* callee = expr->expression();
3056 Call::CallType call_type = expr->GetCallType(isolate());
3058 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3059 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3060 // to resolve the function we need to call. Then we call the resolved
3061 // function using the given arguments.
3062 ZoneList<Expression*>* args = expr->arguments();
3063 int arg_count = args->length();
3065 PushCalleeAndWithBaseObject(expr);
3067 // Push the arguments.
3068 for (int i = 0; i < arg_count; i++) {
3069 VisitForStackValue(args->at(i));
3072 // Push a copy of the function (found below the arguments) and
3074 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
3075 EmitResolvePossiblyDirectEval(arg_count);
3077 // Touch up the stack with the resolved function.
3078 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
3080 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3082 SetCallPosition(expr, arg_count);
3083 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3084 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3086 RecordJSReturnSite(expr);
3087 // Restore context register.
3088 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3089 context()->DropAndPlug(1, eax);
3091 } else if (call_type == Call::GLOBAL_CALL) {
3092 EmitCallWithLoadIC(expr);
3093 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3094 // Call to a lookup slot (dynamically introduced variable).
3095 PushCalleeAndWithBaseObject(expr);
3097 } else if (call_type == Call::PROPERTY_CALL) {
3098 Property* property = callee->AsProperty();
3099 bool is_named_call = property->key()->IsPropertyName();
3100 if (property->IsSuperAccess()) {
3101 if (is_named_call) {
3102 EmitSuperCallWithLoadIC(expr);
3104 EmitKeyedSuperCallWithLoadIC(expr);
3107 VisitForStackValue(property->obj());
3108 if (is_named_call) {
3109 EmitCallWithLoadIC(expr);
3111 EmitKeyedCallWithLoadIC(expr, property->key());
3114 } else if (call_type == Call::SUPER_CALL) {
3115 EmitSuperConstructorCall(expr);
3117 DCHECK(call_type == Call::OTHER_CALL);
3118 // Call to an arbitrary expression not handled specially above.
3119 VisitForStackValue(callee);
3120 __ push(Immediate(isolate()->factory()->undefined_value()));
3121 // Emit function call.
3126 // RecordJSReturnSite should have been called.
3127 DCHECK(expr->return_is_recorded_);
// Emits code for a 'new' expression: evaluates the constructor and arguments
// left-to-right onto the stack, loads the argument count into eax and the
// constructor into edi, then invokes CallConstructStub with feedback-vector
// slot info (ebx/edx) so unoptimized code records call targets.
3132 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3133 Comment cmnt(masm_, "[ CallNew");
3134 // According to ECMA-262, section 11.2.2, page 44, the function
3135 // expression in new calls must be evaluated before the
3138 // Push constructor on the stack. If it's not a function it's used as
3139 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3141 DCHECK(!expr->expression()->IsSuperPropertyReference());
3142 VisitForStackValue(expr->expression());
3144 // Push the arguments ("left-to-right") on the stack.
3145 ZoneList<Expression*>* args = expr->arguments();
3146 int arg_count = args->length();
3147 for (int i = 0; i < arg_count; i++) {
3148 VisitForStackValue(args->at(i));
3151 // Call the construct call builtin that handles allocation and
3152 // constructor invocation.
3153 SetConstructCallPosition(expr);
3155 // Load function and argument count into edi and eax.
3156 __ Move(eax, Immediate(arg_count));
3157 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3159 // Record call targets in unoptimized code.
3160 if (FLAG_pretenuring_call_new) {
3161 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
// The allocation-site slot is expected to immediately follow the
// CallNew feedback slot.
3162 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3163 expr->CallNewFeedbackSlot().ToInt() + 1);
3166 __ LoadHeapObject(ebx, FeedbackVector());
3167 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
3169 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3170 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3171 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3172 context()->Plug(eax);
// Emits code for a super(...) constructor call: loads the super constructor,
// pushes the arguments, loads the original constructor (new.target) into ecx,
// the argument count into eax and the function into edi, then invokes
// CallConstructStub in SUPER_CALL_RECORD_TARGET mode.
3176 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3177 SuperCallReference* super_call_ref =
3178 expr->expression()->AsSuperCallReference();
3179 DCHECK_NOT_NULL(super_call_ref);
3181 EmitLoadSuperConstructor(super_call_ref);
3182 __ push(result_register());
3184 // Push the arguments ("left-to-right") on the stack.
3185 ZoneList<Expression*>* args = expr->arguments();
3186 int arg_count = args->length();
3187 for (int i = 0; i < arg_count; i++) {
3188 VisitForStackValue(args->at(i));
3191 // Call the construct call builtin that handles allocation and
3192 // constructor invocation.
3193 SetConstructCallPosition(expr);
3195 // Load original constructor into ecx.
3196 VisitForAccumulatorValue(super_call_ref->new_target_var());
3197 __ mov(ecx, result_register());
3199 // Load function and argument count into edi and eax.
3200 __ Move(eax, Immediate(arg_count));
3201 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3203 // Record call targets in unoptimized code.
// Pretenuring support for super calls is not implemented yet (see the
// TODO below); the pretenuring branch is intentionally inert here.
3204 if (FLAG_pretenuring_call_new) {
3206 /* TODO(dslomov): support pretenuring.
3207 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3208 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3209 expr->CallNewFeedbackSlot().ToInt() + 1);
3213 __ LoadHeapObject(ebx, FeedbackVector());
3214 __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
3216 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3217 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3219 RecordJSReturnSite(expr);
3221 context()->Plug(eax);
// %_IsSmi intrinsic: tests whether the accumulator value (eax) is a smi by
// testing the smi tag bit, and plugs the boolean outcome into the current
// test context.
3225 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3226 ZoneList<Expression*>* args = expr->arguments();
3227 DCHECK(args->length() == 1);
3229 VisitForAccumulatorValue(args->at(0));
3231 Label materialize_true, materialize_false;
3232 Label* if_true = NULL;
3233 Label* if_false = NULL;
3234 Label* fall_through = NULL;
3235 context()->PrepareTest(&materialize_true, &materialize_false,
3236 &if_true, &if_false, &fall_through);
3238 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// A zero tag bit means smi.
3239 __ test(eax, Immediate(kSmiTagMask));
3240 Split(zero, if_true, if_false, fall_through);
3242 context()->Plug(if_true, if_false);
// %_IsNonNegativeSmi intrinsic: tests both the smi tag bit and the sign bit
// (0x80000000) in one 'test'; the value is a non-negative smi only when both
// are clear.
3246 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3247 ZoneList<Expression*>* args = expr->arguments();
3248 DCHECK(args->length() == 1);
3250 VisitForAccumulatorValue(args->at(0));
3252 Label materialize_true, materialize_false;
3253 Label* if_true = NULL;
3254 Label* if_false = NULL;
3255 Label* fall_through = NULL;
3256 context()->PrepareTest(&materialize_true, &materialize_false,
3257 &if_true, &if_false, &fall_through);
3259 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3260 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
3261 Split(zero, if_true, if_false, fall_through);
3263 context()->Plug(if_true, if_false);
// %_IsSpecObject intrinsic: true when the accumulator value is a heap object
// whose instance type is at or above FIRST_SPEC_OBJECT_TYPE (smis fail
// immediately).
3267 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3268 ZoneList<Expression*>* args = expr->arguments();
3269 DCHECK(args->length() == 1);
3271 VisitForAccumulatorValue(args->at(0));
3273 Label materialize_true, materialize_false;
3274 Label* if_true = NULL;
3275 Label* if_false = NULL;
3276 Label* fall_through = NULL;
3277 context()->PrepareTest(&materialize_true, &materialize_false,
3278 &if_true, &if_false, &fall_through);
3280 __ JumpIfSmi(eax, if_false);
// Compare the instance type; ebx is clobbered as a scratch register.
3281 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3282 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3283 Split(above_equal, if_true, if_false, fall_through);
3285 context()->Plug(if_true, if_false);
// %_IsSimdValue intrinsic: true when the accumulator holds a heap object with
// instance type SIMD128_VALUE_TYPE.
3289 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3290 ZoneList<Expression*>* args = expr->arguments();
3291 DCHECK(args->length() == 1);
3293 VisitForAccumulatorValue(args->at(0));
3295 Label materialize_true, materialize_false;
3296 Label* if_true = NULL;
3297 Label* if_false = NULL;
3298 Label* fall_through = NULL;
3299 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3300 &if_false, &fall_through);
3302 __ JumpIfSmi(eax, if_false);
3303 __ CmpObjectType(eax, SIMD128_VALUE_TYPE, ebx);
3304 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3305 Split(equal, if_true, if_false, fall_through);
3307 context()->Plug(if_true, if_false);
// Tests whether a String wrapper object can use the default valueOf behavior:
// its map is either already marked safe (kStringWrapperSafeForDefaultValueOf
// bit set), or the object is a fast-case object whose own descriptors contain
// no "valueOf" key; finally the prototype must be the unmodified String
// prototype from the native context. The map is marked on success so the scan
// runs at most once per map.
// NOTE(review): this excerpt elides some lines (loop entry jumps, '__ bind'
// sites around the descriptor scan, and the cmp against the native-context
// string-prototype map) — consult the full file before editing.
3311 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3312 CallRuntime* expr) {
3313 ZoneList<Expression*>* args = expr->arguments();
3314 DCHECK(args->length() == 1);
3316 VisitForAccumulatorValue(args->at(0));
3318 Label materialize_true, materialize_false, skip_lookup;
3319 Label* if_true = NULL;
3320 Label* if_false = NULL;
3321 Label* fall_through = NULL;
3322 context()->PrepareTest(&materialize_true, &materialize_false,
3323 &if_true, &if_false, &fall_through);
3325 __ AssertNotSmi(eax);
3327 // Check whether this map has already been checked to be safe for default
// valueOf; if so, skip the descriptor-array lookup entirely.
3329 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3330 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3331 1 << Map::kStringWrapperSafeForDefaultValueOf);
3332 __ j(not_zero, &skip_lookup);
3334 // Check for fast case object. Return false for slow case objects.
3335 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3336 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
// Slow-case (dictionary) properties are backed by a hash table.
3337 __ cmp(ecx, isolate()->factory()->hash_table_map());
3338 __ j(equal, if_false);
3340 // Look for valueOf string in the descriptor array, and indicate false if
3341 // found. Since we omit an enumeration index check, if it is added via a
3342 // transition that shares its descriptor array, this is a false positive.
3343 Label entry, loop, done;
3345 // Skip loop if no descriptors are valid.
3346 __ NumberOfOwnDescriptors(ecx, ebx);
3350 __ LoadInstanceDescriptors(ebx, ebx);
3351 // ebx: descriptor array.
3352 // ecx: valid entries in the descriptor array.
3353 // Calculate the end of the descriptor array.
3354 STATIC_ASSERT(kSmiTag == 0);
3355 STATIC_ASSERT(kSmiTagSize == 1);
3356 STATIC_ASSERT(kPointerSize == 4);
3357 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3358 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3359 // Calculate location of the first key name.
3360 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3361 // Loop through all the keys in the descriptor array. If one of these is the
3362 // internalized string "valueOf" the result is false.
3365 __ mov(edx, FieldOperand(ebx, 0));
3366 __ cmp(edx, isolate()->factory()->value_of_string());
3367 __ j(equal, if_false);
// Advance ebx to the next descriptor's key slot.
3368 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3371 __ j(not_equal, &loop);
3375 // Reload map as register ebx was used as temporary above.
3376 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3378 // Set the bit in the map to indicate that there is no local valueOf field.
3379 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3380 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3382 __ bind(&skip_lookup);
3384 // If a valueOf property is not found on the object check that its
3385 // prototype is the un-modified String prototype. If not result is false.
3386 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3387 __ JumpIfSmi(ecx, if_false);
3388 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3389 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3391 FieldOperand(edx, GlobalObject::kNativeContextOffset));
3394 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3395 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3396 Split(equal, if_true, if_false, fall_through);
3398 context()->Plug(if_true, if_false);
// %_IsFunction intrinsic: true when the accumulator holds a heap object with
// instance type JS_FUNCTION_TYPE.
3402 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3403 ZoneList<Expression*>* args = expr->arguments();
3404 DCHECK(args->length() == 1);
3406 VisitForAccumulatorValue(args->at(0));
3408 Label materialize_true, materialize_false;
3409 Label* if_true = NULL;
3410 Label* if_false = NULL;
3411 Label* fall_through = NULL;
3412 context()->PrepareTest(&materialize_true, &materialize_false,
3413 &if_true, &if_false, &fall_through);
3415 __ JumpIfSmi(eax, if_false);
3416 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3417 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3418 Split(equal, if_true, if_false, fall_through);
3420 context()->Plug(if_true, if_false);
// %_IsMinusZero intrinsic: true when the accumulator is a HeapNumber whose
// bit pattern is exactly -0.0, i.e. exponent word 0x80000000 and mantissa
// word 0.
3424 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3425 ZoneList<Expression*>* args = expr->arguments();
3426 DCHECK(args->length() == 1);
3428 VisitForAccumulatorValue(args->at(0));
3430 Label materialize_true, materialize_false;
3431 Label* if_true = NULL;
3432 Label* if_false = NULL;
3433 Label* fall_through = NULL;
3434 context()->PrepareTest(&materialize_true, &materialize_false,
3435 &if_true, &if_false, &fall_through);
// Smis and non-HeapNumber objects are never -0.
3437 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3438 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3439 // Check if the exponent half is 0x80000000. Comparing against 1 and
3440 // checking for overflow is the shortest possible encoding.
3441 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3442 __ j(no_overflow, if_false);
3443 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3444 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3445 Split(equal, if_true, if_false, fall_through);
3447 context()->Plug(if_true, if_false);
// %_IsArray intrinsic: true when the accumulator holds a heap object with
// instance type JS_ARRAY_TYPE.
3451 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3452 ZoneList<Expression*>* args = expr->arguments();
3453 DCHECK(args->length() == 1);
3455 VisitForAccumulatorValue(args->at(0));
3457 Label materialize_true, materialize_false;
3458 Label* if_true = NULL;
3459 Label* if_false = NULL;
3460 Label* fall_through = NULL;
3461 context()->PrepareTest(&materialize_true, &materialize_false,
3462 &if_true, &if_false, &fall_through);
3464 __ JumpIfSmi(eax, if_false);
3465 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3466 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3467 Split(equal, if_true, if_false, fall_through);
3469 context()->Plug(if_true, if_false);
// %_IsTypedArray intrinsic: true when the accumulator holds a heap object
// with instance type JS_TYPED_ARRAY_TYPE.
3473 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3474 ZoneList<Expression*>* args = expr->arguments();
3475 DCHECK(args->length() == 1);
3477 VisitForAccumulatorValue(args->at(0));
3479 Label materialize_true, materialize_false;
3480 Label* if_true = NULL;
3481 Label* if_false = NULL;
3482 Label* fall_through = NULL;
3483 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3484 &if_false, &fall_through);
3486 __ JumpIfSmi(eax, if_false);
3487 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
3488 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3489 Split(equal, if_true, if_false, fall_through);
3491 context()->Plug(if_true, if_false);
// %_IsRegExp intrinsic: true when the accumulator holds a heap object with
// instance type JS_REGEXP_TYPE.
3495 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3496 ZoneList<Expression*>* args = expr->arguments();
3497 DCHECK(args->length() == 1);
3499 VisitForAccumulatorValue(args->at(0));
3501 Label materialize_true, materialize_false;
3502 Label* if_true = NULL;
3503 Label* if_false = NULL;
3504 Label* fall_through = NULL;
3505 context()->PrepareTest(&materialize_true, &materialize_false,
3506 &if_true, &if_false, &fall_through);
3508 __ JumpIfSmi(eax, if_false);
3509 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3510 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3511 Split(equal, if_true, if_false, fall_through);
3513 context()->Plug(if_true, if_false);
// %_IsJSProxy intrinsic: true when the accumulator holds a heap object whose
// instance type lies in the [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE] range.
// NOTE(review): the declaration of the 'map' scratch register (presumably
// 'Register map = ebx;') is elided from this excerpt — verify against the
// full file.
3517 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3518 ZoneList<Expression*>* args = expr->arguments();
3519 DCHECK(args->length() == 1);
3521 VisitForAccumulatorValue(args->at(0));
3523 Label materialize_true, materialize_false;
3524 Label* if_true = NULL;
3525 Label* if_false = NULL;
3526 Label* fall_through = NULL;
3527 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3528 &if_false, &fall_through);
3530 __ JumpIfSmi(eax, if_false);
3532 __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
3533 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3534 __ j(less, if_false);
3535 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3536 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3537 Split(less_equal, if_true, if_false, fall_through);
3539 context()->Plug(if_true, if_false);
// %_IsConstructCall intrinsic: walks up to the calling frame (skipping an
// arguments-adaptor frame if present) and tests whether its frame marker is
// StackFrame::CONSTRUCT, i.e. whether the current function was invoked via
// 'new'.
3543 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3544 DCHECK(expr->arguments()->length() == 0);
3546 Label materialize_true, materialize_false;
3547 Label* if_true = NULL;
3548 Label* if_false = NULL;
3549 Label* fall_through = NULL;
3550 context()->PrepareTest(&materialize_true, &materialize_false,
3551 &if_true, &if_false, &fall_through);
3553 // Get the frame pointer for the calling frame.
3554 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3556 // Skip the arguments adaptor frame if it exists.
3557 Label check_frame_marker;
3558 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3559 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3560 __ j(not_equal, &check_frame_marker);
3561 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3563 // Check the marker in the calling frame.
3564 __ bind(&check_frame_marker);
3565 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3566 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3567 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3568 Split(equal, if_true, if_false, fall_through);
3570 context()->Plug(if_true, if_false);
// %_ObjectEquals intrinsic: compares two values for identity (pointer/smi
// equality) and plugs the result into the test context.
// NOTE(review): the pop of the first operand and the cmp against eax
// (originally between PrepareTest and PrepareForBailoutBeforeSplit) are
// elided from this excerpt — verify against the full file.
3574 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3575 ZoneList<Expression*>* args = expr->arguments();
3576 DCHECK(args->length() == 2);
3578 // Load the two objects into registers and perform the comparison.
3579 VisitForStackValue(args->at(0));
3580 VisitForAccumulatorValue(args->at(1));
3582 Label materialize_true, materialize_false;
3583 Label* if_true = NULL;
3584 Label* if_false = NULL;
3585 Label* fall_through = NULL;
3586 context()->PrepareTest(&materialize_true, &materialize_false,
3587 &if_true, &if_false, &fall_through);
3591 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3592 Split(equal, if_true, if_false, fall_through);
3594 context()->Plug(if_true, if_false);
// %_Arguments intrinsic: reads one element of the current function's
// arguments via ArgumentsAccessStub (key in edx per the stub's calling
// convention; formal parameter count in eax).
// NOTE(review): the mov of the key into edx and the CallStub line are elided
// from this excerpt — verify against the full file.
3598 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3599 ZoneList<Expression*>* args = expr->arguments();
3600 DCHECK(args->length() == 1);
3602 // ArgumentsAccessStub expects the key in edx and the formal
3603 // parameter count in eax.
3604 VisitForAccumulatorValue(args->at(0));
3606 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3607 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3609 context()->Plug(eax);
// %_ArgumentsLength intrinsic: returns the formal parameter count, or — when
// the caller went through an arguments-adaptor frame — the actual argument
// count read from that frame.
// NOTE(review): the declaration and bind of the 'exit' label are elided from
// this excerpt — verify against the full file.
3613 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3614 DCHECK(expr->arguments()->length() == 0);
3617 // Get the number of formal parameters.
3618 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3620 // Check if the calling frame is an arguments adaptor frame.
3621 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3622 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3623 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3624 __ j(not_equal, &exit);
3626 // Arguments adaptor case: Read the arguments length from the
3628 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3632 context()->Plug(eax);
// %_ClassOf intrinsic: computes the class name of a value. Smis and
// non-spec-objects yield null; callable objects (at either end of the spec
// object type range) yield "Function"; objects whose map constructor is not
// a JSFunction yield "Object"; otherwise the instance class name is read from
// the constructor's SharedFunctionInfo.
// NOTE(review): some '__ bind' sites and intermediate jumps between the
// labeled sections are elided from this excerpt.
3636 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3637 ZoneList<Expression*>* args = expr->arguments();
3638 DCHECK(args->length() == 1);
3639 Label done, null, function, non_function_constructor;
3641 VisitForAccumulatorValue(args->at(0));
3643 // If the object is a smi, we return null.
3644 __ JumpIfSmi(eax, &null);
3646 // Check that the object is a JS object but take special care of JS
3647 // functions to make sure they have 'Function' as their class.
3648 // Assume that there are only two callable types, and one of them is at
3649 // either end of the type range for JS object types. Saves extra comparisons.
3650 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3651 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3652 // Map is now in eax.
3654 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3655 FIRST_SPEC_OBJECT_TYPE + 1);
3656 __ j(equal, &function);
3658 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3659 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3660 LAST_SPEC_OBJECT_TYPE - 1);
3661 __ j(equal, &function);
3662 // Assume that there is no larger type.
3663 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3665 // Check if the constructor in the map is a JS function.
3666 __ GetMapConstructor(eax, eax, ebx);
3667 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3668 __ j(not_equal, &non_function_constructor);
3670 // eax now contains the constructor function. Grab the
3671 // instance class name from there.
3672 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3673 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3676 // Functions have class 'Function'.
3678 __ mov(eax, isolate()->factory()->Function_string());
3681 // Objects with a non-function constructor have class 'Object'.
3682 __ bind(&non_function_constructor);
3683 __ mov(eax, isolate()->factory()->Object_string());
3686 // Non-JS objects have class null.
3688 __ mov(eax, isolate()->factory()->null_value());
3693 context()->Plug(eax);
// %_ValueOf intrinsic: unwraps a JSValue wrapper object to its primitive
// value; smis and non-JSValue objects are returned unchanged.
// NOTE(review): the 'Label done;' declaration and its '__ bind(&done);' are
// elided from this excerpt — verify against the full file.
3697 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3698 ZoneList<Expression*>* args = expr->arguments();
3699 DCHECK(args->length() == 1);
3701 VisitForAccumulatorValue(args->at(0)); // Load the object.
3704 // If the object is a smi return the object.
3705 __ JumpIfSmi(eax, &done, Label::kNear);
3706 // If the object is not a value type, return the object.
3707 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3708 __ j(not_equal, &done, Label::kNear);
3709 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3712 context()->Plug(eax);
// %_IsDate intrinsic: true when the accumulator holds a heap object with
// instance type JS_DATE_TYPE.
3716 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3717 ZoneList<Expression*>* args = expr->arguments();
3718 DCHECK_EQ(1, args->length());
3720 VisitForAccumulatorValue(args->at(0));
3722 Label materialize_true, materialize_false;
3723 Label* if_true = nullptr;
3724 Label* if_false = nullptr;
3725 Label* fall_through = nullptr;
3726 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3727 &if_false, &fall_through);
3729 __ JumpIfSmi(eax, if_false);
3730 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
3731 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3732 Split(equal, if_true, if_false, fall_through);
3734 context()->Plug(if_true, if_false);
// %_DateField intrinsic: reads a field of a JSDate. Field 0 (the time value)
// is always read directly; other cached fields are read inline when the
// date-cache stamp still matches, otherwise a C function
// (get_date_field_function) recomputes the field.
// NOTE(review): the '__ bind(&runtime);' before the C-call sequence and the
// '__ bind(&done);' are elided from this excerpt — verify against the full
// file before editing.
3738 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3739 ZoneList<Expression*>* args = expr->arguments();
3740 DCHECK(args->length() == 2);
3741 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
// The field index must be a compile-time smi literal.
3742 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3744 VisitForAccumulatorValue(args->at(0)); // Load the object.
3746 Register object = eax;
3747 Register result = eax;
3748 Register scratch = ecx;
3750 if (index->value() == 0) {
3751 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3753 Label runtime, done;
3754 if (index->value() < JSDate::kFirstUncachedField) {
// Fast path: if the global date-cache stamp matches the one stored on
// the object, the cached field value is still valid.
3755 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3756 __ mov(scratch, Operand::StaticVariable(stamp));
3757 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3758 __ j(not_equal, &runtime, Label::kNear);
3759 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3760 kPointerSize * index->value()));
3761 __ jmp(&done, Label::kNear);
// Slow path: call the C date-field function with (object, index).
3764 __ PrepareCallCFunction(2, scratch);
3765 __ mov(Operand(esp, 0), object);
3766 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3767 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3771 context()->Plug(result);
// %_OneByteSeqStringSetChar intrinsic: stores a character (smi code unit)
// into a sequential one-byte string at a smi index. Debug builds verify the
// smi-ness of index/value and the string's representation.
// NOTE(review): the pops of 'value' and 'index' and the smi-untagging before
// the store (plus the second operand of mov_b) are elided from this excerpt —
// verify against the full file.
3775 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3776 ZoneList<Expression*>* args = expr->arguments();
3777 DCHECK_EQ(3, args->length());
3779 Register string = eax;
3780 Register index = ebx;
3781 Register value = ecx;
3783 VisitForStackValue(args->at(0)); // index
3784 VisitForStackValue(args->at(1)); // value
3785 VisitForAccumulatorValue(args->at(2)); // string
3790 if (FLAG_debug_code) {
3791 __ test(value, Immediate(kSmiTagMask));
3792 __ Check(zero, kNonSmiValue);
3793 __ test(index, Immediate(kSmiTagMask));
3794 __ Check(zero, kNonSmiValue);
3800 if (FLAG_debug_code) {
3801 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3802 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3805 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3807 context()->Plug(string);
// %_TwoByteSeqStringSetChar intrinsic: stores a character (smi code unit)
// into a sequential two-byte string at a smi index. As the inline comment
// notes, a smi-tagged index needs no untagging for two-byte (times_1 on the
// tagged value) addressing.
// NOTE(review): the pops of 'value' and 'index' and the second operand of
// mov_w are elided from this excerpt — verify against the full file.
3811 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3812 ZoneList<Expression*>* args = expr->arguments();
3813 DCHECK_EQ(3, args->length());
3815 Register string = eax;
3816 Register index = ebx;
3817 Register value = ecx;
3819 VisitForStackValue(args->at(0)); // index
3820 VisitForStackValue(args->at(1)); // value
3821 VisitForAccumulatorValue(args->at(2)); // string
3825 if (FLAG_debug_code) {
3826 __ test(value, Immediate(kSmiTagMask));
3827 __ Check(zero, kNonSmiValue);
3828 __ test(index, Immediate(kSmiTagMask));
3829 __ Check(zero, kNonSmiValue);
3831 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3832 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3837 // No need to untag a smi for two-byte addressing.
3838 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3840 context()->Plug(string);
// %_SetValueOf intrinsic: stores a value into a JSValue wrapper object
// (object in ebx, value in eax) and triggers the write barrier. Smis and
// non-JSValue objects are left untouched; the value is returned either way.
// NOTE(review): the 'Label done;' declaration, the '__ mov(edx, eax)' value
// save before the barrier, and the '__ bind(&done);' are elided from this
// excerpt — verify against the full file.
3844 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3845 ZoneList<Expression*>* args = expr->arguments();
3846 DCHECK(args->length() == 2);
3848 VisitForStackValue(args->at(0)); // Load the object.
3849 VisitForAccumulatorValue(args->at(1)); // Load the value.
3850 __ pop(ebx); // eax = value. ebx = object.
3853 // If the object is a smi, return the value.
3854 __ JumpIfSmi(ebx, &done, Label::kNear);
3856 // If the object is not a value type, return the value.
3857 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3858 __ j(not_equal, &done, Label::kNear);
3861 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3863 // Update the write barrier. Save the value as it will be
3864 // overwritten by the write barrier code and is needed afterward.
3866 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3869 context()->Plug(eax);
// %_NumberToString intrinsic: converts the accumulator value via
// NumberToStringStub and plugs the result (eax) into the context.
// NOTE(review): the '__ CallStub(&stub);' line is elided from this excerpt.
3873 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3874 ZoneList<Expression*>* args = expr->arguments();
3875 DCHECK_EQ(args->length(), 1);
3877 // Load the argument into eax and call the stub.
3878 VisitForAccumulatorValue(args->at(0));
3880 NumberToStringStub stub(isolate());
3882 context()->Plug(eax);
// %_ToObject intrinsic: converts the accumulator value via ToObjectStub and
// plugs the result (eax) into the context.
// NOTE(review): the '__ CallStub(&stub);' line is elided from this excerpt.
3886 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3887 ZoneList<Expression*>* args = expr->arguments();
3888 DCHECK_EQ(1, args->length());
3890 // Load the argument into eax and convert it.
3891 VisitForAccumulatorValue(args->at(0));
3893 ToObjectStub stub(isolate());
3895 context()->Plug(eax);
// %_StringCharFromCode intrinsic: converts a char code (in eax) to a
// one-character string via StringCharFromCodeGenerator; the result register
// is ebx. The fast path is emitted inline; the generator's slow path handles
// the rest with no extra runtime-call bookkeeping (NopRuntimeCallHelper).
// NOTE(review): the 'Label done;' and its bind around the slow path are
// elided from this excerpt.
3899 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3900 ZoneList<Expression*>* args = expr->arguments();
3901 DCHECK(args->length() == 1);
3903 VisitForAccumulatorValue(args->at(0));
3906 StringCharFromCodeGenerator generator(eax, ebx);
3907 generator.GenerateFast(masm_);
3910 NopRuntimeCallHelper call_helper;
3911 generator.GenerateSlow(masm_, call_helper);
3914 context()->Plug(ebx);
// %_StringCharCodeAt intrinsic: reads the char code at a numeric index of a
// string (object in ebx, index in eax, result in edx). Out-of-range indices
// yield NaN per the spec; a non-smi index is replaced by undefined to trigger
// conversion in the generator's slow path.
// NOTE(review): the pop of the string into 'object', some generator
// constructor arguments, and the 'done' label bookkeeping are elided from
// this excerpt.
3918 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3919 ZoneList<Expression*>* args = expr->arguments();
3920 DCHECK(args->length() == 2);
3922 VisitForStackValue(args->at(0));
3923 VisitForAccumulatorValue(args->at(1));
3925 Register object = ebx;
3926 Register index = eax;
3927 Register result = edx;
3931 Label need_conversion;
3932 Label index_out_of_range;
3934 StringCharCodeAtGenerator generator(object,
3939 &index_out_of_range,
3940 STRING_INDEX_IS_NUMBER);
3941 generator.GenerateFast(masm_);
3944 __ bind(&index_out_of_range);
3945 // When the index is out of range, the spec requires us to return
// NaN.
3947 __ Move(result, Immediate(isolate()->factory()->nan_value()));
3950 __ bind(&need_conversion);
3951 // Move the undefined value into the result register, which will
3952 // trigger conversion.
3953 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3956 NopRuntimeCallHelper call_helper;
3957 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3960 context()->Plug(result);
// %_StringCharAt intrinsic: reads the one-character substring at a numeric
// index of a string (object in ebx, index in eax, scratch edx, result eax).
// Out-of-range indices yield the empty string per the spec; a non-smi index
// is replaced by smi zero to trigger conversion in the slow path.
// NOTE(review): the pop of the string into 'object', some generator
// constructor arguments, and the 'done' label bookkeeping are elided from
// this excerpt.
3964 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3965 ZoneList<Expression*>* args = expr->arguments();
3966 DCHECK(args->length() == 2);
3968 VisitForStackValue(args->at(0));
3969 VisitForAccumulatorValue(args->at(1));
3971 Register object = ebx;
3972 Register index = eax;
3973 Register scratch = edx;
3974 Register result = eax;
3978 Label need_conversion;
3979 Label index_out_of_range;
3981 StringCharAtGenerator generator(object,
3987 &index_out_of_range,
3988 STRING_INDEX_IS_NUMBER);
3989 generator.GenerateFast(masm_);
3992 __ bind(&index_out_of_range);
3993 // When the index is out of range, the spec requires us to return
3994 // the empty string.
3995 __ Move(result, Immediate(isolate()->factory()->empty_string()));
3998 __ bind(&need_conversion);
3999 // Move smi zero into the result register, which will trigger
// conversion in the generator's slow path.
4001 __ Move(result, Immediate(Smi::FromInt(0)));
4004 NopRuntimeCallHelper call_helper;
4005 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4008 context()->Plug(result);
// %_StringAdd intrinsic: concatenates two values via StringAddStub with full
// type checks (STRING_ADD_CHECK_BOTH) and no pretenuring.
// NOTE(review): the pop of the left operand into edx and the CallStub line
// are elided from this excerpt.
4012 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4013 ZoneList<Expression*>* args = expr->arguments();
4014 DCHECK_EQ(2, args->length());
4015 VisitForStackValue(args->at(0));
4016 VisitForAccumulatorValue(args->at(1));
4019 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4021 context()->Plug(eax);
// %_CallFunction intrinsic: calls a function with an explicit receiver.
// Pushes receiver + arguments, then either invokes the function directly
// (when the callee really is a JSFunction) or falls back to Runtime::kCall
// for non-function callees (including proxies).
// NOTE(review): the '__ bind(&runtime);', the push of eax before the runtime
// call, and the '__ bind(&done);' are elided from this excerpt.
4025 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4026 ZoneList<Expression*>* args = expr->arguments();
4027 DCHECK(args->length() >= 2);
4029 int arg_count = args->length() - 2; // 2 ~ receiver and function.
// Push the receiver and the arguments (everything but the function).
4030 for (int i = 0; i < arg_count + 1; ++i) {
4031 VisitForStackValue(args->at(i));
4033 VisitForAccumulatorValue(args->last()); // Function.
4035 Label runtime, done;
4036 // Check for non-function argument (including proxy).
4037 __ JumpIfSmi(eax, &runtime);
4038 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4039 __ j(not_equal, &runtime);
4041 // InvokeFunction requires the function in edi. Move it in there.
4042 __ mov(edi, result_register());
4043 ParameterCount count(arg_count);
4044 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
// Restore the context register after the invoke.
4045 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4050 __ CallRuntime(Runtime::kCall, args->length());
4053 context()->Plug(eax);
// %_DefaultConstructorCallSuper intrinsic: implements the implicit super(...)
// call of a default derived-class constructor. Resolves the super constructor
// via Runtime::kGetPrototype, then forwards the caller's actual arguments —
// copied from the arguments-adaptor frame when one exists, otherwise zero
// arguments — into a SUPER_CONSTRUCTOR_CALL CallConstructStub invocation.
// NOTE(review): the '__ bind(&loop);' of the copy loop, the decrement/test of
// ebx, and some cleanup around &args_set_up are elided from this excerpt.
4057 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4058 ZoneList<Expression*>* args = expr->arguments();
4059 DCHECK(args->length() == 2);
4062 VisitForStackValue(args->at(0));
4065 VisitForStackValue(args->at(1));
4066 __ CallRuntime(Runtime::kGetPrototype, 1);
4067 __ push(result_register());
4069 // Load original constructor into ecx.
4070 __ mov(ecx, Operand(esp, 1 * kPointerSize));
4072 // Check if the calling frame is an arguments adaptor frame.
4073 Label adaptor_frame, args_set_up, runtime;
4074 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4075 __ mov(ebx, Operand(edx, StandardFrameConstants::kContextOffset));
4076 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4077 __ j(equal, &adaptor_frame);
4078 // default constructor has no arguments, so no adaptor frame means no args.
4079 __ mov(eax, Immediate(0));
4080 __ jmp(&args_set_up);
4082 // Copy arguments from adaptor frame.
4084 __ bind(&adaptor_frame);
4085 __ mov(ebx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
// edx now points just past the last argument in the adaptor frame.
4089 __ lea(edx, Operand(edx, ebx, times_pointer_size,
4090 StandardFrameConstants::kCallerSPOffset));
4093 __ push(Operand(edx, -1 * kPointerSize));
4094 __ sub(edx, Immediate(kPointerSize));
4096 __ j(not_zero, &loop);
4099 __ bind(&args_set_up);
// Load the super constructor (below the args) into edi; ebx carries
// undefined as the feedback vector for the stub.
4101 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4102 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4103 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4104 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4108 context()->Plug(eax);
// Builds a JSArray regexp match result object via RegExpConstructResultStub.
// Arguments: (length, index, input string); the first two go on the stack,
// the third ends up in the accumulator (eax) for the stub.
// NOTE(review): the stub invocation lines (original 4120-4122) are missing
// from this extract — only the result plug is visible after the operand setup.
4112 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4113 // Load the arguments on the stack and call the stub.
4114 RegExpConstructResultStub stub(isolate());
4115 ZoneList<Expression*>* args = expr->arguments();
4116 DCHECK(args->length() == 3);
4117 VisitForStackValue(args->at(0));
4118 VisitForStackValue(args->at(1));
4119 VisitForAccumulatorValue(args->at(2));
4123 context()->Plug(eax);
// Tests whether a string's hash field contains a cached array index and
// plugs the boolean result into the current expression context.
4127 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4128 ZoneList<Expression*>* args = expr->arguments();
4129 DCHECK(args->length() == 1);
4131 VisitForAccumulatorValue(args->at(0));
4133 __ AssertString(eax);
4135 Label materialize_true, materialize_false;
4136 Label* if_true = NULL;
4137 Label* if_false = NULL;
4138 Label* fall_through = NULL;
4139 context()->PrepareTest(&materialize_true, &materialize_false,
4140 &if_true, &if_false, &fall_through);
// The mask bits are CLEAR when the field contains a cached index, hence
// Split on `zero` below selects if_true for the "contains index" case.
4142 __ test(FieldOperand(eax, String::kHashFieldOffset),
4143 Immediate(String::kContainsCachedArrayIndexMask));
4144 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4145 Split(zero, if_true, if_false, fall_through);
4147 context()->Plug(if_true, if_false);
// Extracts the array index cached in a string's hash field (as a smi in eax).
// Callers are expected to have checked EmitHasCachedArrayIndex first.
4151 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4152 ZoneList<Expression*>* args = expr->arguments();
4153 DCHECK(args->length() == 1);
4154 VisitForAccumulatorValue(args->at(0));
4156 __ AssertString(eax);
// Decode the index bits out of the hash field in place.
4158 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
4159 __ IndexFromHash(eax, eax);
4161 context()->Plug(eax);
// Fast path for Array.prototype.join on arrays of flat one-byte strings.
// Layout of the three stack temps (below the already-pushed separator):
// [esp+0] array length, [esp+4] result slot, [esp+8] separator.
// Validates array/elements/separator, sums string lengths, allocates one
// SeqOneByteString, then runs one of three copy loops depending on whether
// the separator is empty, a single character, or longer.
// NOTE(review): this extract elides many original lines (bailout jumps,
// several `bind`s and `lea`s, else-branches); the visible instruction
// sequence is NOT complete — confirm against the upstream file before
// modifying anything here.
4165 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4166 Label bailout, done, one_char_separator, long_separator,
4167 non_trivial_array, not_size_one_array, loop,
4168 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4170 ZoneList<Expression*>* args = expr->arguments();
4171 DCHECK(args->length() == 2);
4172 // We will leave the separator on the stack until the end of the function.
4173 VisitForStackValue(args->at(1));
4174 // Load this to eax (= array)
4175 VisitForAccumulatorValue(args->at(0));
4176 // All aliases of the same register have disjoint lifetimes.
4177 Register array = eax;
4178 Register elements = no_reg; // Will be eax.
4180 Register index = edx;
4182 Register string_length = ecx;
4184 Register string = esi;
4186 Register scratch = ebx;
4188 Register array_length = edi;
4189 Register result_pos = no_reg; // Will be edi.
4191 // Separator operand is already pushed.
4192 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4193 Operand result_operand = Operand(esp, 1 * kPointerSize);
4194 Operand array_length_operand = Operand(esp, 0);
4195 __ sub(esp, Immediate(2 * kPointerSize));
4197 // Check that the array is a JSArray
4198 __ JumpIfSmi(array, &bailout);
4199 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4200 __ j(not_equal, &bailout);
4202 // Check that the array has fast elements.
4203 __ CheckFastElements(scratch, &bailout);
4205 // If the array has length zero, return the empty string.
4206 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4207 __ SmiUntag(array_length);
4208 __ j(not_zero, &non_trivial_array);
4209 __ mov(result_operand, isolate()->factory()->empty_string());
4212 // Save the array length.
4213 __ bind(&non_trivial_array);
4214 __ mov(array_length_operand, array_length);
4216 // Save the FixedArray containing array's elements.
4217 // End of array's live range.
4219 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4223 // Check that all array elements are sequential one-byte strings, and
4224 // accumulate the sum of their lengths, as a smi-encoded value.
4225 __ Move(index, Immediate(0));
4226 __ Move(string_length, Immediate(0));
4227 // Loop condition: while (index < length).
4228 // Live loop registers: index, array_length, string,
4229 // scratch, string_length, elements.
4230 if (generate_debug_code_) {
4231 __ cmp(index, array_length);
4232 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
// Validation loop body: each element must be a non-smi, sequential
// one-byte string; its smi length is accumulated with overflow checks.
4235 __ mov(string, FieldOperand(elements,
4238 FixedArray::kHeaderSize));
4239 __ JumpIfSmi(string, &bailout);
4240 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4241 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4242 __ and_(scratch, Immediate(
4243 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4244 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4245 __ j(not_equal, &bailout);
4246 __ add(string_length,
4247 FieldOperand(string, SeqOneByteString::kLengthOffset));
4248 __ j(overflow, &bailout);
4249 __ add(index, Immediate(1));
4250 __ cmp(index, array_length);
4253 // If array_length is 1, return elements[0], a string.
4254 __ cmp(array_length, 1);
// NOTE(review): `¬_size_one_array` on the next two code lines looks like
// an HTML-entity mangling of `&not_size_one_array` (the label declared
// above) introduced by the extraction — restore `&` when fixing upstream.
4255 __ j(not_equal, ¬_size_one_array);
4256 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4257 __ mov(result_operand, scratch);
4260 __ bind(¬_size_one_array);
4262 // End of array_length live range.
4263 result_pos = array_length;
4264 array_length = no_reg;
4267 // string_length: Sum of string lengths, as a smi.
4268 // elements: FixedArray of strings.
4270 // Check that the separator is a flat one-byte string.
4271 __ mov(string, separator_operand);
4272 __ JumpIfSmi(string, &bailout);
4273 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4274 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4275 __ and_(scratch, Immediate(
4276 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4277 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4278 __ j(not_equal, &bailout);
4280 // Add (separator length times array_length) - separator length
4281 // to string_length.
4282 __ mov(scratch, separator_operand);
4283 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4284 __ sub(string_length, scratch); // May be negative, temporarily.
4285 __ imul(scratch, array_length_operand);
4286 __ j(overflow, &bailout);
4287 __ add(string_length, scratch);
4288 __ j(overflow, &bailout);
// string_length is a smi; shift right by one to untag it.
4290 __ shr(string_length, 1);
4291 // Live registers and stack values:
4294 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4296 __ mov(result_operand, result_pos);
4297 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
// Dispatch on separator length (still smi-encoded in the length field).
4300 __ mov(string, separator_operand);
4301 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4302 Immediate(Smi::FromInt(1)));
4303 __ j(equal, &one_char_separator);
4304 __ j(greater, &long_separator);
4307 // Empty separator case
4308 __ mov(index, Immediate(0));
4309 __ jmp(&loop_1_condition);
4310 // Loop condition: while (index < length).
4312 // Each iteration of the loop concatenates one string to the result.
4313 // Live values in registers:
4314 // index: which element of the elements array we are adding to the result.
4315 // result_pos: the position to which we are currently copying characters.
4316 // elements: the FixedArray of strings we are joining.
4318 // Get string = array[index].
4319 __ mov(string, FieldOperand(elements, index,
4321 FixedArray::kHeaderSize));
4322 __ mov(string_length,
4323 FieldOperand(string, String::kLengthOffset));
4324 __ shr(string_length, 1);
4326 FieldOperand(string, SeqOneByteString::kHeaderSize));
4327 __ CopyBytes(string, result_pos, string_length, scratch);
4328 __ add(index, Immediate(1));
4329 __ bind(&loop_1_condition);
4330 __ cmp(index, array_length_operand);
4331 __ j(less, &loop_1); // End while (index < length).
4336 // One-character separator case
4337 __ bind(&one_char_separator);
4338 // Replace separator with its one-byte character value.
4339 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4340 __ mov_b(separator_operand, scratch);
4342 __ Move(index, Immediate(0));
4343 // Jump into the loop after the code that copies the separator, so the first
4344 // element is not preceded by a separator
4345 __ jmp(&loop_2_entry);
4346 // Loop condition: while (index < length).
4348 // Each iteration of the loop concatenates one string to the result.
4349 // Live values in registers:
4350 // index: which element of the elements array we are adding to the result.
4351 // result_pos: the position to which we are currently copying characters.
4353 // Copy the separator character to the result.
4354 __ mov_b(scratch, separator_operand);
4355 __ mov_b(Operand(result_pos, 0), scratch);
4358 __ bind(&loop_2_entry);
4359 // Get string = array[index].
4360 __ mov(string, FieldOperand(elements, index,
4362 FixedArray::kHeaderSize));
4363 __ mov(string_length,
4364 FieldOperand(string, String::kLengthOffset));
4365 __ shr(string_length, 1);
4367 FieldOperand(string, SeqOneByteString::kHeaderSize));
4368 __ CopyBytes(string, result_pos, string_length, scratch);
4369 __ add(index, Immediate(1));
4371 __ cmp(index, array_length_operand);
4372 __ j(less, &loop_2); // End while (index < length).
4376 // Long separator case (separator is more than one character).
4377 __ bind(&long_separator);
4379 __ Move(index, Immediate(0));
4380 // Jump into the loop after the code that copies the separator, so the first
4381 // element is not preceded by a separator
4382 __ jmp(&loop_3_entry);
4383 // Loop condition: while (index < length).
4385 // Each iteration of the loop concatenates one string to the result.
4386 // Live values in registers:
4387 // index: which element of the elements array we are adding to the result.
4388 // result_pos: the position to which we are currently copying characters.
4390 // Copy the separator to the result.
4391 __ mov(string, separator_operand);
4392 __ mov(string_length,
4393 FieldOperand(string, String::kLengthOffset));
4394 __ shr(string_length, 1);
4396 FieldOperand(string, SeqOneByteString::kHeaderSize));
4397 __ CopyBytes(string, result_pos, string_length, scratch);
4399 __ bind(&loop_3_entry);
4400 // Get string = array[index].
4401 __ mov(string, FieldOperand(elements, index,
4403 FixedArray::kHeaderSize));
4404 __ mov(string_length,
4405 FieldOperand(string, String::kLengthOffset));
4406 __ shr(string_length, 1);
4408 FieldOperand(string, SeqOneByteString::kHeaderSize));
4409 __ CopyBytes(string, result_pos, string_length, scratch);
4410 __ add(index, Immediate(1));
4412 __ cmp(index, array_length_operand);
4413 __ j(less, &loop_3); // End while (index < length).
// Bailout path: store undefined in the result slot so the generic join
// runs instead (the bailout label bind itself is elided in this extract).
4418 __ mov(result_operand, isolate()->factory()->undefined_value());
4420 __ mov(eax, result_operand);
4421 // Drop temp values from the stack, and restore context register.
4422 __ add(esp, Immediate(3 * kPointerSize));
4424 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4425 context()->Plug(eax);
// Loads the isolate's debug_is_active flag byte into eax and plugs it into
// the context.
// NOTE(review): original line 4434 is missing from this extract — in stock
// full-codegen the loaded byte is smi-tagged before being plugged; confirm.
4429 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4430 DCHECK(expr->arguments()->length() == 0);
4431 ExternalReference debug_is_active =
4432 ExternalReference::debug_is_active_address(isolate());
4433 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4435 context()->Plug(eax);
// Pushes undefined as the receiver and loads the JS runtime function for
// expr->context_index() out of the native context into eax.
4439 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4440 // Push undefined as receiver.
4441 __ push(Immediate(isolate()->factory()->undefined_value()));
// global object -> native context -> context slot holding the function.
4443 __ mov(eax, GlobalObjectOperand());
4444 __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
4445 __ mov(eax, ContextOperand(eax, expr->context_index()));
// Calls a JS runtime function whose target was pushed below the arguments:
// records the call position, loads the target into edi and (in the elided
// lines of this extract) invokes CallFunctionStub.
4449 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4450 ZoneList<Expression*>* args = expr->arguments();
4451 int arg_count = args->length();
4453 SetCallPosition(expr, arg_count);
4454 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
// Target function sits under the receiver and arg_count arguments.
4455 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
// Dispatches a CallRuntime AST node three ways: (1) JS runtime functions go
// through the stub call path, (2) intrinsics with a full-codegen fast path
// are routed to their Emit* generator via the macro-expanded switch cases,
// (3) everything else calls into the C runtime.
4460 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4461 ZoneList<Expression*>* args = expr->arguments();
4462 int arg_count = args->length();
4464 if (expr->is_jsruntime()) {
4465 Comment cmnt(masm_, "[ CallRuntime");
4466 EmitLoadJSRuntimeFunction(expr);
4468 // Push the target function under the receiver.
4469 __ push(Operand(esp, 0));
4470 __ mov(Operand(esp, kPointerSize), eax);
4472 // Push the arguments ("left-to-right").
4473 for (int i = 0; i < arg_count; i++) {
4474 VisitForStackValue(args->at(i));
4477 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4478 EmitCallJSRuntimeFunction(expr);
4480 // Restore context register.
4481 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Drop the extra receiver slot and plug the stub's result.
4482 context()->DropAndPlug(1, eax);
4485 const Runtime::Function* function = expr->function();
4486 switch (function->function_id) {
// Expands to one `case Runtime::kInline<Name>:` per full-codegen intrinsic,
// each delegating to the corresponding Emit<Name>() above.
4487 #define CALL_INTRINSIC_GENERATOR(Name) \
4488 case Runtime::kInline##Name: { \
4489 Comment cmnt(masm_, "[ Inline" #Name); \
4490 return Emit##Name(expr); \
4492 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4493 #undef CALL_INTRINSIC_GENERATOR
4495 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4496 // Push the arguments ("left-to-right").
4497 for (int i = 0; i < arg_count; i++) {
4498 VisitForStackValue(args->at(i));
4501 // Call the C runtime function.
4502 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4503 __ CallRuntime(expr->function(), arg_count);
4504 context()->Plug(eax);
// Code generation for the unary operators delete, void, ! and typeof.
// Each case plugs its result into the current expression context.
// NOTE(review): several closing braces / break statements between cases are
// elided in this extract; the case boundaries follow the original file.
4511 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4512 switch (expr->op()) {
4513 case Token::DELETE: {
4514 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4515 Property* property = expr->expression()->AsProperty();
4516 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4518 if (property != NULL) {
// delete obj[key]: language mode selects strict vs sloppy semantics.
4519 VisitForStackValue(property->obj());
4520 VisitForStackValue(property->key());
4521 __ CallRuntime(is_strict(language_mode())
4522 ? Runtime::kDeleteProperty_Strict
4523 : Runtime::kDeleteProperty_Sloppy,
4525 context()->Plug(eax);
4526 } else if (proxy != NULL) {
4527 Variable* var = proxy->var();
4528 // Delete of an unqualified identifier is disallowed in strict mode but
4529 // "delete this" is allowed.
4530 bool is_this = var->HasThisName(isolate());
4531 DCHECK(is_sloppy(language_mode()) || is_this);
4532 if (var->IsUnallocatedOrGlobalSlot()) {
4533 __ push(GlobalObjectOperand());
4534 __ push(Immediate(var->name()));
4535 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4536 context()->Plug(eax);
4537 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4538 // Result of deleting non-global variables is false. 'this' is
4539 // not really a variable, though we implement it as one. The
4540 // subexpression does not have side effects.
4541 context()->Plug(is_this);
4543 // Non-global variable. Call the runtime to try to delete from the
4544 // context where the variable was introduced.
4545 __ push(context_register());
4546 __ push(Immediate(var->name()));
4547 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4548 context()->Plug(eax);
4551 // Result of deleting non-property, non-variable reference is true.
4552 // The subexpression may have side effects.
4553 VisitForEffect(expr->expression());
4554 context()->Plug(true);
// void <expr>: evaluate for effect only, result is always undefined.
4560 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4561 VisitForEffect(expr->expression());
4562 context()->Plug(isolate()->factory()->undefined_value());
4567 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4568 if (context()->IsEffect()) {
4569 // Unary NOT has no side effects so it's only necessary to visit the
4570 // subexpression. Match the optimizing compiler by not branching.
4571 VisitForEffect(expr->expression());
4572 } else if (context()->IsTest()) {
4573 const TestContext* test = TestContext::cast(context());
4574 // The labels are swapped for the recursive call.
4575 VisitForControl(expr->expression(),
4576 test->false_label(),
4578 test->fall_through());
4579 context()->Plug(test->true_label(), test->false_label());
4581 // We handle value contexts explicitly rather than simply visiting
4582 // for control and plugging the control flow into the context,
4583 // because we need to prepare a pair of extra administrative AST ids
4584 // for the optimizing compiler.
4585 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4586 Label materialize_true, materialize_false, done;
4587 VisitForControl(expr->expression(),
// Note: labels below are intentionally inverted relative to the source
// expression because this materializes the result of the NOT.
4591 __ bind(&materialize_true);
4592 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4593 if (context()->IsAccumulatorValue()) {
4594 __ mov(eax, isolate()->factory()->true_value());
4596 __ Push(isolate()->factory()->true_value());
4598 __ jmp(&done, Label::kNear);
4599 __ bind(&materialize_false);
4600 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4601 if (context()->IsAccumulatorValue()) {
4602 __ mov(eax, isolate()->factory()->false_value());
4604 __ Push(isolate()->factory()->false_value());
4611 case Token::TYPEOF: {
4612 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4614 AccumulatorValueContext context(this);
4615 VisitForTypeofValue(expr->expression());
4618 TypeofStub typeof_stub(isolate());
4619 __ CallStub(&typeof_stub);
4620 context()->Plug(eax);
// Code generation for ++/-- (prefix and postfix) on variables and on plain,
// named-super, keyed and keyed-super properties. Overall shape:
//   load old value -> (postfix: stash copy on the stack) -> inline smi
//   increment with overflow bailout to the BinaryOpIC stub -> store back
//   via the appropriate IC / runtime path -> plug result.
// NOTE(review): this extract elides several lines (some `break`s, `push`es,
// label binds, and a `VisitForStackValue` in the KEYED_SUPER load path);
// treat the visible sequence as a skeleton of the original.
4631 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4633 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4635 Comment cmnt(masm_, "[ CountOperation");
4636 Property* prop = expr->expression()->AsProperty();
4637 LhsKind assign_type = Property::GetAssignType(prop);
4638 // Evaluate expression and get value.
4639 if (assign_type == VARIABLE) {
4640 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4641 AccumulatorValueContext context(this);
4642 EmitVariableLoad(expr->expression()->AsVariableProxy());
4644 // Reserve space for result of postfix operation.
4645 if (expr->is_postfix() && !context()->IsEffect()) {
4646 __ push(Immediate(Smi::FromInt(0)));
4648 switch (assign_type) {
4649 case NAMED_PROPERTY: {
4650 // Put the object both on the stack and in the register.
4651 VisitForStackValue(prop->obj());
4652 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4653 EmitNamedPropertyLoad(prop);
4657 case NAMED_SUPER_PROPERTY: {
// Stack after this case: this, home_object, this (receiver for load).
4658 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4659 VisitForAccumulatorValue(
4660 prop->obj()->AsSuperPropertyReference()->home_object());
4661 __ push(result_register());
4662 __ push(MemOperand(esp, kPointerSize));
4663 __ push(result_register());
4664 EmitNamedSuperPropertyLoad(prop);
4668 case KEYED_SUPER_PROPERTY: {
4669 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4671 prop->obj()->AsSuperPropertyReference()->home_object());
4672 VisitForAccumulatorValue(prop->key());
4673 __ push(result_register());
4674 __ push(MemOperand(esp, 2 * kPointerSize));
4675 __ push(MemOperand(esp, 2 * kPointerSize));
4676 __ push(result_register());
4677 EmitKeyedSuperPropertyLoad(prop);
4681 case KEYED_PROPERTY: {
4682 VisitForStackValue(prop->obj());
4683 VisitForStackValue(prop->key());
4684 __ mov(LoadDescriptor::ReceiverRegister(),
4685 Operand(esp, kPointerSize)); // Object.
4686 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4687 EmitKeyedPropertyLoad(prop);
4696 // We need a second deoptimization point after loading the value
4697 // in case evaluating the property load my have a side effect.
4698 if (assign_type == VARIABLE) {
4699 PrepareForBailout(expr->expression(), TOS_REG);
4701 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4704 // Inline smi case if we are in a loop.
4705 Label done, stub_call;
4706 JumpPatchSite patch_site(masm_);
4707 if (ShouldInlineSmiCase(expr->op())) {
4709 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4711 // Save result for postfix expressions.
4712 if (expr->is_postfix()) {
4713 if (!context()->IsEffect()) {
4714 // Save the result on the stack. If we have a named or keyed property
4715 // we store the result under the receiver that is currently on top
4717 switch (assign_type) {
// Stack-slot offsets grow with the number of values the property kind
// keeps on the stack (receiver / home object / key).
4721 case NAMED_PROPERTY:
4722 __ mov(Operand(esp, kPointerSize), eax);
4724 case NAMED_SUPER_PROPERTY:
4725 __ mov(Operand(esp, 2 * kPointerSize), eax);
4727 case KEYED_PROPERTY:
4728 __ mov(Operand(esp, 2 * kPointerSize), eax);
4730 case KEYED_SUPER_PROPERTY:
4731 __ mov(Operand(esp, 3 * kPointerSize), eax);
// Smi fast path: add/sub a tagged 1; on overflow undo and fall through
// to the BinaryOpIC stub call below.
4737 if (expr->op() == Token::INC) {
4738 __ add(eax, Immediate(Smi::FromInt(1)));
4740 __ sub(eax, Immediate(Smi::FromInt(1)));
4742 __ j(no_overflow, &done, Label::kNear);
4743 // Call stub. Undo operation first.
4744 if (expr->op() == Token::INC) {
4745 __ sub(eax, Immediate(Smi::FromInt(1)));
4747 __ add(eax, Immediate(Smi::FromInt(1)));
4749 __ jmp(&stub_call, Label::kNear);
// Slow path (non-smi operand): convert to number first unless strong mode
// handles conversion in the stub.
4752 if (!is_strong(language_mode())) {
4753 ToNumberStub convert_stub(isolate());
4754 __ CallStub(&convert_stub);
4755 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4758 // Save result for postfix expressions.
4759 if (expr->is_postfix()) {
4760 if (!context()->IsEffect()) {
4761 // Save the result on the stack. If we have a named or keyed property
4762 // we store the result under the receiver that is currently on top
4764 switch (assign_type) {
4768 case NAMED_PROPERTY:
4769 __ mov(Operand(esp, kPointerSize), eax);
4771 case NAMED_SUPER_PROPERTY:
4772 __ mov(Operand(esp, 2 * kPointerSize), eax);
4774 case KEYED_PROPERTY:
4775 __ mov(Operand(esp, 2 * kPointerSize), eax);
4777 case KEYED_SUPER_PROPERTY:
4778 __ mov(Operand(esp, 3 * kPointerSize), eax);
4784 SetExpressionPosition(expr);
4786 // Call stub for +1/-1.
4787 __ bind(&stub_call);
// Old value stays in edx (set in elided lines); eax gets the literal 1.
4789 __ mov(eax, Immediate(Smi::FromInt(1)));
4790 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4791 strength(language_mode())).code();
4792 CallIC(code, expr->CountBinOpFeedbackId());
4793 patch_site.EmitPatchInfo();
4796 if (is_strong(language_mode())) {
4797 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4799 // Store the value returned in eax.
4800 switch (assign_type) {
4802 if (expr->is_postfix()) {
4803 // Perform the assignment as if via '='.
4804 { EffectContext context(this);
4805 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4806 Token::ASSIGN, expr->CountSlot());
4807 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4810 // For all contexts except EffectContext We have the result on
4811 // top of the stack.
4812 if (!context()->IsEffect()) {
4813 context()->PlugTOS();
4816 // Perform the assignment as if via '='.
4817 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4818 Token::ASSIGN, expr->CountSlot());
4819 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4820 context()->Plug(eax);
4823 case NAMED_PROPERTY: {
4824 __ mov(StoreDescriptor::NameRegister(),
4825 prop->key()->AsLiteral()->value());
4826 __ pop(StoreDescriptor::ReceiverRegister());
4827 if (FLAG_vector_stores) {
4828 EmitLoadStoreICSlot(expr->CountSlot());
4831 CallStoreIC(expr->CountStoreFeedbackId());
4833 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4834 if (expr->is_postfix()) {
4835 if (!context()->IsEffect()) {
4836 context()->PlugTOS();
4839 context()->Plug(eax);
4843 case NAMED_SUPER_PROPERTY: {
4844 EmitNamedSuperPropertyStore(prop);
4845 if (expr->is_postfix()) {
4846 if (!context()->IsEffect()) {
4847 context()->PlugTOS();
4850 context()->Plug(eax);
4854 case KEYED_SUPER_PROPERTY: {
4855 EmitKeyedSuperPropertyStore(prop);
4856 if (expr->is_postfix()) {
4857 if (!context()->IsEffect()) {
4858 context()->PlugTOS();
4861 context()->Plug(eax);
4865 case KEYED_PROPERTY: {
4866 __ pop(StoreDescriptor::NameRegister());
4867 __ pop(StoreDescriptor::ReceiverRegister());
4869 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4870 if (FLAG_vector_stores) {
4871 EmitLoadStoreICSlot(expr->CountSlot());
4874 CallIC(ic, expr->CountStoreFeedbackId());
4876 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4877 if (expr->is_postfix()) {
4878 // Result is on the stack
4879 if (!context()->IsEffect()) {
4880 context()->PlugTOS();
4883 context()->Plug(eax);
// Fast path for `typeof <sub_expr> == "<check>"` style comparisons: rather
// than materializing the typeof string, branch directly on the value's type
// for each recognized literal ("number", "string", "symbol", "boolean",
// "undefined", "function", "object", and the SIMD types).
4891 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4892 Expression* sub_expr,
4893 Handle<String> check) {
4894 Label materialize_true, materialize_false;
4895 Label* if_true = NULL;
4896 Label* if_false = NULL;
4897 Label* fall_through = NULL;
4898 context()->PrepareTest(&materialize_true, &materialize_false,
4899 &if_true, &if_false, &fall_through);
4901 { AccumulatorValueContext context(this);
4902 VisitForTypeofValue(sub_expr);
4904 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4906 Factory* factory = isolate()->factory();
4907 if (String::Equals(check, factory->number_string())) {
4908 __ JumpIfSmi(eax, if_true);
4909 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4910 isolate()->factory()->heap_number_map());
4911 Split(equal, if_true, if_false, fall_through);
4912 } else if (String::Equals(check, factory->string_string())) {
4913 __ JumpIfSmi(eax, if_false);
4914 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4915 Split(below, if_true, if_false, fall_through);
4916 } else if (String::Equals(check, factory->symbol_string())) {
4917 __ JumpIfSmi(eax, if_false);
4918 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4919 Split(equal, if_true, if_false, fall_through);
4920 } else if (String::Equals(check, factory->boolean_string())) {
4921 __ cmp(eax, isolate()->factory()->true_value());
4922 __ j(equal, if_true);
4923 __ cmp(eax, isolate()->factory()->false_value());
4924 Split(equal, if_true, if_false, fall_through);
4925 } else if (String::Equals(check, factory->undefined_string())) {
4926 __ cmp(eax, isolate()->factory()->undefined_value());
4927 __ j(equal, if_true);
4928 __ JumpIfSmi(eax, if_false);
4929 // Check for undetectable objects => true.
4930 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4931 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4932 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4933 Split(not_zero, if_true, if_false, fall_through);
4934 } else if (String::Equals(check, factory->function_string())) {
4935 __ JumpIfSmi(eax, if_false);
// Callable spec objects are exactly JSFunction and JSFunctionProxy here.
4936 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4937 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4938 __ j(equal, if_true);
4939 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4940 Split(equal, if_true, if_false, fall_through);
4941 } else if (String::Equals(check, factory->object_string())) {
4942 __ JumpIfSmi(eax, if_false);
4943 __ cmp(eax, isolate()->factory()->null_value());
4944 __ j(equal, if_true);
4945 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4946 __ j(below, if_false);
4947 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4948 __ j(above, if_false);
4949 // Check for undetectable objects => false.
4950 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4951 1 << Map::kIsUndetectable);
4952 Split(zero, if_true, if_false, fall_through);
// Each SIMD type string compares the value's map against that type's map.
4954 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
4955 } else if (String::Equals(check, factory->type##_string())) { \
4956 __ JumpIfSmi(eax, if_false); \
4957 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
4958 isolate()->factory()->type##_map()); \
4959 Split(equal, if_true, if_false, fall_through);
4960 SIMD128_TYPES(SIMD128_TYPE)
// Unrecognized literal: comparison is always false (else branch elided).
4964 if (if_false != fall_through) __ jmp(if_false);
4966 context()->Plug(if_true, if_false);
// Code generation for comparison operators. Literal typeof/nil comparisons
// take a fast inlined path; `in` and `instanceof` call builtins/stubs; the
// rest use an inline smi compare with a patchable fallback to CompareIC.
// NOTE(review): the switch statement opening and some case labels (e.g.
// Token::IN) are elided from this extract.
4970 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4971 Comment cmnt(masm_, "[ CompareOperation");
4972 SetExpressionPosition(expr);
4974 // First we try a fast inlined version of the compare when one of
4975 // the operands is a literal.
4976 if (TryLiteralCompare(expr)) return;
4978 // Always perform the comparison for its control flow. Pack the result
4979 // into the expression's context after the comparison is performed.
4980 Label materialize_true, materialize_false;
4981 Label* if_true = NULL;
4982 Label* if_false = NULL;
4983 Label* fall_through = NULL;
4984 context()->PrepareTest(&materialize_true, &materialize_false,
4985 &if_true, &if_false, &fall_through);
4987 Token::Value op = expr->op();
4988 VisitForStackValue(expr->left());
// `in` case: builtin returns true_value/false_value in eax.
4991 VisitForStackValue(expr->right());
4992 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4993 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4994 __ cmp(eax, isolate()->factory()->true_value());
4995 Split(equal, if_true, if_false, fall_through);
4998 case Token::INSTANCEOF: {
4999 VisitForAccumulatorValue(expr->right());
5001 InstanceOfStub stub(isolate());
5003 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5004 __ cmp(eax, isolate()->factory()->true_value());
5005 Split(equal, if_true, if_false, fall_through);
// Default case: ordinary relational / equality comparison.
5010 VisitForAccumulatorValue(expr->right());
5011 Condition cc = CompareIC::ComputeCondition(op);
5014 bool inline_smi_code = ShouldInlineSmiCase(op);
5015 JumpPatchSite patch_site(masm_);
5016 if (inline_smi_code) {
// Patchable smi check: both operands smi -> compare directly, else
// fall through to the CompareIC stub at &slow_case.
5020 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5022 Split(cc, if_true, if_false, NULL);
5023 __ bind(&slow_case);
5026 Handle<Code> ic = CodeFactory::CompareIC(
5027 isolate(), op, strength(language_mode())).code();
5028 CallIC(ic, expr->CompareOperationFeedbackId());
5029 patch_site.EmitPatchInfo();
5031 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5033 Split(cc, if_true, if_false, fall_through);
5037 // Convert the result of the comparison into one expected for this
5038 // expression's context.
5039 context()->Plug(if_true, if_false);
// Fast path for comparing a subexpression against null/undefined literals.
// Strict equality compares directly against the nil value; loose equality
// goes through the CompareNilIC stub.
5043 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5044 Expression* sub_expr,
5046 Label materialize_true, materialize_false;
5047 Label* if_true = NULL;
5048 Label* if_false = NULL;
5049 Label* fall_through = NULL;
5050 context()->PrepareTest(&materialize_true, &materialize_false,
5051 &if_true, &if_false, &fall_through);
5053 VisitForAccumulatorValue(sub_expr);
5054 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5056 Handle<Object> nil_value = nil == kNullValue
5057 ? isolate()->factory()->null_value()
5058 : isolate()->factory()->undefined_value();
5059 if (expr->op() == Token::EQ_STRICT) {
5060 __ cmp(eax, nil_value);
5061 Split(equal, if_true, if_false, fall_through);
5063 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5064 CallIC(ic, expr->CompareOperationFeedbackId());
// The stub's result is tested in an elided line; non-zero means "equal
// to nil" here.
5066 Split(not_zero, if_true, if_false, fall_through);
5068 context()->Plug(if_true, if_false);
// Loads the current JSFunction from its standard frame slot into eax.
5072 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5073 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5074 context()->Plug(eax);
// Accumulator register for this backend (body elided in this extract;
// on ia32 the result register is eax throughout this file).
5078 Register FullCodeGenerator::result_register() {
// Context register for this backend (body elided in this extract; the
// rest of this file consistently uses esi as the context register).
5083 Register FullCodeGenerator::context_register() {
// Stores `value` into the current frame at ebp+frame_offset; the offset
// must be pointer-aligned.
5088 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5089 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5090 __ mov(Operand(ebp, frame_offset), value);
// Loads slot `context_index` of the current context (esi) into `dst`.
5094 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5095 __ mov(dst, ContextOperand(esi, context_index));
// Pushes the closure argument used when allocating a new context: a smi
// sentinel for script/module scopes, the enclosing context's closure for
// eval scopes, and the current frame's function otherwise.
5099 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5100 Scope* closure_scope = scope()->ClosureScope();
5101 if (closure_scope->is_script_scope() ||
5102 closure_scope->is_module_scope()) {
5103 // Contexts nested in the native context have a canonical empty function
5104 // as their closure, not the anonymous closure containing the global
5105 // code. Pass a smi sentinel and let the runtime look up the empty
5107 __ push(Immediate(Smi::FromInt(0)));
5108 } else if (closure_scope->is_eval_scope()) {
5109 // Contexts nested inside eval code have the same closure as the context
5110 // calling eval, not the anonymous closure containing the eval code.
5111 // Fetch it from the context.
5112 __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
5114 DCHECK(closure_scope->is_function_scope());
5115 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5120 // ----------------------------------------------------------------------------
5121 // Non-local control flow support.
// On entry to a finally block: "cooks" the return address (converts it to a
// code-relative smi so the GC can relocate it), saves the result register,
// and stashes the pending message, then clears it.
// NOTE(review): the pops/pushes and smi-tagging around the cooked address
// (original 5126, 5130-5132, 5140-5141) are elided in this extract.
5123 void FullCodeGenerator::EnterFinallyBlock() {
5124 // Cook return address on top of stack (smi encoded Code* delta)
5125 DCHECK(!result_register().is(edx));
5127 __ sub(edx, Immediate(masm_->CodeObject()));
5128 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5129 STATIC_ASSERT(kSmiTag == 0);
5133 // Store result register while executing finally block.
5134 __ push(result_register());
5136 // Store pending message while executing finally block.
5137 ExternalReference pending_message_obj =
5138 ExternalReference::address_of_pending_message_obj(isolate());
5139 __ mov(edx, Operand::StaticVariable(pending_message_obj));
5142 ClearPendingMessage();
// Restores the state saved by EnterFinallyBlock(), in reverse order:
// pending message, result register, then the uncooked return address.
// NOTE(review): the pops that load edx and the smi-decoding shift are elided
// in this listing — consult the full file before modifying.
5146 void FullCodeGenerator::ExitFinallyBlock() {
// edx is scratch; it must not alias the result register restored below.
5147 DCHECK(!result_register().is(edx));
5148 // Restore pending message from stack.
5150 ExternalReference pending_message_obj =
5151 ExternalReference::address_of_pending_message_obj(isolate());
5152 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5154 // Restore result register from stack.
5155 __ pop(result_register());
5157 // Uncook return address.
// Reverse of EnterFinallyBlock's cooking: add the code object's start back
// to the smi-decoded delta to recover the absolute return address.
5160 __ add(edx, Immediate(masm_->CodeObject()));
// Clears the isolate's pending-message slot by storing the hole value,
// the sentinel meaning "no pending message".
5165 void FullCodeGenerator::ClearPendingMessage() {
// edx is used as scratch; it must not alias the result register.
5166 DCHECK(!result_register().is(edx));
5167 ExternalReference pending_message_obj =
5168 ExternalReference::address_of_pending_message_obj(isolate());
5169 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
5170 __ mov(Operand::StaticVariable(pending_message_obj), edx);
// Loads the smi-encoded feedback-vector slot index into the register the
// vector store IC trampoline expects, ahead of a store IC call.
5174 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
// Only meaningful when vector stores are enabled and the slot is valid.
5175 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5176 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5177 Immediate(SmiFromSlot(slot)));
// Raw x86 opcode bytes used by BackEdgeTable to patch the back-edge check
// sequence in place (see PatchAt/GetBackEdgeState below).
// 0x79 = JNS rel8 (jump if sign flag clear).
5184 static const byte kJnsInstruction = 0x79;
// Fixed rel8 displacement of the emitted jns, skipping over the call.
5185 static const byte kJnsOffset = 0x11;
// 0x66 0x90 = two-byte NOP (operand-size-prefixed nop), used to overwrite
// the jns when the back edge is switched to on-stack replacement.
5186 static const byte kNopByteOne = 0x66;
5187 static const byte kNopByteTwo = 0x90;
// 0xe8 = CALL rel32; its 4-byte operand is the patched call target.
5189 static const byte kCallInstruction = 0xe8;
// Patches the back-edge check at |pc| in |unoptimized_code| to the given
// |target_state|, rewriting both the jns/nop bytes and the call target.
// The emitted sequence being patched is:
//   sub <profiling_counter>, <delta>
//   jns ok                      ;; or 2-byte nop after patching
//   call <stub>                 ;; 0xe8 + rel32; pc points just past it
//   ok:
// NOTE(review): the |pc| parameter line, the INTERRUPT case label and the
// break/closing lines are elided in this listing — consult the full file
// before modifying offsets.
5193 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5195 BackEdgeState target_state,
5196 Code* replacement_code) {
// pc points after the call's rel32 operand; back up to the operand itself.
5197 Address call_target_address = pc - kIntSize;
// The jns opcode byte sits 3 bytes before the call operand, its rel8
// displacement 2 bytes before (then the 0xe8 call opcode itself).
5198 Address jns_instr_address = call_target_address - 3;
5199 Address jns_offset_address = call_target_address - 2;
5201 switch (target_state) {
5203 // sub <profiling_counter>, <delta> ;; Not changed
5205 // call <interrupt stub>
// (INTERRUPT case, label elided): restore the conditional skip so the
// interrupt stub is only called when the counter underflows.
5207 *jns_instr_address = kJnsInstruction;
5208 *jns_offset_address = kJnsOffset;
5210 case ON_STACK_REPLACEMENT:
5211 case OSR_AFTER_STACK_CHECK:
5212 // sub <profiling_counter>, <delta> ;; Not changed
5215 // call <on-stack replacment>
// Overwrite the jns with a 2-byte nop so the OSR call is unconditional.
5217 *jns_instr_address = kNopByteOne;
5218 *jns_offset_address = kNopByteTwo;
// Point the call at the replacement stub and tell the incremental marker
// about the new embedded code target.
5222 Assembler::set_target_address_at(call_target_address,
5224 replacement_code->entry());
5225 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5226 unoptimized_code, call_target_address, replacement_code);
// Inspects the patched bytes at a back edge and reports which state
// PatchAt() last wrote: a live jns means the interrupt check is active,
// a nop means the call was redirected to one of the OSR builtins (the call
// target disambiguates which). Inverse of PatchAt() above.
// NOTE(review): the |pc| parameter line, the INTERRUPT return and some
// closing lines are elided in this listing — consult the full file.
5230 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5232 Code* unoptimized_code,
// Same layout as in PatchAt: call operand at pc - kIntSize, jns 3 bytes
// before it.
5234 Address call_target_address = pc - kIntSize;
5235 Address jns_instr_address = call_target_address - 3;
// The byte just before the operand must always be the 0xe8 call opcode.
5236 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5238 if (*jns_instr_address == kJnsInstruction) {
// jns still present => interrupt check configuration; the call must
// target the InterruptCheck builtin.
5239 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5240 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5241 Assembler::target_address_at(call_target_address,
// Otherwise the jns was nopped out; distinguish the two OSR states by the
// call target.
5246 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5247 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5249 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5250 isolate->builtins()->OnStackReplacement()->entry()) {
5251 return ON_STACK_REPLACEMENT;
5254 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5255 Assembler::target_address_at(call_target_address,
5257 return OSR_AFTER_STACK_CHECK;
5261 } // namespace internal
5264 #endif // V8_TARGET_ARCH_IA32