// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"
#include "src/x87/frames-x87.h"

#define __ ACCESS_MASM(masm_)
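
// A note on the smi-check patching protocol used below: 'test reg,
// kSmiTagMask' always clears the carry flag, so before patching the jc
// emitted by EmitJumpIfSmi is never taken and the jnc emitted by
// EmitJumpIfNotSmi is always taken. When an IC later inlines its smi case,
// it patches jc -> jz and jnc -> jnz (see PatchInlinedSmiCode in the IC
// code), turning the same test into a real smi-tag check.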
class JumpPatchSite BASE_EMBEDDED {
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

  DCHECK(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
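      // The delta back to patch_site_ is encoded in this test instruction's
      // immediate so the IC can locate the inlined jump when patching (a
      // sketch of intent; see PatchInlinedSmiCode for the authoritative
      // decoding).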
      __ nop();  // Signals no inlined code.

  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);

  MacroAssembler* masm_;
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
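//
// Roughly, with n parameters and kPointerSize == 4 (sketched here for
// orientation; frames-x87.h is authoritative):
//   ebp + 8 + n * 4       : receiver
//   ebp + 4 + (n - i) * 4 : parameter i
//   ebp + 4               : return address
//   ebp + 0               : caller's ebp
//   ebp - 4               : context (esi)
//   ebp - 8               : JSFunction (edi)
//   ebp - 12, ...         : stack-allocated locals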
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
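  // The cell holds the remaining interrupt budget as a smi. Back edges and
  // returns decrement it (see EmitProfilingCounterDecrement below); once it
  // goes non-positive we call the InterruptCheck builtin and reset it.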
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis()) {

    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {

        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
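      // Pushing in batches of kMaxPushes bounds code size while amortizing
      // the loop overhead: e.g. 100 locals become 100 / 32 = 3 loop
      // iterations of 32 pushes plus 100 % 32 = 4 straight-line pushes.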
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);

        __ bind(&loop_header);

        for (int i = 0; i < kMaxPushes; i++) {

        __ j(not_zero, &loop_header, Label::kNear);

      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {

      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);

      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;

      __ CallRuntime(Runtime::kNewFunctionContext, 1);

    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.

    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
        } else if (FLAG_debug_code) {

          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    SetVar(this_function_var, edi, ebx, edx);

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    Label non_adaptor_frame;
    __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
           Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ j(not_equal, &non_adaptor_frame);
    __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

    __ bind(&non_adaptor_frame);
    __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
           Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));

    Label non_construct_frame, done;
    __ j(not_equal, &non_construct_frame);

           Operand(eax, ConstructFrameConstants::kOriginalConstructorOffset));

    // Non-construct frame
    __ bind(&non_construct_frame);
    __ mov(eax, Immediate(isolate()->factory()->undefined_value()));

    SetVar(new_target_var, eax, ebx, edx);

  // Possibly allocate RestParameters
  Variable* rest_param = scope()->rest_parameter(&rest_index);

    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));

    __ push(Immediate(Smi::FromInt(num_parameters)));
    __ push(Immediate(Smi::FromInt(rest_index)));
    __ push(Immediate(Smi::FromInt(language_mode())));

    RestParamAccessStub stub(isolate());

    SetVar(rest_param, eax, ebx, edx);

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {

      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));

    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;

      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;

    ArgumentsAccessStub stub(isolate(), type);

    SetVar(arguments, eax, ebx, edx);

    __ CallRuntime(Runtime::kTraceEnter, 0);
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);

      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
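  // Scaling the decrement by the back-edge distance makes the interrupt
  // fire after roughly the same amount of executed code regardless of how
  // large the loop body is.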
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
    // Common return label
    __ bind(&return_label_);

      __ CallRuntime(Runtime::kTraceExit, 1);

    // Pretend that the exit is a backwards jump to the entry.

    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;

      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));

    EmitProfilingCounterDecrement(weight);

    __ j(positive, &ok, Label::kNear);

    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);

    EmitProfilingCounterReset();

    SetReturnPosition(function());
    int no_frame_start = masm_->pc_offset();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {

    __ SafeMove(result_register(), Immediate(lit));

    __ Move(result_register(), Immediate(lit));


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {

    __ SafePush(Immediate(lit));

    __ push(Immediate(lit));


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),

  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);

      if (true_label_ != fall_through_) __ jmp(true_label_);

  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);

      if (true_label_ != fall_through_) __ jmp(true_label_);

    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    Register reg) const {

  __ Move(result_register(), reg);


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {

  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {

  // For simplicity we always test the accumulator register.

  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {

  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {

  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),

    if (true_label_ != fall_through_) __ jmp(true_label_);

    if (false_label_ != fall_through_) __ jmp(false_label_);


void FullCodeGenerator::DoTest(Expression* condition,

                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
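
// Note: Split (below) emits at most one branch. Whichever target equals
// fall_through is reached by simply falling off the end of the emitted
// code, so only the remaining target(s) get explicit jumps.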
void FullCodeGenerator::Split(Condition cc,

                              Label* fall_through) {
  if (if_false == fall_through) {

  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;

    offset += JavaScriptFrameConstants::kLocal0Offset;

  return Operand(ebp, offset);


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());

  return StackOperand(var);


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);


void FullCodeGenerator::SetVar(Variable* var,

  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,

  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(StackOperand(variable),
             Immediate(isolate()->factory()->the_hole_value()));

    case VariableLocation::CONTEXT:

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ mov(ContextOperand(esi, variable->index()),
             Immediate(isolate()->factory()->the_hole_value()));
      // No write barrier since the hole value is in old space.
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.

      __ push(Immediate(isolate()->factory()->the_hole_value()));

      __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.

      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(Immediate(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.

  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {

      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());

    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);

    __ jmp(clause->body_target());

    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.

  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());

    __ jmp(default_clause->body_target());

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());

  __ cmp(eax, isolate()->factory()->null_value());

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);

  ToObjectStub stub(isolate());

  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);

  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));

  // We got a fixed array in register eax. Iterate through that.

  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check.
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
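  // From here on the loop operates on five stack slots, read back below:
  //   esp + 0  : current index (smi)
  //   esp + 4  : array length (smi)
  //   esp + 8  : entry array (enum cache or fixed array)
  //   esp + 12 : expected map, or smi 1/0 marking the slow/proxy case
  //   esp + 16 : the enumerable object itself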
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);

  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.

  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);

  __ j(zero, &update_each);
  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);

  decrement_loop_depth();


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&

      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(ebx, Immediate(info));

    __ push(Immediate(info));
    __ push(Immediate(pretenure
                          ? isolate()->factory()->true_value()
                          : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);

  context()->Plug(eax);


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,

                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(isolate()->factory()->home_object_symbol()));
    __ mov(StoreDescriptor::ValueRegister(),
           Operand(esp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,

  Register context = esi;
  Register temp = edx;

    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),

        __ j(not_equal, slow);

      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.

    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.

    if (!context.is(temp)) {
      __ mov(temp, context);

    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
  // All extension objects were empty and it is safe to use a normal global
  // load.
  EmitGlobalVariableLoad(proxy, typeof_mode);


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,

  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),

        __ j(not_equal, slow);

      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.

  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);

  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);

      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);

    __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), var->name());
    __ mov(LoadDescriptor::SlotRegister(),
           Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();
  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.

        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;

          // Check that we always have a valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          // Let and const need a read barrier.

          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());

          context()->Plug(eax);

      context()->Plug(var);

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");

      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);

      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);

      context()->Plug(eax);


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");

  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in eax.

  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);

  __ bind(&runtime_allocate);

  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);

  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);

  context()->Plug(eax);


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));

    VisitForStackValue(expression);


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);

    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());

  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:

      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));

      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));

              CallStoreIC(key->LiteralFeedbackId());

            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ mov(StoreDescriptor::ReceiverRegister(), eax);
              __ mov(StoreDescriptor::NameRegister(),
                     Immediate(isolate()->factory()->home_object_symbol()));
              __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));

            VisitForEffect(value);

        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);

      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;

      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();

    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));

    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));

    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
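  // Illustration: in a literal like { a: 1, [b]: 2, c: 3 }, only { a: 1 } is
  // static; the dynamic part starts at [b], so both [b] and c are defined by
  // the runtime calls below, preserving insertion order.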
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;

    __ push(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);

      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ push(Immediate(Smi::FromInt(NONE)));
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);

        case ObjectLiteral::Property::PROTOTYPE:

        case ObjectLiteral::Property::GETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);

        case ObjectLiteral::Property::SETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);

    context()->PlugTOS();

    context()->Plug(eax);

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);

    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);

  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;

    VisitForAccumulatorValue(subexpr);
1798 if (has_constant_fast_elements) {
1799 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1800 // cannot transition and don't need to call the runtime stub.
1801 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1802 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1803 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1804 // Store the subexpression value in the array's elements.
1805 __ mov(FieldOperand(ebx, offset), result_register());
1806 // Update the write barrier for the array store.
1807 __ RecordWriteField(ebx, offset, result_register(), ecx, kDontSaveFPRegs,
1808 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
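      // The store above may create an old-to-new pointer, so a write barrier
      // is required. INLINE_SMI_CHECK lets the barrier skip smi values without
      // a call; kDontSaveFPRegs is used throughout this x87 port.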
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(array_index)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part starts at the first spread expression
  // (inclusive); those elements get appended to the array. Note that the
  // number of elements an iterable produces is unknown ahead of time.
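  // For example, in [a, b, ...xs, c] the values of a and b are stored directly
  // into the cloned backing store by the loop above, while ...xs and c are
  // handled below via CONCAT_ITERABLE_TO_ARRAY and %AppendElement.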
  if (array_index < length && result_saved) {
    __ Drop(1);  // literal index
    __ Pop(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(eax);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ push(result_register());
      if (expr->is_compound()) {
        __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);
      __ bind(&continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
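      // continuation.pos() is the assembler offset of the continuation label
      // above; boxed as a smi in the generator object, it tells the resume
      // path where to re-enter this code object.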
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

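    // A delegating yield (yield*) is compiled as an inline loop: call
    // iter.next (or iter.throw if an exception arrives) with the value
    // received so far, yield each intermediate result, and finish once
    // result.done is true.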
    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
      __ push(load_name);                                       // "throw"
      __ push(Operand(esp, 2 * kPointerSize));                  // iter
      __ push(eax);                                             // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);  // result
      int handler_index = NewHandlerTableEntry();
      EnterTryBlock(handler_index, &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ push(eax);  // result

      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&l_resume);

      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + try_block_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                     // g
      __ push(Immediate(Smi::FromInt(handler_index)));  // handler-index
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);  // result
      EmitReturnSequence();
      __ bind(&l_resume);  // received in eax
      ExitTryBlock(handler_index);

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);

      __ mov(load_name, isolate()->factory()->next_string());
      __ push(load_name);                       // "next"
      __ push(Operand(esp, 2 * kPointerSize));  // iter
      __ push(eax);                             // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(load_receiver, Operand(esp, kPointerSize));
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      SetCallPosition(expr, 1);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                 // save result
      __ Move(load_receiver, eax);  // result
      __ mov(load_name,
             isolate()->factory()->done_string());  // "done"
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);  // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(load_receiver);  // result
      __ mov(load_name,
             isolate()->factory()->value_string());  // "value"
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);   // result.value in eax
      context()->DropAndPlug(2, eax);  // drop iter and g
      break;
    }
  }
}

void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);
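  // Plain holes suffice as argument placeholders here: parameter values are
  // not preserved in the frame across a suspend (parameters that outlive a
  // yield are presumably context-allocated instead).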

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
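    // kGeneratorExecuting marks the generator as running, so a nested attempt
    // to resume it fails instead of re-entering the frame.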
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  __ bind(&done);
  context()->Plug(result_register());
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(ecx);
  __ mov(edx, isolate()->factory()->ToBoolean(done));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset, ecx,
                      edx, kDontSaveFPRegs);
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(!prop->IsSuperAccess());

  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ push(Immediate(key->value()));
  __ push(Immediate(Smi::FromInt(language_mode())));
  __ CallRuntime(Runtime::kLoadFromSuper, 4);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallIC(ic);
}


void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  __ push(Immediate(Smi::FromInt(language_mode())));
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ and_(eax, Immediate(~kSmiTagMask));
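      // The shift is performed on the still-tagged value: an arithmetic right
      // shift keeps it in smi range, and masking off the tag bit afterwards
      // leaves a properly tagged smi, so no overflow check is needed.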
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Check that the *unsigned* result fits in a smi.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
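      // The zero check guards against -0: if the product is zero and either
      // operand was negative, the true result is -0, which a smi cannot
      // represent, so fall back to the stub.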
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
                                                  int* used_store_slots) {
  // Constructor is in eax.
  DCHECK(lit != NULL);
  __ push(eax);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = ebx;
  __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ push(Operand(esp, kPointerSize));  // constructor
    } else {
      __ push(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here
    // instead of performing the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
      __ push(eax);
    }

    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2,
                              lit->SlotForHomeObject(value, used_store_slots));

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;

      case ObjectLiteral::Property::SETTER:
        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;
    }
  }

  // Set both the prototype and constructor to have fast properties, and also
  // freeze them in strong mode.
  __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ pop(edx);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}

void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorICSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ push(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ push(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      __ pop(StoreDescriptor::ReceiverRegister());  // Receiver.
      __ pop(StoreDescriptor::ValueRegister());     // Restore value.
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
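    // VarOperand(var, ecx) left the holding context object in ecx for context
    // slots, which is why the barrier above can use ecx as its object
    // register.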
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorICSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->IsGlobalSlot()) {
    // Global var, const, or let.
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
      __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
      DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
      StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy,
                     2);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT_CONST) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);

  } else if (var->is_this() && op == Token::INIT_CONST) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(var->mode() == CONST_LEGACY);
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError, 0);
    }
    // Silently ignore store in sloppy mode.
  }
}

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  __ pop(StoreDescriptor::ReceiverRegister());
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallStoreIC();
  } else {
    CallStoreIC(expr->AssignmentFeedbackId());
  }
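  // With FLAG_vector_stores the store IC is keyed by a feedback-vector slot
  // (loaded above); otherwise its feedback is keyed by the assignment's
  // TypeFeedbackId, as before the vector-store transition.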
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ push(Immediate(key->value()));
  __ push(eax);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // eax : value
  // stack : receiver ('this'), home_object, key
  __ push(eax);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(StoreDescriptor::NameRegister());  // Key.
  __ pop(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallIC(ic);
  } else {
    CallIC(ic, expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ pop(LoadDescriptor::ReceiverRegister());                  // Object.
      __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
  }

  EmitCall(expr, call_type);
}

void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
  __ push(Immediate(key->value()));
  __ push(Immediate(Smi::FromInt(language_mode())));
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadFromSuper, 4);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ push(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  __ push(Immediate(Smi::FromInt(language_mode())));
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  SetCallPosition(expr, arg_count);
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
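  // The runtime sees five arguments in total: the copy of the callee pushed
  // by VisitCall before this helper runs, plus the four values pushed above.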
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(callee->name()));
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
}

void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call. Then we call the resolved
    // function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    PushCalleeAndWithBaseObject(expr);

    // Push the arguments.
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Push a copy of the function (found below the arguments) and
    // resolve eval.
    __ push(Operand(esp, (arg_count + 1) * kPointerSize));
    EmitResolvePossiblyDirectEval(arg_count);

    // Touch up the stack with the resolved function.
    __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

    PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

    SetCallPosition(expr, arg_count);
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    PushCalleeAndWithBaseObject(expr);
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      VisitForStackValue(property->obj());
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else if (call_type == Call::SUPER_CALL) {
    EmitSuperConstructorCall(expr);
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    VisitForStackValue(callee);
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
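  // ebx (feedback vector) and edx (slot, as a smi) let the construct stub
  // record the called constructor in the type-feedback vector.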

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  EmitLoadSuperConstructor(super_call_ref);
  __ push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load original constructor into ecx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(ecx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }

  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));

  CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  RecordJSReturnSite(expr);

  context()->Plug(eax);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
  Split(zero, if_true, if_false, fall_through);
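  // kSmiTagMask | 0x80000000 is clear only for values that are both smis
  // (low tag bit 0) and non-negative (sign bit 0).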

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  Register map = ebx;
  __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
  __ CmpInstanceType(map, FIRST_SIMD_VALUE_TYPE);
  __ j(less, if_false);
  __ CmpInstanceType(map, LAST_SIMD_VALUE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(less_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
  // Check if the exponent half is 0x80000000. Comparing against 1 and
  // checking for overflow is the shortest possible encoding.
  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
  __ j(no_overflow, if_false);
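  // Subtracting 1 overflows only for 0x80000000 (INT_MIN), which is exactly
  // the upper word of -0.0 (sign bit set, everything else zero); the lower
  // word is then checked against zero below.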
  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  Register map = ebx;
  __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
  __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
  __ j(less, if_false);
  __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(less_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

3570 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3571 DCHECK(expr->arguments()->length() == 0);
3573 Label materialize_true, materialize_false;
3574 Label* if_true = NULL;
3575 Label* if_false = NULL;
3576 Label* fall_through = NULL;
3577 context()->PrepareTest(&materialize_true, &materialize_false,
3578 &if_true, &if_false, &fall_through);
3580 // Get the frame pointer for the calling frame.
3581 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3583 // Skip the arguments adaptor frame if it exists.
3584 Label check_frame_marker;
3585 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3586 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3587 __ j(not_equal, &check_frame_marker);
3588 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3590 // Check the marker in the calling frame.
3591 __ bind(&check_frame_marker);
3592 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3593 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3594 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3595 Split(equal, if_true, if_false, fall_through);
3597 context()->Plug(if_true, if_false);
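// The frame walk above, in C-like pseudocode (a sketch; the slot helpers
// are illustrative, the real offsets come from StandardFrameConstants):
//
//   Address fp = caller_fp(ebp);
//   if (context_slot(fp) == Smi(StackFrame::ARGUMENTS_ADAPTOR))
//     fp = caller_fp(fp);                       // skip the adaptor frame
//   return marker_slot(fp) == Smi(StackFrame::CONSTRUCT);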
3601 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3602 ZoneList<Expression*>* args = expr->arguments();
3603 DCHECK(args->length() == 2);
3605 // Load the two objects into registers and perform the comparison.
3606 VisitForStackValue(args->at(0));
3607 VisitForAccumulatorValue(args->at(1));
3609 Label materialize_true, materialize_false;
3610 Label* if_true = NULL;
3611 Label* if_false = NULL;
3612 Label* fall_through = NULL;
3613 context()->PrepareTest(&materialize_true, &materialize_false,
3614 &if_true, &if_false, &fall_through);
3616 __ pop(ebx);
3617 __ cmp(eax, ebx);
3618 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3619 Split(equal, if_true, if_false, fall_through);
3621 context()->Plug(if_true, if_false);
3625 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3626 ZoneList<Expression*>* args = expr->arguments();
3627 DCHECK(args->length() == 1);
3629 // ArgumentsAccessStub expects the key in edx and the formal
3630 // parameter count in eax.
3631 VisitForAccumulatorValue(args->at(0));
3632 __ mov(edx, eax);
3633 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3634 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3635 __ CallStub(&stub);
3636 context()->Plug(eax);
3640 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3641 DCHECK(expr->arguments()->length() == 0);
3643 Label exit;
3644 // Get the number of formal parameters.
3645 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3647 // Check if the calling frame is an arguments adaptor frame.
3648 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3649 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3650 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3651 __ j(not_equal, &exit);
3653 // Arguments adaptor case: Read the arguments length from the
3654 // adaptor frame.
3655 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3657 __ bind(&exit);
3658 __ AssertSmi(eax);
3659 context()->Plug(eax);
3663 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3664 ZoneList<Expression*>* args = expr->arguments();
3665 DCHECK(args->length() == 1);
3666 Label done, null, function, non_function_constructor;
3668 VisitForAccumulatorValue(args->at(0));
3670 // If the object is a smi, we return null.
3671 __ JumpIfSmi(eax, &null);
3673 // Check that the object is a JS object but take special care of JS
3674 // functions to make sure they have 'Function' as their class.
3675 // Assume that there are only two callable types, and one of them is at
3676 // either end of the type range for JS object types. Saves extra comparisons.
3677 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3678 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3679 // Map is now in eax.
3680 __ j(below, &null);
3681 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3682 FIRST_SPEC_OBJECT_TYPE + 1);
3683 __ j(equal, &function);
3685 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3686 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3687 LAST_SPEC_OBJECT_TYPE - 1);
3688 __ j(equal, &function);
3689 // Assume that there is no larger type.
3690 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3692 // Check if the constructor in the map is a JS function.
3693 __ GetMapConstructor(eax, eax, ebx);
3694 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3695 __ j(not_equal, &non_function_constructor);
3697 // eax now contains the constructor function. Grab the
3698 // instance class name from there.
3699 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3700 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3701 __ jmp(&done);
3703 // Functions have class 'Function'.
3704 __ bind(&function);
3705 __ mov(eax, isolate()->factory()->Function_string());
3706 __ jmp(&done);
3708 // Objects with a non-function constructor have class 'Object'.
3709 __ bind(&non_function_constructor);
3710 __ mov(eax, isolate()->factory()->Object_string());
3711 __ jmp(&done);
3713 // Non-JS objects have class null.
3714 __ bind(&null);
3715 __ mov(eax, isolate()->factory()->null_value());
3717 // All done.
3718 __ bind(&done);
3720 context()->Plug(eax);
3724 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3725 ZoneList<Expression*>* args = expr->arguments();
3726 DCHECK(args->length() == 1);
3728 VisitForAccumulatorValue(args->at(0)); // Load the object.
3730 Label done;
3731 // If the object is a smi, return the object.
3732 __ JumpIfSmi(eax, &done, Label::kNear);
3733 // If the object is not a value type, return the object.
3734 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3735 __ j(not_equal, &done, Label::kNear);
3736 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3738 __ bind(&done);
3739 context()->Plug(eax);
3743 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3744 ZoneList<Expression*>* args = expr->arguments();
3745 DCHECK_EQ(1, args->length());
3747 VisitForAccumulatorValue(args->at(0));
3749 Label materialize_true, materialize_false;
3750 Label* if_true = nullptr;
3751 Label* if_false = nullptr;
3752 Label* fall_through = nullptr;
3753 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3754 &if_false, &fall_through);
3756 __ JumpIfSmi(eax, if_false);
3757 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
3758 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3759 Split(equal, if_true, if_false, fall_through);
3761 context()->Plug(if_true, if_false);
3765 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3766 ZoneList<Expression*>* args = expr->arguments();
3767 DCHECK(args->length() == 2);
3768 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3769 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3771 VisitForAccumulatorValue(args->at(0)); // Load the object.
3773 Register object = eax;
3774 Register result = eax;
3775 Register scratch = ecx;
3777 if (index->value() == 0) {
3778 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3779 } else {
3780 Label runtime, done;
3781 if (index->value() < JSDate::kFirstUncachedField) {
3782 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3783 __ mov(scratch, Operand::StaticVariable(stamp));
3784 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3785 __ j(not_equal, &runtime, Label::kNear);
3786 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3787 kPointerSize * index->value()));
3788 __ jmp(&done, Label::kNear);
3790 __ bind(&runtime);
3791 __ PrepareCallCFunction(2, scratch);
3792 __ mov(Operand(esp, 0), object);
3793 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3794 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3795 __ bind(&done);
3798 context()->Plug(result);
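// Equivalent logic for the date-field fast path above (a sketch; the field
// names are illustrative, the real offsets come from JSDate):
//
//   if (index == 0) return date->value;                 // always valid
//   if (index < JSDate::kFirstUncachedField &&
//       date->cache_stamp == isolate->date_cache_stamp) {
//     return date->cached_field[index];                 // cache still valid
//   }
//   return get_date_field_function(date, index);        // C fallback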
3802 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3803 ZoneList<Expression*>* args = expr->arguments();
3804 DCHECK_EQ(3, args->length());
3806 Register string = eax;
3807 Register index = ebx;
3808 Register value = ecx;
3810 VisitForStackValue(args->at(0)); // index
3811 VisitForStackValue(args->at(1)); // value
3812 VisitForAccumulatorValue(args->at(2)); // string
3814 __ pop(value);
3815 __ pop(index);
3817 if (FLAG_debug_code) {
3818 __ test(value, Immediate(kSmiTagMask));
3819 __ Check(zero, kNonSmiValue);
3820 __ test(index, Immediate(kSmiTagMask));
3821 __ Check(zero, kNonSmiValue);
3824 __ SmiUntag(value);
3825 __ SmiUntag(index);
3827 if (FLAG_debug_code) {
3828 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3829 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3832 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3833 value);
3834 context()->Plug(string);
3838 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3839 ZoneList<Expression*>* args = expr->arguments();
3840 DCHECK_EQ(3, args->length());
3842 Register string = eax;
3843 Register index = ebx;
3844 Register value = ecx;
3846 VisitForStackValue(args->at(0)); // index
3847 VisitForStackValue(args->at(1)); // value
3848 VisitForAccumulatorValue(args->at(2)); // string
3850 __ pop(value);
3851 __ pop(index);
3852 if (FLAG_debug_code) {
3853 __ test(value, Immediate(kSmiTagMask));
3854 __ Check(zero, kNonSmiValue);
3855 __ test(index, Immediate(kSmiTagMask));
3856 __ Check(zero, kNonSmiValue);
3857 __ SmiUntag(index);
3858 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3859 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3860 __ SmiTag(index);
3863 __ SmiUntag(value);
3864 // No need to untag a smi for two-byte addressing.
3865 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3866 value);
3867 context()->Plug(string);
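// Note on the two stores above: the one-byte variant untags both index and
// value and scales by times_1 (one byte per character). The two-byte
// variant can use the smi index directly, because a smi is the integer
// value shifted left by one, which is exactly the byte offset of a 16-bit
// character; only the value needs untagging.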
3871 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3872 ZoneList<Expression*>* args = expr->arguments();
3873 DCHECK(args->length() == 2);
3875 VisitForStackValue(args->at(0)); // Load the object.
3876 VisitForAccumulatorValue(args->at(1)); // Load the value.
3877 __ pop(ebx); // eax = value. ebx = object.
3879 Label done;
3880 // If the object is a smi, return the value.
3881 __ JumpIfSmi(ebx, &done, Label::kNear);
3883 // If the object is not a value type, return the value.
3884 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3885 __ j(not_equal, &done, Label::kNear);
3887 // Store the value.
3888 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3890 // Update the write barrier. Save the value as it will be
3891 // overwritten by the write barrier code and is needed afterward.
3892 __ mov(edx, eax);
3893 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3895 __ bind(&done);
3896 context()->Plug(eax);
3900 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3901 ZoneList<Expression*>* args = expr->arguments();
3902 DCHECK_EQ(args->length(), 1);
3904 // Load the argument into eax and call the stub.
3905 VisitForAccumulatorValue(args->at(0));
3907 NumberToStringStub stub(isolate());
3908 __ CallStub(&stub);
3909 context()->Plug(eax);
3913 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3914 ZoneList<Expression*>* args = expr->arguments();
3915 DCHECK_EQ(1, args->length());
3917 // Load the argument into eax and convert it.
3918 VisitForAccumulatorValue(args->at(0));
3920 ToObjectStub stub(isolate());
3921 __ CallStub(&stub);
3922 context()->Plug(eax);
3926 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3927 ZoneList<Expression*>* args = expr->arguments();
3928 DCHECK(args->length() == 1);
3930 VisitForAccumulatorValue(args->at(0));
3932 Label done;
3933 StringCharFromCodeGenerator generator(eax, ebx);
3934 generator.GenerateFast(masm_);
3935 __ jmp(&done);
3937 NopRuntimeCallHelper call_helper;
3938 generator.GenerateSlow(masm_, call_helper);
3940 __ bind(&done);
3941 context()->Plug(ebx);
3945 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3946 ZoneList<Expression*>* args = expr->arguments();
3947 DCHECK(args->length() == 2);
3949 VisitForStackValue(args->at(0));
3950 VisitForAccumulatorValue(args->at(1));
3952 Register object = ebx;
3953 Register index = eax;
3954 Register result = edx;
3956 __ pop(object);
3958 Label need_conversion;
3959 Label index_out_of_range;
3960 Label done;
3961 StringCharCodeAtGenerator generator(object,
3962 index,
3963 result,
3964 &need_conversion,
3965 &need_conversion,
3966 &index_out_of_range,
3967 STRING_INDEX_IS_NUMBER);
3968 generator.GenerateFast(masm_);
3969 __ jmp(&done);
3971 __ bind(&index_out_of_range);
3972 // When the index is out of range, the spec requires us to return
3973 // NaN.
3974 __ Move(result, Immediate(isolate()->factory()->nan_value()));
3975 __ jmp(&done);
3977 __ bind(&need_conversion);
3978 // Move the undefined value into the result register, which will
3979 // trigger conversion.
3980 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3981 __ jmp(&done);
3983 NopRuntimeCallHelper call_helper;
3984 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3986 __ bind(&done);
3987 context()->Plug(result);
3991 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3992 ZoneList<Expression*>* args = expr->arguments();
3993 DCHECK(args->length() == 2);
3995 VisitForStackValue(args->at(0));
3996 VisitForAccumulatorValue(args->at(1));
3998 Register object = ebx;
3999 Register index = eax;
4000 Register scratch = edx;
4001 Register result = eax;
4003 __ pop(object);
4005 Label need_conversion;
4006 Label index_out_of_range;
4007 Label done;
4008 StringCharAtGenerator generator(object,
4009 index,
4010 scratch,
4011 result,
4012 &need_conversion,
4013 &need_conversion,
4014 &index_out_of_range,
4015 STRING_INDEX_IS_NUMBER);
4016 generator.GenerateFast(masm_);
4017 __ jmp(&done);
4019 __ bind(&index_out_of_range);
4020 // When the index is out of range, the spec requires us to return
4021 // the empty string.
4022 __ Move(result, Immediate(isolate()->factory()->empty_string()));
4023 __ jmp(&done);
4025 __ bind(&need_conversion);
4026 // Move smi zero into the result register, which will trigger
4027 // conversion.
4028 __ Move(result, Immediate(Smi::FromInt(0)));
4029 __ jmp(&done);
4031 NopRuntimeCallHelper call_helper;
4032 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4034 __ bind(&done);
4035 context()->Plug(result);
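// Taken together, the two helpers above implement (sketch):
//   %_StringCharCodeAt(s, i) -> code unit at i, or NaN when out of range
//   %_StringCharAt(s, i)     -> one-character string, or "" out of range
// The need_conversion paths seed the result register with a sentinel
// (undefined resp. smi 0) before falling into GenerateSlow, which uses it
// to drive the conversion.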
4039 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4040 ZoneList<Expression*>* args = expr->arguments();
4041 DCHECK_EQ(2, args->length());
4042 VisitForStackValue(args->at(0));
4043 VisitForAccumulatorValue(args->at(1));
4045 __ pop(edx);
4046 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4047 __ CallStub(&stub);
4048 context()->Plug(eax);
4052 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4053 ZoneList<Expression*>* args = expr->arguments();
4054 DCHECK(args->length() >= 2);
4056 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4057 for (int i = 0; i < arg_count + 1; ++i) {
4058 VisitForStackValue(args->at(i));
4060 VisitForAccumulatorValue(args->last()); // Function.
4062 Label runtime, done;
4063 // Check for non-function argument (including proxy).
4064 __ JumpIfSmi(eax, &runtime);
4065 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4066 __ j(not_equal, &runtime);
4068 // InvokeFunction requires the function in edi. Move it in there.
4069 __ mov(edi, result_register());
4070 ParameterCount count(arg_count);
4071 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
4072 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4073 __ jmp(&done);
4075 __ bind(&runtime);
4076 __ push(eax);
4077 __ CallRuntime(Runtime::kCall, args->length());
4078 __ bind(&done);
4080 context()->Plug(eax);
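// %_CallFunction(receiver, arg1, ..., function) in outline (sketch):
//   if (IsJSFunction(function)) {
//     InvokeFunction(function, arg_count);  // fast path, function in edi
//   } else {
//     Runtime::kCall(...);                  // e.g. function proxies
//   }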
4084 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4085 ZoneList<Expression*>* args = expr->arguments();
4086 DCHECK(args->length() == 2);
4088 // new.target
4089 VisitForStackValue(args->at(0));
4091 // .this_function
4092 VisitForStackValue(args->at(1));
4093 __ CallRuntime(Runtime::kGetPrototype, 1);
4094 __ push(result_register());
4096 // Load original constructor into ecx.
4097 __ mov(ecx, Operand(esp, 1 * kPointerSize));
4099 // Check if the calling frame is an arguments adaptor frame.
4100 Label adaptor_frame, args_set_up, runtime;
4101 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4102 __ mov(ebx, Operand(edx, StandardFrameConstants::kContextOffset));
4103 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4104 __ j(equal, &adaptor_frame);
4105 // default constructor has no arguments, so no adaptor frame means no args.
4106 __ mov(eax, Immediate(0));
4107 __ jmp(&args_set_up);
4109 // Copy arguments from adaptor frame.
4111 __ bind(&adaptor_frame);
4112 __ mov(ebx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4113 __ SmiUntag(ebx);
4115 __ mov(eax, ebx);
4116 __ lea(edx, Operand(edx, ebx, times_pointer_size,
4117 StandardFrameConstants::kCallerSPOffset));
4118 Label loop;
4119 __ bind(&loop);
4120 __ push(Operand(edx, -1 * kPointerSize));
4121 __ sub(edx, Immediate(kPointerSize));
4122 __ dec(ebx);
4123 __ j(not_zero, &loop);
4126 __ bind(&args_set_up);
4128 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4129 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4130 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4131 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4133 __ Drop(1);
4135 context()->Plug(eax);
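// The adaptor-frame copy above, in C-like terms (sketch):
//   n    = adaptor->length;                       // actual argc, untagged
//   from = adaptor_fp + kCallerSPOffset + n * kPointerSize;
//   do { push(*(from -= kPointerSize)); } while (--n != 0);
// so the arguments land on the new stack in their original order, with eax
// holding the argument count for the construct stub.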
4139 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4140 // Load the arguments on the stack and call the stub.
4141 RegExpConstructResultStub stub(isolate());
4142 ZoneList<Expression*>* args = expr->arguments();
4143 DCHECK(args->length() == 3);
4144 VisitForStackValue(args->at(0));
4145 VisitForStackValue(args->at(1));
4146 VisitForAccumulatorValue(args->at(2));
4147 __ pop(ebx);
4148 __ pop(ecx);
4149 __ CallStub(&stub);
4150 context()->Plug(eax);
4154 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4155 ZoneList<Expression*>* args = expr->arguments();
4156 DCHECK(args->length() == 1);
4158 VisitForAccumulatorValue(args->at(0));
4160 __ AssertString(eax);
4162 Label materialize_true, materialize_false;
4163 Label* if_true = NULL;
4164 Label* if_false = NULL;
4165 Label* fall_through = NULL;
4166 context()->PrepareTest(&materialize_true, &materialize_false,
4167 &if_true, &if_false, &fall_through);
4169 __ test(FieldOperand(eax, String::kHashFieldOffset),
4170 Immediate(String::kContainsCachedArrayIndexMask));
4171 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4172 Split(zero, if_true, if_false, fall_through);
4174 context()->Plug(if_true, if_false);
4178 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4179 ZoneList<Expression*>* args = expr->arguments();
4180 DCHECK(args->length() == 1);
4181 VisitForAccumulatorValue(args->at(0));
4183 __ AssertString(eax);
4185 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
4186 __ IndexFromHash(eax, eax);
4188 context()->Plug(eax);
4192 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4193 Label bailout, done, one_char_separator, long_separator,
4194 non_trivial_array, not_size_one_array, loop,
4195 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4197 ZoneList<Expression*>* args = expr->arguments();
4198 DCHECK(args->length() == 2);
4199 // We will leave the separator on the stack until the end of the function.
4200 VisitForStackValue(args->at(1));
4201 // Load this to eax (= array)
4202 VisitForAccumulatorValue(args->at(0));
4203 // All aliases of the same register have disjoint lifetimes.
4204 Register array = eax;
4205 Register elements = no_reg; // Will be eax.
4207 Register index = edx;
4209 Register string_length = ecx;
4211 Register string = esi;
4213 Register scratch = ebx;
4215 Register array_length = edi;
4216 Register result_pos = no_reg; // Will be edi.
4218 // Separator operand is already pushed.
4219 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4220 Operand result_operand = Operand(esp, 1 * kPointerSize);
4221 Operand array_length_operand = Operand(esp, 0);
4222 __ sub(esp, Immediate(2 * kPointerSize));
4223 __ cld();
4224 // Check that the array is a JSArray
4225 __ JumpIfSmi(array, &bailout);
4226 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4227 __ j(not_equal, &bailout);
4229 // Check that the array has fast elements.
4230 __ CheckFastElements(scratch, &bailout);
4232 // If the array has length zero, return the empty string.
4233 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4234 __ SmiUntag(array_length);
4235 __ j(not_zero, &non_trivial_array);
4236 __ mov(result_operand, isolate()->factory()->empty_string());
4237 __ jmp(&done);
4239 // Save the array length.
4240 __ bind(&non_trivial_array);
4241 __ mov(array_length_operand, array_length);
4243 // Save the FixedArray containing array's elements.
4244 // End of array's live range.
4245 elements = array;
4246 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4247 array = no_reg;
4250 // Check that all array elements are sequential one-byte strings, and
4251 // accumulate the sum of their lengths, as a smi-encoded value.
4252 __ Move(index, Immediate(0));
4253 __ Move(string_length, Immediate(0));
4254 // Loop condition: while (index < length).
4255 // Live loop registers: index, array_length, string,
4256 // scratch, string_length, elements.
4257 if (generate_debug_code_) {
4258 __ cmp(index, array_length);
4259 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4261 __ bind(&loop);
4262 __ mov(string, FieldOperand(elements,
4263 index,
4264 times_pointer_size,
4265 FixedArray::kHeaderSize));
4266 __ JumpIfSmi(string, &bailout);
4267 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4268 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4269 __ and_(scratch, Immediate(
4270 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4271 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4272 __ j(not_equal, &bailout);
4273 __ add(string_length,
4274 FieldOperand(string, SeqOneByteString::kLengthOffset));
4275 __ j(overflow, &bailout);
4276 __ add(index, Immediate(1));
4277 __ cmp(index, array_length);
4278 __ j(less, &loop);
4280 // If array_length is 1, return elements[0], a string.
4281 __ cmp(array_length, 1);
4282 __ j(not_equal, &not_size_one_array);
4283 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4284 __ mov(result_operand, scratch);
4285 __ jmp(&done);
4287 __ bind(&not_size_one_array);
4289 // End of array_length live range.
4290 result_pos = array_length;
4291 array_length = no_reg;
4293 // Live registers:
4294 // string_length: Sum of string lengths, as a smi.
4295 // elements: FixedArray of strings.
4297 // Check that the separator is a flat one-byte string.
4298 __ mov(string, separator_operand);
4299 __ JumpIfSmi(string, &bailout);
4300 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4301 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4302 __ and_(scratch, Immediate(
4303 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4304 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4305 __ j(not_equal, &bailout);
4307 // Add (separator length times array_length) - separator length
4308 // to string_length.
4309 __ mov(scratch, separator_operand);
4310 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4311 __ sub(string_length, scratch); // May be negative, temporarily.
4312 __ imul(scratch, array_length_operand);
4313 __ j(overflow, &bailout);
4314 __ add(string_length, scratch);
4315 __ j(overflow, &bailout);
4317 __ shr(string_length, 1);
4318 // Live registers and stack values:
4319 //   string_length
4320 //   elements
4321 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4322 &bailout);
4323 __ mov(result_operand, result_pos);
4324 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4327 __ mov(string, separator_operand);
4328 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4329 Immediate(Smi::FromInt(1)));
4330 __ j(equal, &one_char_separator);
4331 __ j(greater, &long_separator);
4334 // Empty separator case
4335 __ mov(index, Immediate(0));
4336 __ jmp(&loop_1_condition);
4337 // Loop condition: while (index < length).
4338 __ bind(&loop_1);
4339 // Each iteration of the loop concatenates one string to the result.
4340 // Live values in registers:
4341 // index: which element of the elements array we are adding to the result.
4342 // result_pos: the position to which we are currently copying characters.
4343 // elements: the FixedArray of strings we are joining.
4345 // Get string = array[index].
4346 __ mov(string, FieldOperand(elements, index,
4347 times_pointer_size,
4348 FixedArray::kHeaderSize));
4349 __ mov(string_length,
4350 FieldOperand(string, String::kLengthOffset));
4351 __ shr(string_length, 1);
4352 __ lea(string,
4353 FieldOperand(string, SeqOneByteString::kHeaderSize));
4354 __ CopyBytes(string, result_pos, string_length, scratch);
4355 __ add(index, Immediate(1));
4356 __ bind(&loop_1_condition);
4357 __ cmp(index, array_length_operand);
4358 __ j(less, &loop_1); // End while (index < length).
4359 __ jmp(&done);
4363 // One-character separator case
4364 __ bind(&one_char_separator);
4365 // Replace separator with its one-byte character value.
4366 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4367 __ mov_b(separator_operand, scratch);
4369 __ Move(index, Immediate(0));
4370 // Jump into the loop after the code that copies the separator, so the first
4371 // element is not preceded by a separator
4372 __ jmp(&loop_2_entry);
4373 // Loop condition: while (index < length).
4374 __ bind(&loop_2);
4375 // Each iteration of the loop concatenates one string to the result.
4376 // Live values in registers:
4377 // index: which element of the elements array we are adding to the result.
4378 // result_pos: the position to which we are currently copying characters.
4380 // Copy the separator character to the result.
4381 __ mov_b(scratch, separator_operand);
4382 __ mov_b(Operand(result_pos, 0), scratch);
4383 __ inc(result_pos);
4385 __ bind(&loop_2_entry);
4386 // Get string = array[index].
4387 __ mov(string, FieldOperand(elements, index,
4388 times_pointer_size,
4389 FixedArray::kHeaderSize));
4390 __ mov(string_length,
4391 FieldOperand(string, String::kLengthOffset));
4392 __ shr(string_length, 1);
4393 __ lea(string,
4394 FieldOperand(string, SeqOneByteString::kHeaderSize));
4395 __ CopyBytes(string, result_pos, string_length, scratch);
4396 __ add(index, Immediate(1));
4398 __ cmp(index, array_length_operand);
4399 __ j(less, &loop_2); // End while (index < length).
4400 __ jmp(&done);
4403 // Long separator case (separator is more than one character).
4404 __ bind(&long_separator);
4406 __ Move(index, Immediate(0));
4407 // Jump into the loop after the code that copies the separator, so the first
4408 // element is not preceded by a separator
4409 __ jmp(&loop_3_entry);
4410 // Loop condition: while (index < length).
4411 __ bind(&loop_3);
4412 // Each iteration of the loop concatenates one string to the result.
4413 // Live values in registers:
4414 // index: which element of the elements array we are adding to the result.
4415 // result_pos: the position to which we are currently copying characters.
4417 // Copy the separator to the result.
4418 __ mov(string, separator_operand);
4419 __ mov(string_length,
4420 FieldOperand(string, String::kLengthOffset));
4421 __ shr(string_length, 1);
4422 __ lea(string,
4423 FieldOperand(string, SeqOneByteString::kHeaderSize));
4424 __ CopyBytes(string, result_pos, string_length, scratch);
4426 __ bind(&loop_3_entry);
4427 // Get string = array[index].
4428 __ mov(string, FieldOperand(elements, index,
4429 times_pointer_size,
4430 FixedArray::kHeaderSize));
4431 __ mov(string_length,
4432 FieldOperand(string, String::kLengthOffset));
4433 __ shr(string_length, 1);
4434 __ lea(string,
4435 FieldOperand(string, SeqOneByteString::kHeaderSize));
4436 __ CopyBytes(string, result_pos, string_length, scratch);
4437 __ add(index, Immediate(1));
4439 __ cmp(index, array_length_operand);
4440 __ j(less, &loop_3); // End while (index < length).
4441 __ jmp(&done);
4444 __ bind(&bailout);
4445 __ mov(result_operand, isolate()->factory()->undefined_value());
4446 __ bind(&done);
4447 __ mov(eax, result_operand);
4448 // Drop temp values from the stack, and restore context register.
4449 __ add(esp, Immediate(3 * kPointerSize));
4451 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4452 context()->Plug(eax);
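// The whole fast join above, in JS-level terms (a sketch; "bailout" means
// give up and let the caller take the generic runtime path instead):
//
//   function FastOneByteJoin(array, sep) {
//     if (!IsFastArrayOfSeqOneByteStrings(array) ||
//         !IsSeqOneByteString(sep)) return undefined;     // bailout
//     if (array.length == 0) return "";
//     if (array.length == 1) return array[0];
//     var total = sum_of_lengths + sep.length * (array.length - 1);
//     var result = AllocateOneByteString(total);
//     // one of three copy loops: empty / one-char / longer separator
//     return result;
//   }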
4456 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4457 DCHECK(expr->arguments()->length() == 0);
4458 ExternalReference debug_is_active =
4459 ExternalReference::debug_is_active_address(isolate());
4460 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4461 __ SmiTag(eax);
4462 context()->Plug(eax);
4466 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4467 // Push the builtins object as receiver.
4468 __ mov(eax, GlobalObjectOperand());
4469 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4471 // Load the function from the receiver.
4472 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4473 __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
4474 __ mov(LoadDescriptor::SlotRegister(),
4475 Immediate(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4476 CallLoadIC(NOT_INSIDE_TYPEOF);
4480 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4481 ZoneList<Expression*>* args = expr->arguments();
4482 int arg_count = args->length();
4484 SetCallPosition(expr, arg_count);
4485 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4486 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4487 __ CallStub(&stub);
4491 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4492 ZoneList<Expression*>* args = expr->arguments();
4493 int arg_count = args->length();
4495 if (expr->is_jsruntime()) {
4496 Comment cmnt(masm_, "[ CallRuntime");
4497 EmitLoadJSRuntimeFunction(expr);
4499 // Push the target function under the receiver.
4500 __ push(Operand(esp, 0));
4501 __ mov(Operand(esp, kPointerSize), eax);
4503 // Push the arguments ("left-to-right").
4504 for (int i = 0; i < arg_count; i++) {
4505 VisitForStackValue(args->at(i));
4508 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4509 EmitCallJSRuntimeFunction(expr);
4511 // Restore context register.
4512 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4513 context()->DropAndPlug(1, eax);
4514 } else {
4516 const Runtime::Function* function = expr->function();
4517 switch (function->function_id) {
4518 #define CALL_INTRINSIC_GENERATOR(Name) \
4519 case Runtime::kInline##Name: { \
4520 Comment cmnt(masm_, "[ Inline" #Name); \
4521 return Emit##Name(expr); \
4523 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4524 #undef CALL_INTRINSIC_GENERATOR
4525 default: {
4526 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4527 // Push the arguments ("left-to-right").
4528 for (int i = 0; i < arg_count; i++) {
4529 VisitForStackValue(args->at(i));
4532 // Call the C runtime function.
4533 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4534 __ CallRuntime(expr->function(), arg_count);
4535 context()->Plug(eax);
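// Dispatch summary for VisitCallRuntime (sketch):
//   %foo(...)   is_jsruntime: load builtins.foo via the load IC and call
//               it as an ordinary JS function (CallFunctionStub);
//   %_foo(...)  matched by FOR_EACH_FULL_CODE_INTRINSIC: fully inlined by
//               the corresponding EmitFoo() generator above;
//   otherwise   arguments are pushed and the C++ runtime is entered via
//               __ CallRuntime.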
4542 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4543 switch (expr->op()) {
4544 case Token::DELETE: {
4545 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4546 Property* property = expr->expression()->AsProperty();
4547 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4549 if (property != NULL) {
4550 VisitForStackValue(property->obj());
4551 VisitForStackValue(property->key());
4552 __ CallRuntime(is_strict(language_mode())
4553 ? Runtime::kDeleteProperty_Strict
4554 : Runtime::kDeleteProperty_Sloppy,
4555 2);
4556 context()->Plug(eax);
4557 } else if (proxy != NULL) {
4558 Variable* var = proxy->var();
4559 // Delete of an unqualified identifier is disallowed in strict mode but
4560 // "delete this" is allowed.
4561 bool is_this = var->HasThisName(isolate());
4562 DCHECK(is_sloppy(language_mode()) || is_this);
4563 if (var->IsUnallocatedOrGlobalSlot()) {
4564 __ push(GlobalObjectOperand());
4565 __ push(Immediate(var->name()));
4566 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4567 context()->Plug(eax);
4568 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4569 // Result of deleting non-global variables is false. 'this' is
4570 // not really a variable, though we implement it as one. The
4571 // subexpression does not have side effects.
4572 context()->Plug(is_this);
4573 } else {
4574 // Non-global variable. Call the runtime to try to delete from the
4575 // context where the variable was introduced.
4576 __ push(context_register());
4577 __ push(Immediate(var->name()));
4578 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4579 context()->Plug(eax);
4581 } else {
4582 // Result of deleting non-property, non-variable reference is true.
4583 // The subexpression may have side effects.
4584 VisitForEffect(expr->expression());
4585 context()->Plug(true);
4587 break;
4590 case Token::VOID: {
4591 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4592 VisitForEffect(expr->expression());
4593 context()->Plug(isolate()->factory()->undefined_value());
4594 break;
4597 case Token::NOT: {
4598 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4599 if (context()->IsEffect()) {
4600 // Unary NOT has no side effects so it's only necessary to visit the
4601 // subexpression. Match the optimizing compiler by not branching.
4602 VisitForEffect(expr->expression());
4603 } else if (context()->IsTest()) {
4604 const TestContext* test = TestContext::cast(context());
4605 // The labels are swapped for the recursive call.
4606 VisitForControl(expr->expression(),
4607 test->false_label(),
4608 test->true_label(),
4609 test->fall_through());
4610 context()->Plug(test->true_label(), test->false_label());
4612 // We handle value contexts explicitly rather than simply visiting
4613 // for control and plugging the control flow into the context,
4614 // because we need to prepare a pair of extra administrative AST ids
4615 // for the optimizing compiler.
4616 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4617 Label materialize_true, materialize_false, done;
4618 VisitForControl(expr->expression(),
4619 &materialize_true,
4620 &materialize_false,
4621 &materialize_true);
4622 __ bind(&materialize_true);
4623 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4624 if (context()->IsAccumulatorValue()) {
4625 __ mov(eax, isolate()->factory()->true_value());
4626 } else {
4627 __ Push(isolate()->factory()->true_value());
4629 __ jmp(&done, Label::kNear);
4630 __ bind(&materialize_false);
4631 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4632 if (context()->IsAccumulatorValue()) {
4633 __ mov(eax, isolate()->factory()->false_value());
4634 } else {
4635 __ Push(isolate()->factory()->false_value());
4637 __ bind(&done);
4639 break;
4642 case Token::TYPEOF: {
4643 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4645 AccumulatorValueContext context(this);
4646 VisitForTypeofValue(expr->expression());
4648 __ mov(ebx, eax);
4649 TypeofStub typeof_stub(isolate());
4650 __ CallStub(&typeof_stub);
4651 context()->Plug(eax);
4652 break;
4661 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4662 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4664 Comment cmnt(masm_, "[ CountOperation");
4666 Property* prop = expr->expression()->AsProperty();
4667 LhsKind assign_type = Property::GetAssignType(prop);
4669 // Evaluate expression and get value.
4670 if (assign_type == VARIABLE) {
4671 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4672 AccumulatorValueContext context(this);
4673 EmitVariableLoad(expr->expression()->AsVariableProxy());
4674 } else {
4675 // Reserve space for result of postfix operation.
4676 if (expr->is_postfix() && !context()->IsEffect()) {
4677 __ push(Immediate(Smi::FromInt(0)));
4679 switch (assign_type) {
4680 case NAMED_PROPERTY: {
4681 // Put the object both on the stack and in the register.
4682 VisitForStackValue(prop->obj());
4683 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4684 EmitNamedPropertyLoad(prop);
4685 break;
4688 case NAMED_SUPER_PROPERTY: {
4689 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4690 VisitForAccumulatorValue(
4691 prop->obj()->AsSuperPropertyReference()->home_object());
4692 __ push(result_register());
4693 __ push(MemOperand(esp, kPointerSize));
4694 __ push(result_register());
4695 EmitNamedSuperPropertyLoad(prop);
4696 break;
4699 case KEYED_SUPER_PROPERTY: {
4700 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4701 VisitForStackValue(
4702 prop->obj()->AsSuperPropertyReference()->home_object());
4703 VisitForAccumulatorValue(prop->key());
4704 __ push(result_register());
4705 __ push(MemOperand(esp, 2 * kPointerSize));
4706 __ push(MemOperand(esp, 2 * kPointerSize));
4707 __ push(result_register());
4708 EmitKeyedSuperPropertyLoad(prop);
4709 break;
4712 case KEYED_PROPERTY: {
4713 VisitForStackValue(prop->obj());
4714 VisitForStackValue(prop->key());
4715 __ mov(LoadDescriptor::ReceiverRegister(),
4716 Operand(esp, kPointerSize)); // Object.
4717 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4718 EmitKeyedPropertyLoad(prop);
4719 break;
4722 case VARIABLE:
4723 UNREACHABLE();
4727 // We need a second deoptimization point after loading the value
4728 // in case evaluating the property load may have a side effect.
4729 if (assign_type == VARIABLE) {
4730 PrepareForBailout(expr->expression(), TOS_REG);
4731 } else {
4732 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4735 // Inline smi case if we are in a loop.
4736 Label done, stub_call;
4737 JumpPatchSite patch_site(masm_);
4738 if (ShouldInlineSmiCase(expr->op())) {
4739 Label slow;
4740 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4742 // Save result for postfix expressions.
4743 if (expr->is_postfix()) {
4744 if (!context()->IsEffect()) {
4745 // Save the result on the stack. If we have a named or keyed property
4746 // we store the result under the receiver that is currently on top
4747 // of the stack.
4748 switch (assign_type) {
4749 case VARIABLE:
4750 __ push(eax);
4751 break;
4752 case NAMED_PROPERTY:
4753 __ mov(Operand(esp, kPointerSize), eax);
4754 break;
4755 case NAMED_SUPER_PROPERTY:
4756 __ mov(Operand(esp, 2 * kPointerSize), eax);
4757 break;
4758 case KEYED_PROPERTY:
4759 __ mov(Operand(esp, 2 * kPointerSize), eax);
4760 break;
4761 case KEYED_SUPER_PROPERTY:
4762 __ mov(Operand(esp, 3 * kPointerSize), eax);
4763 break;
4768 if (expr->op() == Token::INC) {
4769 __ add(eax, Immediate(Smi::FromInt(1)));
4770 } else {
4771 __ sub(eax, Immediate(Smi::FromInt(1)));
4773 __ j(no_overflow, &done, Label::kNear);
4774 // Call stub. Undo operation first.
4775 if (expr->op() == Token::INC) {
4776 __ sub(eax, Immediate(Smi::FromInt(1)));
4777 } else {
4778 __ add(eax, Immediate(Smi::FromInt(1)));
4780 __ jmp(&stub_call, Label::kNear);
4781 __ bind(&slow);
4783 if (!is_strong(language_mode())) {
4784 ToNumberStub convert_stub(isolate());
4785 __ CallStub(&convert_stub);
4786 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4789 // Save result for postfix expressions.
4790 if (expr->is_postfix()) {
4791 if (!context()->IsEffect()) {
4792 // Save the result on the stack. If we have a named or keyed property
4793 // we store the result under the receiver that is currently on top
4794 // of the stack.
4795 switch (assign_type) {
4796 case VARIABLE:
4797 __ push(eax);
4798 break;
4799 case NAMED_PROPERTY:
4800 __ mov(Operand(esp, kPointerSize), eax);
4801 break;
4802 case NAMED_SUPER_PROPERTY:
4803 __ mov(Operand(esp, 2 * kPointerSize), eax);
4804 break;
4805 case KEYED_PROPERTY:
4806 __ mov(Operand(esp, 2 * kPointerSize), eax);
4807 break;
4808 case KEYED_SUPER_PROPERTY:
4809 __ mov(Operand(esp, 3 * kPointerSize), eax);
4810 break;
4815 SetExpressionPosition(expr);
4817 // Call stub for +1/-1.
4818 __ bind(&stub_call);
4819 __ mov(edx, eax);
4820 __ mov(eax, Immediate(Smi::FromInt(1)));
4821 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4822 strength(language_mode())).code();
4823 CallIC(code, expr->CountBinOpFeedbackId());
4824 patch_site.EmitPatchInfo();
4826 __ bind(&done);
4827 if (is_strong(language_mode())) {
4828 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4830 // Store the value returned in eax.
4831 switch (assign_type) {
4832 case VARIABLE:
4833 if (expr->is_postfix()) {
4834 // Perform the assignment as if via '='.
4835 { EffectContext context(this);
4836 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4837 Token::ASSIGN, expr->CountSlot());
4838 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4839 context.Plug(eax);
4841 // For all contexts except EffectContext, we have the result on
4842 // top of the stack.
4843 if (!context()->IsEffect()) {
4844 context()->PlugTOS();
4846 } else {
4847 // Perform the assignment as if via '='.
4848 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4849 Token::ASSIGN, expr->CountSlot());
4850 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4851 context()->Plug(eax);
4853 break;
4854 case NAMED_PROPERTY: {
4855 __ mov(StoreDescriptor::NameRegister(),
4856 prop->key()->AsLiteral()->value());
4857 __ pop(StoreDescriptor::ReceiverRegister());
4858 if (FLAG_vector_stores) {
4859 EmitLoadStoreICSlot(expr->CountSlot());
4860 CallStoreIC();
4861 } else {
4862 CallStoreIC(expr->CountStoreFeedbackId());
4864 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4865 if (expr->is_postfix()) {
4866 if (!context()->IsEffect()) {
4867 context()->PlugTOS();
4869 } else {
4870 context()->Plug(eax);
4872 break;
4874 case NAMED_SUPER_PROPERTY: {
4875 EmitNamedSuperPropertyStore(prop);
4876 if (expr->is_postfix()) {
4877 if (!context()->IsEffect()) {
4878 context()->PlugTOS();
4880 } else {
4881 context()->Plug(eax);
4883 break;
4885 case KEYED_SUPER_PROPERTY: {
4886 EmitKeyedSuperPropertyStore(prop);
4887 if (expr->is_postfix()) {
4888 if (!context()->IsEffect()) {
4889 context()->PlugTOS();
4891 } else {
4892 context()->Plug(eax);
4894 break;
4896 case KEYED_PROPERTY: {
4897 __ pop(StoreDescriptor::NameRegister());
4898 __ pop(StoreDescriptor::ReceiverRegister());
4899 Handle<Code> ic =
4900 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4901 if (FLAG_vector_stores) {
4902 EmitLoadStoreICSlot(expr->CountSlot());
4903 CallIC(ic);
4904 } else {
4905 CallIC(ic, expr->CountStoreFeedbackId());
4907 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4908 if (expr->is_postfix()) {
4909 // Result is on the stack
4910 if (!context()->IsEffect()) {
4911 context()->PlugTOS();
4913 } else {
4914 context()->Plug(eax);
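// VisitCountOperation in outline (sketch):
//   1. load the old value (variable or property path);
//   2. for postfix in a value context, stash the old value under the
//      receiver/key already on the stack;
//   3. smi fast path: add/sub Smi(1) inline; on overflow or non-smi, undo
//      and fall through to the BinaryOpIC stub (patchable JumpPatchSite);
//   4. store back through the same VARIABLE/NAMED/KEYED paths as '='.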
4922 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4923 Expression* sub_expr,
4924 Handle<String> check) {
4925 Label materialize_true, materialize_false;
4926 Label* if_true = NULL;
4927 Label* if_false = NULL;
4928 Label* fall_through = NULL;
4929 context()->PrepareTest(&materialize_true, &materialize_false,
4930 &if_true, &if_false, &fall_through);
4932 { AccumulatorValueContext context(this);
4933 VisitForTypeofValue(sub_expr);
4935 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4937 Factory* factory = isolate()->factory();
4938 if (String::Equals(check, factory->number_string())) {
4939 __ JumpIfSmi(eax, if_true);
4940 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4941 isolate()->factory()->heap_number_map());
4942 Split(equal, if_true, if_false, fall_through);
4943 } else if (String::Equals(check, factory->string_string())) {
4944 __ JumpIfSmi(eax, if_false);
4945 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4946 __ j(above_equal, if_false);
4947 // Check for undetectable objects => false.
4948 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4949 1 << Map::kIsUndetectable);
4950 Split(zero, if_true, if_false, fall_through);
4951 } else if (String::Equals(check, factory->symbol_string())) {
4952 __ JumpIfSmi(eax, if_false);
4953 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4954 Split(equal, if_true, if_false, fall_through);
4955 } else if (String::Equals(check, factory->float32x4_string())) {
4956 __ JumpIfSmi(eax, if_false);
4957 __ CmpObjectType(eax, FLOAT32X4_TYPE, edx);
4958 Split(equal, if_true, if_false, fall_through);
4959 } else if (String::Equals(check, factory->int32x4_string())) {
4960 __ JumpIfSmi(eax, if_false);
4961 __ CmpObjectType(eax, INT32X4_TYPE, edx);
4962 Split(equal, if_true, if_false, fall_through);
4963 } else if (String::Equals(check, factory->bool32x4_string())) {
4964 __ JumpIfSmi(eax, if_false);
4965 __ CmpObjectType(eax, BOOL32X4_TYPE, edx);
4966 Split(equal, if_true, if_false, fall_through);
4967 } else if (String::Equals(check, factory->int16x8_string())) {
4968 __ JumpIfSmi(eax, if_false);
4969 __ CmpObjectType(eax, INT16X8_TYPE, edx);
4970 Split(equal, if_true, if_false, fall_through);
4971 } else if (String::Equals(check, factory->bool16x8_string())) {
4972 __ JumpIfSmi(eax, if_false);
4973 __ CmpObjectType(eax, BOOL16X8_TYPE, edx);
4974 Split(equal, if_true, if_false, fall_through);
4975 } else if (String::Equals(check, factory->int8x16_string())) {
4976 __ JumpIfSmi(eax, if_false);
4977 __ CmpObjectType(eax, INT8X16_TYPE, edx);
4978 Split(equal, if_true, if_false, fall_through);
4979 } else if (String::Equals(check, factory->bool8x16_string())) {
4980 __ JumpIfSmi(eax, if_false);
4981 __ CmpObjectType(eax, BOOL8X16_TYPE, edx);
4982 Split(equal, if_true, if_false, fall_through);
4983 } else if (String::Equals(check, factory->boolean_string())) {
4984 __ cmp(eax, isolate()->factory()->true_value());
4985 __ j(equal, if_true);
4986 __ cmp(eax, isolate()->factory()->false_value());
4987 Split(equal, if_true, if_false, fall_through);
4988 } else if (String::Equals(check, factory->undefined_string())) {
4989 __ cmp(eax, isolate()->factory()->undefined_value());
4990 __ j(equal, if_true);
4991 __ JumpIfSmi(eax, if_false);
4992 // Check for undetectable objects => true.
4993 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4994 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4995 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4996 Split(not_zero, if_true, if_false, fall_through);
4997 } else if (String::Equals(check, factory->function_string())) {
4998 __ JumpIfSmi(eax, if_false);
4999 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5000 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
5001 __ j(equal, if_true);
5002 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
5003 Split(equal, if_true, if_false, fall_through);
5004 } else if (String::Equals(check, factory->object_string())) {
5005 __ JumpIfSmi(eax, if_false);
5006 __ cmp(eax, isolate()->factory()->null_value());
5007 __ j(equal, if_true);
5008 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
5009 __ j(below, if_false);
5010 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5011 __ j(above, if_false);
5012 // Check for undetectable objects => false.
5013 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5014 1 << Map::kIsUndetectable);
5015 Split(zero, if_true, if_false, fall_through);
5016 } else {
5017 if (if_false != fall_through) __ jmp(if_false);
5019 context()->Plug(if_true, if_false);
5023 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5024 Comment cmnt(masm_, "[ CompareOperation");
5025 SetExpressionPosition(expr);
5027 // First we try a fast inlined version of the compare when one of
5028 // the operands is a literal.
5029 if (TryLiteralCompare(expr)) return;
5031 // Always perform the comparison for its control flow. Pack the result
5032 // into the expression's context after the comparison is performed.
5033 Label materialize_true, materialize_false;
5034 Label* if_true = NULL;
5035 Label* if_false = NULL;
5036 Label* fall_through = NULL;
5037 context()->PrepareTest(&materialize_true, &materialize_false,
5038 &if_true, &if_false, &fall_through);
5040 Token::Value op = expr->op();
5041 VisitForStackValue(expr->left());
5042 switch (op) {
5043 case Token::IN:
5044 VisitForStackValue(expr->right());
5045 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5046 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5047 __ cmp(eax, isolate()->factory()->true_value());
5048 Split(equal, if_true, if_false, fall_through);
5049 break;
5051 case Token::INSTANCEOF: {
5052 VisitForStackValue(expr->right());
5053 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5054 __ CallStub(&stub);
5055 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5056 __ test(eax, eax);
5057 // The stub returns 0 for true.
5058 Split(zero, if_true, if_false, fall_through);
5059 break;
5062 default: {
5063 VisitForAccumulatorValue(expr->right());
5064 Condition cc = CompareIC::ComputeCondition(op);
5065 __ pop(edx);
5067 bool inline_smi_code = ShouldInlineSmiCase(op);
5068 JumpPatchSite patch_site(masm_);
5069 if (inline_smi_code) {
5070 Label slow_case;
5071 __ mov(ecx, edx);
5072 __ or_(ecx, eax);
5073 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5074 __ cmp(edx, eax);
5075 Split(cc, if_true, if_false, NULL);
5076 __ bind(&slow_case);
5079 Handle<Code> ic = CodeFactory::CompareIC(
5080 isolate(), op, strength(language_mode())).code();
5081 CallIC(ic, expr->CompareOperationFeedbackId());
5082 patch_site.EmitPatchInfo();
5084 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5085 __ test(eax, eax);
5086 Split(cc, if_true, if_false, fall_through);
5090 // Convert the result of the comparison into one expected for this
5091 // expression's context.
5092 context()->Plug(if_true, if_false);
5096 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5097 Expression* sub_expr,
5098 NilValue nil) {
5099 Label materialize_true, materialize_false;
5100 Label* if_true = NULL;
5101 Label* if_false = NULL;
5102 Label* fall_through = NULL;
5103 context()->PrepareTest(&materialize_true, &materialize_false,
5104 &if_true, &if_false, &fall_through);
5106 VisitForAccumulatorValue(sub_expr);
5107 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5109 Handle<Object> nil_value = nil == kNullValue
5110 ? isolate()->factory()->null_value()
5111 : isolate()->factory()->undefined_value();
5112 if (expr->op() == Token::EQ_STRICT) {
5113 __ cmp(eax, nil_value);
5114 Split(equal, if_true, if_false, fall_through);
5115 } else {
5116 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5117 CallIC(ic, expr->CompareOperationFeedbackId());
5118 __ test(eax, eax);
5119 Split(not_zero, if_true, if_false, fall_through);
5121 context()->Plug(if_true, if_false);
5125 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5126 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5127 context()->Plug(eax);
5131 Register FullCodeGenerator::result_register() {
5132 return eax;
5136 Register FullCodeGenerator::context_register() {
5137 return esi;
5141 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5142 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5143 __ mov(Operand(ebp, frame_offset), value);
5147 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5148 __ mov(dst, ContextOperand(esi, context_index));
5152 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5153 Scope* closure_scope = scope()->ClosureScope();
5154 if (closure_scope->is_script_scope() ||
5155 closure_scope->is_module_scope()) {
5156 // Contexts nested in the native context have a canonical empty function
5157 // as their closure, not the anonymous closure containing the global
5158 // code. Pass a smi sentinel and let the runtime look up the empty
5159 // function.
5160 __ push(Immediate(Smi::FromInt(0)));
5161 } else if (closure_scope->is_eval_scope()) {
5162 // Contexts nested inside eval code have the same closure as the context
5163 // calling eval, not the anonymous closure containing the eval code.
5164 // Fetch it from the context.
5165 __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
5166 } else {
5167 DCHECK(closure_scope->is_function_scope());
5168 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5173 // ----------------------------------------------------------------------------
5174 // Non-local control flow support.
5176 void FullCodeGenerator::EnterFinallyBlock() {
5177 // Cook return address on top of stack (smi encoded Code* delta)
5178 DCHECK(!result_register().is(edx));
5179 __ pop(edx);
5180 __ sub(edx, Immediate(masm_->CodeObject()));
5181 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5182 STATIC_ASSERT(kSmiTag == 0);
5183 __ SmiTag(edx);
5184 __ push(edx);
5186 // Store result register while executing finally block.
5187 __ push(result_register());
5189 // Store pending message while executing finally block.
5190 ExternalReference pending_message_obj =
5191 ExternalReference::address_of_pending_message_obj(isolate());
5192 __ mov(edx, Operand::StaticVariable(pending_message_obj));
5193 __ push(edx);
5195 ClearPendingMessage();
5199 void FullCodeGenerator::ExitFinallyBlock() {
5200 DCHECK(!result_register().is(edx));
5201 // Restore pending message from stack.
5202 __ pop(edx);
5203 ExternalReference pending_message_obj =
5204 ExternalReference::address_of_pending_message_obj(isolate());
5205 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5207 // Restore result register from stack.
5208 __ pop(result_register());
5210 // Uncook return address.
5211 __ pop(edx);
5212 __ SmiUntag(edx);
5213 __ add(edx, Immediate(masm_->CodeObject()));
5214 __ jmp(edx);
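// Return-address cooking in the two helpers above (sketch): the raw return
// address is turned into a smi-tagged offset from the code object's start,
//   cooked = SmiTag(return_address - CodeObject)
// so the value sitting on the stack during the finally block is a smi that
// is safe for the GC (and stays valid if the code object moves);
// ExitFinallyBlock computes the inverse before jumping.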
5218 void FullCodeGenerator::ClearPendingMessage() {
5219 DCHECK(!result_register().is(edx));
5220 ExternalReference pending_message_obj =
5221 ExternalReference::address_of_pending_message_obj(isolate());
5222 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
5223 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5227 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5228 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5229 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5230 Immediate(SmiFromSlot(slot)));
5233 #undef __
5237 static const byte kJnsInstruction = 0x79;
5238 static const byte kJnsOffset = 0x11;
5239 static const byte kNopByteOne = 0x66;
5240 static const byte kNopByteTwo = 0x90;
5241 #ifdef DEBUG
5242 static const byte kCallInstruction = 0xe8;
5243 #endif
5246 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5247 Address pc,
5248 BackEdgeState target_state,
5249 Code* replacement_code) {
5250 Address call_target_address = pc - kIntSize;
5251 Address jns_instr_address = call_target_address - 3;
5252 Address jns_offset_address = call_target_address - 2;
5254 switch (target_state) {
5255 case INTERRUPT:
5256 //     sub <profiling_counter>, <delta>  ;; Not changed
5257 //     jns ok
5258 //     call <interrupt stub>
5259 //   ok:
5260 *jns_instr_address = kJnsInstruction;
5261 *jns_offset_address = kJnsOffset;
5262 break;
5263 case ON_STACK_REPLACEMENT:
5264 case OSR_AFTER_STACK_CHECK:
5265 //     sub <profiling_counter>, <delta>  ;; Not changed
5266 //     nop
5267 //     nop
5268 //     call <on-stack replacement>
5269 //   ok:
5270 *jns_instr_address = kNopByteOne;
5271 *jns_offset_address = kNopByteTwo;
5272 break;
5275 Assembler::set_target_address_at(call_target_address,
5276 unoptimized_code,
5277 replacement_code->entry());
5278 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5279 unoptimized_code, call_target_address, replacement_code);
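// Byte layout of a back-edge check site, relative to the return address pc
// (a sketch derived from the offsets used above):
//   pc - 7: 0x79 0x11     jns <ok>        <- kJnsInstruction / kJnsOffset
//   pc - 5: 0xe8 <imm32>  call <builtin>  <- kCallInstruction, target at pc-4
//   pc:     (return address of the call)
// Patching to ON_STACK_REPLACEMENT rewrites the two jns bytes to the
// two-byte nop 0x66 0x90, making the call unconditional, and retargets the
// call; GetBackEdgeState below decodes the same bytes in reverse.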
5283 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5284 Isolate* isolate,
5285 Code* unoptimized_code,
5286 Address pc) {
5287 Address call_target_address = pc - kIntSize;
5288 Address jns_instr_address = call_target_address - 3;
5289 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5291 if (*jns_instr_address == kJnsInstruction) {
5292 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5293 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5294 Assembler::target_address_at(call_target_address,
5295 unoptimized_code));
5296 return INTERRUPT;
5299 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5300 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5302 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5303 isolate->builtins()->OnStackReplacement()->entry()) {
5304 return ON_STACK_REPLACEMENT;
5307 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5308 Assembler::target_address_at(call_target_address,
5309 unoptimized_code));
5310 return OSR_AFTER_STACK_CHECK;
5314 } // namespace internal
5315 } // namespace v8
5317 #endif // V8_TARGET_ARCH_X87