1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
23 #define __ ACCESS_MASM(masm_)
// Helper for recording the position of a smi-check jump so the IC system can
// later patch it in place: jc is patched to jz and jnc to jnz, inverting the
// sense of the test (see EmitJump below).
// NOTE(review): this listing elides several lines of the class (access
// specifiers, destructor, closing braces, some member declarations).
class JumpPatchSite BASE_EMBEDDED {
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

  // Invariant: patch info must have been emitted iff the site was bound.
  DCHECK(patch_site_.is_bound() == info_emitted_);

  // Emits a smi-tag test on |reg| and a patchable jump taken when the tag
  // bit is clear (i.e. |reg| holds a smi -> not taken; patched later).
  void EmitJumpIfNotSmi(Register reg,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.

  // Emits a smi-tag test on |reg| and a patchable jump on carry, i.e. the
  // inverse of EmitJumpIfNotSmi.
  void EmitJumpIfSmi(Register reg,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.

  // Emits the delta from the call site back to the patch site as a benign
  // test instruction so the IC can find the jump to patch; emits a nop when
  // there is no inlined smi code to patch.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      // The delta is encoded in a single byte of the test instruction.
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
      __ nop();  // Signals no inlined code.

  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    // Only the two patchable condition codes are allowed.
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);

  MacroAssembler* masm_;
82 // Generate code for a JS function. On entry to the function the receiver
83 // and arguments have been pushed on the stack left to right, with the
84 // return address on top of them. The actual argument count matches the
85 // formal parameter count expected by the function.
87 // The live registers are:
88 // o edi: the JS function object being called (i.e. ourselves)
90 // o ebp: our caller's frame pointer
91 // o esp: stack pointer (pointing to return address)
93 // The function builds a JS frame. Please see JavaScriptFrameConstants in
94 // frames-ia32.h for its layout.
// Entry point of the full (unoptimized) code generator: emits the complete
// body of one JS function — prologue, context/arguments allocation,
// declarations, stack check, body, and an implicit 'return undefined'.
// NOTE(review): this listing elides many interior lines (labels such as
// 'Label ok;', else-arms, closing braces); comments below are based on the
// visible code only.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // NOTE(review): the result of NewFixedArray is assigned to the handler
  // table in the full source; the assignment target is elided here.
  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  // Counter cell decremented on back edges / returns to trigger optimization.
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  // Debug aid: break when entering the function named by --stop-at.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  if (is_sloppy(info->language_mode()) && !info->is_native()) {
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    // Only patch the receiver when it is the undefined sentinel.
    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // For large frames, verify the pushes will not overflow the real
      // stack limit before emitting them.
      if (locals_count >= 128) {
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Unroll pushes in batches of kMaxPushes, looping for the rest.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
        __ j(not_zero, &loop_header, Label::kNear);
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    // The runtime/stub calls above clobber edi.
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
        } else if (FLAG_debug_code) {
          // New-space contexts need no barrier; verify that in debug mode.
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);

  // Possibly allocate RestParameters
  Variable* rest_param = scope()->rest_parameter(&rest_index);
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    // Push the address just past the parameters (elided push above).
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(Immediate(Smi::FromInt(num_parameters)));
    __ push(Immediate(Smi::FromInt(rest_index)));

    RestParamAccessStub stub(isolate());

    // Stub result (the rest array) lands in eax; store it into the slot.
    SetVar(rest_param, eax, ebx, edx);

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    ArgumentsAccessStub::HasNewTarget has_new_target =
        IsSubclassConstructor(info->function()->kind())
            ? ArgumentsAccessStub::HAS_NEW_TARGET
            : ArgumentsAccessStub::NO_NEW_TARGET;
    ArgumentsAccessStub stub(isolate(), type, has_new_target);

    SetVar(arguments, eax, ebx, edx);

    // --trace support: log function entry.
    __ CallRuntime(Runtime::kTraceEnter, 0);

  // Visit the declarations and body unless there is an illegal
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      VisitDeclarations(scope()->declarations());

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);

  // Always emit a 'return undefined' in case control fell off the end of
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
// Clear the accumulator register (eax) by loading the smi zero into it.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
// Subtract |delta| (as a smi) from the profiling counter cell.  The caller
// tests the resulting sign to decide whether to call the interrupt check.
// Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
// Reset the profiling counter cell back to the full interrupt budget.
// Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
// Emitted at every loop back edge: decrements the profiling counter by a
// weight proportional to the loop body size and, when the counter goes
// non-positive, calls the InterruptCheck builtin (which also serves as the
// on-stack-replacement entry).  NOTE(review): the 'Label ok;' declaration
// and bind, and the closing braces, are elided in this listing.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  DCHECK(back_edge_target->is_bound());
  // Weight scales with the code size of the loop body.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
// Emits the function's (shared) return sequence: profiling-counter
// bookkeeping, frame teardown and a Ret that pops the arguments.  The first
// caller binds return_label_; later callers just jump to it.
// NOTE(review): several lines (the else-arm, 'int weight' declaration,
// 'Label ok;', frame-teardown movs, closing braces) are elided here.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
    // Common return label
    __ bind(&return_label_);
    // --trace support: log function exit with the return value.
    __ CallRuntime(Runtime::kTraceExit, 1);
    // Pretend that the exit is a backwards jump to the entry.
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      // Otherwise weight the exit by distance from function entry.
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    EmitProfilingCounterDecrement(weight);
    __ j(positive, &ok, Label::kNear);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    EmitProfilingCounterReset();

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);

    SetSourcePosition(function()->end_position() - 1);

    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    int no_frame_start = masm_->pc_offset();

    // +1 accounts for the receiver pushed along with the parameters.
    int arg_count = info_->scope()->num_parameters() + 1;
    if (IsSubclassConstructor(info_->function()->kind())) {
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
// Effect context: the variable's value is not needed, so nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
// Accumulator context: load the variable's value into the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
// Stack context: push the variable's value onto the expression stack.
// NOTE(review): the actual push of |operand| is elided in this listing.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
// Test context: load the variable into the accumulator and branch on its
// boolean value.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
// Root-list plugs are unused on ia32 (no dedicated roots register).
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
// Root-list plugs are unused on ia32 (no dedicated roots register).
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
// Root-list plugs are unused on ia32 (no dedicated roots register).
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
// Root-list plugs are unused on ia32 (no dedicated roots register).
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
// Effect context: a literal has no side effects, so nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
// Accumulator context: load the literal into the result register.
// NOTE(review): the condition choosing SafeMove vs. Move (presumably
// immediate-protection for untrusted/smi immediates) is elided here — confirm
// against the full source.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
    __ SafeMove(result_register(), Immediate(lit));
    __ Move(result_register(), Immediate(lit));
// Stack context: push the literal onto the expression stack.
// NOTE(review): the condition choosing SafePush vs. push is elided here.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    __ SafePush(Immediate(lit));
    __ push(Immediate(lit));
// Test context: the literal's truthiness is known at compile time, so jump
// straight to the true/false label (omitting the jump when it would fall
// through).  Non-trivial literals fall back to a runtime ToBoolean test.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsSmi()) {
    // Only the zero smi is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
// Effect context: the value in |reg| is unused.
// NOTE(review): the '__ Drop(count)' discarding the stack slots is elided in
// this listing.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
// Accumulator context: discard the stack slots (drop elided in this listing)
// and move |reg| into the result register.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    Register reg) const {
  __ Move(result_register(), reg);
// Stack context: collapse |count| slots into one, then overwrite the
// remaining top-of-stack slot with |reg|.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
// Test context: discard the stack slots (drop elided in this listing), move
// |reg| into the accumulator and branch on its boolean value.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
// Effect context: both materialization labels must be the same; just bind it.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
// Accumulator context: materialize true/false into the result register at
// the respective labels.  NOTE(review): the 'Label done;' declaration and
// its bind are elided in this listing.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
// Stack context: materialize true/false onto the expression stack at the
// respective labels.  NOTE(review): the 'Label done;' declaration and its
// bind are elided in this listing.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
// Test context: the labels must already be the context's own branch targets;
// no code is needed.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
// Effect context: a boolean constant has no effect; nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
// Accumulator context: load the corresponding boolean heap value into the
// result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
// Stack context: push the corresponding boolean heap value.
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
// Test context: jump directly to the true/false label (the branch condition
// is known at compile time).  NOTE(review): the if/else around the two jumps
// is elided in this listing.
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
    if (true_label_ != fall_through_) __ jmp(true_label_);
    if (false_label_ != fall_through_) __ jmp(false_label_);
// Convert the value in the accumulator to a boolean via the ToBoolean IC and
// branch: the stub returns a nonzero value in eax for true.
// NOTE(review): the if_true/if_false parameters are elided in this listing.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
// Emit a conditional branch on |cc|, omitting whichever jump targets the
// fall-through label so the common case falls straight through.
// NOTE(review): the jumps emitted in the first branch and final else are
// elided in this listing.
void FullCodeGenerator::Split(Condition cc,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
    // Invert the condition so the single emitted jump targets if_false.
    __ j(NegateCondition(cc), if_false);
// Compute the frame operand (relative to ebp) for a stack-allocated
// variable: parameters live above the saved frame pointer, locals below.
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    // +1 skips the return address between ebp and the parameters.
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    offset += JavaScriptFrameConstants::kLocal0Offset;
  return Operand(ebp, offset);
// Compute an operand for |var|.  Context slots require walking the context
// chain into |scratch| (which is clobbered); stack slots are ebp-relative.
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  return StackOperand(var);
// Load the value of |var| into |dest|.  |dest| doubles as the scratch
// register for the context walk, so it is clobbered either way.
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
// Store |src| into |var|, emitting the write barrier when the variable lives
// in a (heap-allocated) context.  Both scratch registers are clobbered.
// NOTE(review): the scratch0/scratch1 parameter lines are elided in this
// listing.
void FullCodeGenerator::SetVar(Variable* var,
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  // The scratch registers must be distinct from the source and each other.
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    // esi holds the current context and must not be clobbered.
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
// Record a bailout point just before a test-context split.  When
// |should_normalize| the accumulator is normalized to true/false around the
// bailout so deopt sees a canonical boolean.  NOTE(review): the if_true /
// if_false parameters, 'Label skip;' and its bind are elided in this listing.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
// Debug-mode sanity check: verify the current context (esi) is neither a
// with-context nor a catch-context before emitting a declaration into it.
// Clobbers ebx.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
// Emit code (or record globals) for a variable declaration, dispatching on
// where the variable was allocated.  Hole-initialization applies to
// let/const bindings.  NOTE(review): several guard conditions (e.g. the
// hole_init checks before the stores) and closing braces are elided in this
// listing.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Globals are declared in a pairs array processed later by
      // DeclareGlobals: name followed by initial value (hole or undefined).
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());

    case Variable::PARAMETER:
    case Variable::LOCAL:
        Comment cmnt(masm_, "[ VariableDeclaration");
        // Initialize the stack slot with the hole sentinel.
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));

    case Variable::CONTEXT:
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
        __ push(Immediate(isolate()->factory()->the_hole_value()));
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emit code (or record globals) for a function declaration: compile the
// function literal and store the resulting closure into the declared slot.
// NOTE(review): closing braces, break statements and the trailing
// RecordWriteContextSlot arguments are elided in this listing.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      // Record name + SharedFunctionInfo in the globals pairs array.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      // Args for kDeclareLookupSlot: name, attributes, value (pushed by
      // VisitForStackValue).  The context push is elided in this listing.
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emit code for a module declaration: load the module's instance object out
// of the script context and store it into the declared context slot, then
// traverse into the module body.  NOTE(review): the trailing
// RecordWriteContextSlot arguments are elided in this listing.
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ModuleDescriptor* descriptor = declaration->module()->descriptor();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(descriptor->IsFrozen());
  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->ScriptScope()));
  __ mov(eax, ContextOperand(eax, descriptor->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
// Import declarations emit no code beyond the debug context check; imported
// bindings are resolved elsewhere.  NOTE(review): break statements, an
// UNREACHABLE arm and closing braces are elided in this listing.
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
// Export declarations emit no code here; the body is elided in this listing
// (presumably empty/unimplemented in the full source — confirm).
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
// Declare all recorded global variables/functions in one runtime call.
// Arguments: context, pairs array, flags.  NOTE(review): the push of the
// |pairs| argument is elided in this listing.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
// Declare all recorded modules in one runtime call, passing the collected
// module descriptions array.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
// Emit a switch statement: first compile every case-label comparison (as
// '===' against the stacked switch value, with an inline smi fast path and
// a CompareIC slow path), then compile all case bodies.  NOTE(review):
// labels such as 'slow_case' and 'skip', some loads, and closing braces are
// elided in this listing.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
    Comment cmnt(masm_, "[ Case comparison");

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: both operands smis -> compare directly.
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ jmp(clause->body_target());

    // IC result: zero flags equality of switch value and label.
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
    __ jmp(default_clause->body_target());

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
// Emits full-codegen for a for-in loop. Fast path: if the enumerable's map
// has a valid enum cache, iterate the cached property names directly.
// Slow path: fetch the names as a FixedArray from the runtime and filter
// each key against the (possibly changed) receiver. In both cases five
// stack slots are live for the loop: map-or-Smi, array, length, index
// (order per the pushes below, enumerable pushed earlier — elided here).
1065 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1066 Comment cmnt(masm_, "[ ForInStatement");
1067 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1069 SetStatementPosition(stmt);
1072 ForIn loop_statement(this, stmt);
1073 increment_loop_depth();
1075 // Get the object to enumerate over. If the object is null or undefined, skip
1076 // over the loop. See ECMA-262 version 5, section 12.6.4.
1077 SetExpressionPosition(stmt->enumerable());
1078 VisitForAccumulatorValue(stmt->enumerable());
// NOTE(review): the conditional jumps taken after these two compares are
// elided in this listing; presumably they branch to the loop exit.
1079 __ cmp(eax, isolate()->factory()->undefined_value());
1081 __ cmp(eax, isolate()->factory()->null_value());
1084 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1086 // Convert the object to a JS object.
1087 Label convert, done_convert;
1088 __ JumpIfSmi(eax, &convert, Label::kNear);
1089 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1090 __ j(above_equal, &done_convert, Label::kNear);
1093 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1094 __ bind(&done_convert);
1095 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1098 // Check for proxies.
1099 Label call_runtime, use_cache, fixed_array;
1100 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1101 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1102 __ j(below_equal, &call_runtime);
1104 // Check cache validity in generated code. This is a fast case for
1105 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1106 // guarantee cache validity, call the runtime system to check cache
1107 // validity or get the property names in a fixed array.
1108 __ CheckEnumCache(&call_runtime);
// Enum cache is valid: use the receiver's map as the iteration token.
1110 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1111 __ jmp(&use_cache, Label::kNear);
1113 // Get the set of properties to enumerate.
1114 __ bind(&call_runtime);
1116 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1117 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// The runtime returns either a map (meta-map check below) or a FixedArray
// of names; dispatch on which one we got.
1118 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1119 isolate()->factory()->meta_map());
1120 __ j(not_equal, &fixed_array);
1123 // We got a map in register eax. Get the enumeration cache from it.
1124 Label no_descriptors;
1125 __ bind(&use_cache);
1127 __ EnumLength(edx, eax);
1128 __ cmp(edx, Immediate(Smi::FromInt(0)));
1129 __ j(equal, &no_descriptors);
1131 __ LoadInstanceDescriptors(eax, ecx);
1132 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1133 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1135 // Set up the four remaining stack slots.
1136 __ push(eax); // Map.
1137 __ push(ecx); // Enumeration cache.
1138 __ push(edx); // Number of valid entries for the map in the enum cache.
1139 __ push(Immediate(Smi::FromInt(0))); // Initial index.
// No enumerable properties: drop the pushed enumerable and exit
// (the jump to the exit after this add is elided in this listing).
1142 __ bind(&no_descriptors);
1143 __ add(esp, Immediate(kPointerSize));
1146 // We got a fixed array in register eax. Iterate through that.
1148 __ bind(&fixed_array);
1150 // No need for a write barrier, we are storing a Smi in the feedback vector.
1151 __ LoadHeapObject(ebx, FeedbackVector());
1152 int vector_index = FeedbackVector()->GetIndex(slot);
1153 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
1154 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1156 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1157 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1158 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
// NOTE(review): the &non_proxy label declaration line is elided here.
1159 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1160 __ j(above, &non_proxy);
1161 __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1162 __ bind(&non_proxy);
1163 __ push(ebx); // Smi
1164 __ push(eax); // Array
1165 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1166 __ push(eax); // Fixed array length (as smi).
1167 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1169 // Generate code for doing the condition check.
1170 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1172 SetExpressionPosition(stmt->each());
// Loop head: index vs. length (both Smis, so unsigned compare is fine).
1174 __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1175 __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1176 __ j(above_equal, loop_statement.break_label());
1178 // Get the current entry of the array into register ebx.
1179 __ mov(ebx, Operand(esp, 2 * kPointerSize));
// times_2 because eax holds a Smi index (already shifted left by one).
1180 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1182 // Get the expected map from the stack or a smi in the
1183 // permanent slow case into register edx.
1184 __ mov(edx, Operand(esp, 3 * kPointerSize));
1186 // Check if the expected map still matches that of the enumerable.
1187 // If not, we may have to filter the key.
1189 __ mov(ecx, Operand(esp, 4 * kPointerSize));
1190 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1191 __ j(equal, &update_each, Label::kNear);
1193 // For proxies, no filtering is done.
1194 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1195 DCHECK(Smi::FromInt(0) == 0);
1197 __ j(zero, &update_each);
1199 // Convert the entry to a string or null if it isn't a property
1200 // anymore. If the property has been removed while iterating, we
1202 __ push(ecx); // Enumerable.
1203 __ push(ebx); // Current entry.
1204 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
// FILTER_KEY returns null (== 0 per comparison elided here) for removed
// properties; skip to the next iteration in that case.
1206 __ j(equal, loop_statement.continue_label());
1209 // Update the 'each' property or variable from the possibly filtered
1210 // entry in register ebx.
1211 __ bind(&update_each);
1212 __ mov(result_register(), ebx);
1213 // Perform the assignment as if via '='.
1214 { EffectContext context(this);
1215 EmitAssignment(stmt->each());
1216 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1219 // Generate code for the body of the loop.
1220 Visit(stmt->body());
1222 // Generate code for going to the next element by incrementing the
1223 // index (smi) stored on top of the stack.
1224 __ bind(loop_statement.continue_label());
// Adding Smi 1 to a Smi slot is a plain integer add (tag stays valid).
1225 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1227 EmitBackEdgeBookkeeping(stmt, &loop);
1230 // Remove the pointers stored on the stack.
1231 __ bind(loop_statement.break_label());
1232 __ add(esp, Immediate(5 * kPointerSize));
1234 // Exit and decrement the loop depth.
1235 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1237 decrement_loop_depth();
// Allocates a closure for |info|. Fast path uses FastNewClosureStub
// (new-space allocation, no literals cloning); otherwise falls back to
// Runtime::kNewClosure. Result ends up in eax and is plugged into the
// current expression context.
// NOTE(review): the remainder of the parameter list (presumably a
// `bool pretenure` parameter, used below) is elided in this listing.
1241 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1243 // Use the fast case closure allocation code that allocates in new
1244 // space for nested functions that don't need literals cloning. If
1245 // we're running with the --always-opt or the --prepare-always-opt
1246 // flag, we need to use the runtime function so that the new function
1247 // we are creating here gets a chance to have its code optimized and
1248 // doesn't just get a copy of the existing unoptimized code.
1249 if (!FLAG_always_opt &&
1250 !FLAG_prepare_always_opt &&
1252 scope()->is_function_scope() &&
1253 info->num_literals() == 0) {
// Fast path: stub expects the SharedFunctionInfo in ebx
// (the CallStub line following this mov is elided in the listing).
1254 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1255 __ mov(ebx, Immediate(info));
// Slow path: Runtime::kNewClosure(context, info, pretenure-flag).
// The push of the context (esi), first of the 3 args, is elided here.
1259 __ push(Immediate(info));
1260 __ push(Immediate(pretenure
1261 ? isolate()->factory()->true_value()
1262 : isolate()->factory()->false_value()));
1263 __ CallRuntime(Runtime::kNewClosure, 3);
1265 context()->Plug(eax);
// A variable reference in expression position is simply a variable load;
// EmitVariableLoad dispatches on the variable's location (global, stack,
// context, or dynamic lookup).
1269 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1270 Comment cmnt(masm_, "[ VariableProxy");
1271 EmitVariableLoad(expr);
// Loads the [[HomeObject]] of the current function (for `super` references)
// into eax via a named load IC keyed on the home_object_symbol. Throws
// NonMethodError if the load yields undefined (i.e. `super` used outside
// a method).
1275 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1276 Comment cnmt(masm_, "[ SuperReference ");
// Receiver for the IC is the current JSFunction, taken from the frame.
1278 __ mov(LoadDescriptor::ReceiverRegister(),
1279 Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1281 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1282 __ mov(LoadDescriptor::NameRegister(), home_object_symbol);
// With vector ICs the feedback slot travels in a register; otherwise the
// feedback id is passed to the IC call directly (the `else` line between
// the two CallLoadIC calls is elided in this listing).
1284 if (FLAG_vector_ics) {
1285 __ mov(VectorLoadICDescriptor::SlotRegister(),
1286 Immediate(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
1287 CallLoadIC(NOT_CONTEXTUAL);
1289 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
// NOTE(review): the `Label done;` declaration is elided in this listing.
1292 __ cmp(eax, isolate()->factory()->undefined_value());
1294 __ j(not_equal, &done);
1295 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
// If |initializer| needs a [[HomeObject]] (method shorthand etc.), store
// the object currently on top of the stack as the home object of the value
// found |offset| slots down. The CallStoreIC that completes the store is
// elided in this listing.
// NOTE(review): the `int offset` parameter line is elided from the
// signature here but is clearly used below.
1300 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1302 if (NeedsHomeObject(initializer)) {
1303 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1304 __ mov(StoreDescriptor::NameRegister(),
1305 Immediate(isolate()->factory()->home_object_symbol()));
1306 __ mov(StoreDescriptor::ValueRegister(),
1307 Operand(esp, offset * kPointerSize));
// Fast path for loading a DYNAMIC_GLOBAL variable: walk the context chain,
// bailing out to |slow| if any context has a (non-NULL) extension object
// that could shadow the global, then load the variable with a global
// (contextual) load IC. esi must be preserved; edx is used as scratch.
1313 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1314 TypeofState typeof_state,
1316 Register context = esi;
1317 Register temp = edx;
// First phase: statically walk the scopes visible to the compiler
// (loop header with `Scope* s` is elided in this listing).
1321 if (s->num_heap_slots() > 0) {
1322 if (s->calls_sloppy_eval()) {
1323 // Check that extension is NULL.
1324 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1326 __ j(not_equal, slow);
1328 // Load next context in chain.
1329 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1330 // Walk the rest of the chain without clobbering esi.
1333 // If no outer scope calls eval, we do not need to check more
1334 // context extensions. If we have reached an eval scope, we check
1335 // all extensions from this point.
1336 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1337 s = s->outer_scope();
// Second phase: beyond an eval scope the chain is only known at runtime,
// so emit a runtime loop up to the native context.
1340 if (s != NULL && s->is_eval_scope()) {
1341 // Loop up the context chain. There is no frame effect so it is
1342 // safe to use raw labels here.
1344 if (!context.is(temp)) {
1345 __ mov(temp, context);
1348 // Terminate at native context.
1349 __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1350 Immediate(isolate()->factory()->native_context_map()));
1351 __ j(equal, &fast, Label::kNear);
1352 // Check that extension is NULL.
1353 __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1354 __ j(not_equal, slow);
1355 // Load next context in chain.
1356 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1361 // All extension objects were empty and it is safe to use a global
1363 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1364 __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
1365 if (FLAG_vector_ics) {
1366 __ mov(VectorLoadICDescriptor::SlotRegister(),
1367 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
// Inside typeof, an undefined global must not throw, hence the mode
// distinction (the CallLoadIC(mode) line is elided in this listing).
1370 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
// Returns a MemOperand addressing |var|'s context slot, emitting code that
// walks the context chain from the current scope to var->scope() and jumps
// to |slow| if any intervening context has an extension object (which could
// shadow the slot). Read-only: the returned operand may be esi-based.
1378 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1380 DCHECK(var->IsContextSlot());
1381 Register context = esi;
1382 Register temp = ebx;
1384 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1385 if (s->num_heap_slots() > 0) {
1386 if (s->calls_sloppy_eval()) {
1387 // Check that extension is NULL.
1388 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1390 __ j(not_equal, slow);
// Load next context in chain into the scratch register.
1392 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1393 // Walk the rest of the chain without clobbering esi.
1397 // Check that last extension is NULL.
1398 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1399 __ j(not_equal, slow);
1401 // This function is used only for loads, not stores, so it's safe to
1402 // return an esi-based operand (the write barrier cannot be allowed to
1403 // destroy the esi register).
1404 return ContextOperand(context, var->index());
// Emits the fast-path load for a LOOKUP variable that might be shadowed by
// eval-introduced bindings. DYNAMIC_GLOBAL defers to the global-with-
// extension-checks path; DYNAMIC_LOCAL loads the shadowed local's context
// slot directly, with hole checks for let/const bindings. Falls through to
// |slow| (runtime lookup) otherwise; jumps to |done| on success.
1408 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1409 TypeofState typeof_state,
1412 // Generate fast-case code for variables that might be shadowed by
1413 // eval-introduced variables. Eval is used a lot without
1414 // introducing variables. In those cases, we do not want to
1415 // perform a runtime call for all variables in the scope
1416 // containing the eval.
1417 Variable* var = proxy->var();
1418 if (var->mode() == DYNAMIC_GLOBAL) {
1419 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1421 } else if (var->mode() == DYNAMIC_LOCAL) {
1422 Variable* local = var->local_if_not_shadowed();
1423 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
// Hole check: let/const slots contain the_hole until initialized.
1424 if (local->mode() == LET || local->mode() == CONST ||
1425 local->mode() == CONST_LEGACY) {
1426 __ cmp(eax, isolate()->factory()->the_hole_value());
1427 __ j(not_equal, done);
1428 if (local->mode() == CONST_LEGACY) {
// Legacy const reads of an uninitialized binding yield undefined.
1429 __ mov(eax, isolate()->factory()->undefined_value());
1430 } else { // LET || CONST
// Harmony let/const: uninitialized access is a ReferenceError (TDZ).
1431 __ push(Immediate(var->name()));
1432 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1440 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1441 // Record position before possible IC call.
1442 SetSourcePosition(proxy->position());
1443 Variable* var = proxy->var();
1445 // Three cases: global variables, lookup variables, and all other types of
1447 switch (var->location()) {
1448 case Variable::UNALLOCATED: {
1449 Comment cmnt(masm_, "[ Global variable");
1450 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1451 __ mov(LoadDescriptor::NameRegister(), var->name());
1452 if (FLAG_vector_ics) {
1453 __ mov(VectorLoadICDescriptor::SlotRegister(),
1454 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
1456 CallLoadIC(CONTEXTUAL);
1457 context()->Plug(eax);
1461 case Variable::PARAMETER:
1462 case Variable::LOCAL:
1463 case Variable::CONTEXT: {
1464 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1465 : "[ Stack variable");
1466 if (var->binding_needs_init()) {
1467 // var->scope() may be NULL when the proxy is located in eval code and
1468 // refers to a potential outside binding. Currently those bindings are
1469 // always looked up dynamically, i.e. in that case
1470 // var->location() == LOOKUP.
1472 DCHECK(var->scope() != NULL);
1474 // Check if the binding really needs an initialization check. The check
1475 // can be skipped in the following situation: we have a LET or CONST
1476 // binding in harmony mode, both the Variable and the VariableProxy have
1477 // the same declaration scope (i.e. they are both in global code, in the
1478 // same function or in the same eval code) and the VariableProxy is in
1479 // the source physically located after the initializer of the variable.
1481 // We cannot skip any initialization checks for CONST in non-harmony
1482 // mode because const variables may be declared but never initialized:
1483 // if (false) { const x; }; var y = x;
1485 // The condition on the declaration scopes is a conservative check for
1486 // nested functions that access a binding and are called before the
1487 // binding is initialized:
1488 // function() { f(); let x = 1; function f() { x = 2; } }
1490 bool skip_init_check;
1491 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1492 skip_init_check = false;
1493 } else if (var->is_this()) {
1494 CHECK(info_->function() != nullptr &&
1495 (info_->function()->kind() & kSubclassConstructor) != 0);
1496 // TODO(dslomov): implement 'this' hole check elimination.
1497 skip_init_check = false;
1499 // Check that we always have valid source position.
1500 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1501 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1502 skip_init_check = var->mode() != CONST_LEGACY &&
1503 var->initializer_position() < proxy->position();
1506 if (!skip_init_check) {
1507 // Let and const need a read barrier.
1510 __ cmp(eax, isolate()->factory()->the_hole_value());
1511 __ j(not_equal, &done, Label::kNear);
1512 if (var->mode() == LET || var->mode() == CONST) {
1513 // Throw a reference error when using an uninitialized let/const
1514 // binding in harmony mode.
1515 __ push(Immediate(var->name()));
1516 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1518 // Uninitalized const bindings outside of harmony mode are unholed.
1519 DCHECK(var->mode() == CONST_LEGACY);
1520 __ mov(eax, isolate()->factory()->undefined_value());
1523 context()->Plug(eax);
1527 context()->Plug(var);
1531 case Variable::LOOKUP: {
1532 Comment cmnt(masm_, "[ Lookup variable");
1534 // Generate code for loading from variables potentially shadowed
1535 // by eval-introduced variables.
1536 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1538 __ push(esi); // Context.
1539 __ push(Immediate(var->name()));
1540 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1542 context()->Plug(eax);
// Materializes a regexp literal: reuses the boilerplate cached in the
// function's literals array if present (else creates it via the runtime),
// then makes a shallow copy of the boilerplate — inline via Allocate with
// a runtime fallback — so each evaluation yields a fresh JSRegExp.
1549 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1550 Comment cmnt(masm_, "[ RegExpLiteral");
1552 // Registers will be used as follows:
1553 // edi = JS function.
1554 // ecx = literals array.
1555 // ebx = regexp literal.
1556 // eax = regexp literal clone.
1557 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1558 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1559 int literal_offset =
1560 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1561 __ mov(ebx, FieldOperand(ecx, literal_offset));
// undefined in the literals slot means the boilerplate wasn't created yet.
1562 __ cmp(ebx, isolate()->factory()->undefined_value());
1563 __ j(not_equal, &materialized, Label::kNear);
1565 // Create regexp literal using runtime function
1566 // Result will be in eax.
// (The push of the literals array, first of the 4 args, is elided here.)
1568 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1569 __ push(Immediate(expr->pattern()));
1570 __ push(Immediate(expr->flags()));
1571 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1574 __ bind(&materialized);
1575 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1576 Label allocated, runtime_allocate;
1577 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
1580 __ bind(&runtime_allocate);
// Preserve the boilerplate (ebx) across the runtime call
// (its push/pop lines are elided in this listing).
1582 __ push(Immediate(Smi::FromInt(size)));
1583 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1586 __ bind(&allocated);
1587 // Copy the content into the newly allocated memory.
1588 // (Unroll copy loop once for better throughput).
1589 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1590 __ mov(edx, FieldOperand(ebx, i));
1591 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1592 __ mov(FieldOperand(eax, i), edx);
1593 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
// Copy the trailing word when the object size is an odd number of words.
1595 if ((size % (2 * kPointerSize)) != 0) {
1596 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1597 __ mov(FieldOperand(eax, size - kPointerSize), edx);
1599 context()->Plug(eax);
// Pushes an accessor function (getter or setter) onto the stack for an
// accessor-definition runtime call; a missing accessor is represented by
// pushing null.
1603 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1604 if (expression == NULL) {
1605 __ push(Immediate(isolate()->factory()->null_value()));
1607 VisitForStackValue(expression);
// Materializes an object literal. Creates the object (shallow-clone stub
// or runtime), then emits stores for the "static" properties (named keys
// before any computed name) — via store IC, kSetProperty, prototype set,
// or batched accessor definitions — and finally the "dynamic" tail
// (properties from the first computed name onward) via Define*Property
// runtime calls, preserving insertion order.
1612 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1613 Comment cmnt(masm_, "[ ObjectLiteral");
1615 expr->BuildConstantProperties(isolate());
1616 Handle<FixedArray> constant_properties = expr->constant_properties();
1617 int flags = expr->ComputeFlags();
1618 // If any of the keys would store to the elements array, then we shouldn't
1620 if (MustCreateObjectLiteralWithRuntime(expr)) {
1621 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1622 __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1623 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1624 __ push(Immediate(constant_properties));
1625 __ push(Immediate(Smi::FromInt(flags)));
1626 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
// Fast path: FastCloneShallowObjectStub takes its args in registers
// (the CallStub line after the stub declaration is elided here).
1628 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1629 __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
1630 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1631 __ mov(ecx, Immediate(constant_properties));
1632 __ mov(edx, Immediate(Smi::FromInt(flags)));
1633 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1636 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1638 // If result_saved is true the result is on top of the stack. If
1639 // result_saved is false the result is in eax.
1640 bool result_saved = false;
1642 // Mark all computed expressions that are bound to a key that
1643 // is shadowed by a later occurrence of the same key. For the
1644 // marked expressions, no store code is emitted.
1645 expr->CalculateEmitStore(zone());
1647 AccessorTable accessor_table(zone());
1648 int property_index = 0;
// First pass: the static part, up to (excluding) the first computed name.
1649 for (; property_index < expr->properties()->length(); property_index++) {
1650 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1651 if (property->is_computed_name()) break;
1652 if (property->IsCompileTimeValue()) continue;
1654 Literal* key = property->key()->AsLiteral();
1655 Expression* value = property->value();
1656 if (!result_saved) {
1657 __ push(eax); // Save result on the stack
1658 result_saved = true;
1660 switch (property->kind()) {
1661 case ObjectLiteral::Property::CONSTANT:
1663 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1664 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
// Fall through (COMPUTED handling covers this case too).
1666 case ObjectLiteral::Property::COMPUTED:
1667 // It is safe to use [[Put]] here because the boilerplate already
1668 // contains computed properties with an uninitialized value.
1669 if (key->value()->IsInternalizedString()) {
1670 if (property->emit_store()) {
1671 VisitForAccumulatorValue(value);
1672 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1673 __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1674 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1675 CallStoreIC(key->LiteralFeedbackId());
1676 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1678 if (NeedsHomeObject(value)) {
1679 __ mov(StoreDescriptor::ReceiverRegister(), eax);
1680 __ mov(StoreDescriptor::NameRegister(),
1681 Immediate(isolate()->factory()->home_object_symbol()));
1682 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
// Key shadowed by a later duplicate: evaluate for side effects only.
1686 VisitForEffect(value);
// Non-internalized-string key: store via Runtime::kSetProperty.
1690 __ push(Operand(esp, 0)); // Duplicate receiver.
1691 VisitForStackValue(key);
1692 VisitForStackValue(value);
1693 if (property->emit_store()) {
1694 EmitSetHomeObjectIfNeeded(value, 2);
1695 __ push(Immediate(Smi::FromInt(SLOPPY))); // Language mode
1696 __ CallRuntime(Runtime::kSetProperty, 4);
1701 case ObjectLiteral::Property::PROTOTYPE:
1702 __ push(Operand(esp, 0)); // Duplicate receiver.
1703 VisitForStackValue(value);
1704 DCHECK(property->emit_store());
1705 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
// Getters/setters are collected first so each get/set pair for the same
// key is defined with a single runtime call below.
1707 case ObjectLiteral::Property::GETTER:
1708 if (property->emit_store()) {
1709 accessor_table.lookup(key)->second->getter = value;
1712 case ObjectLiteral::Property::SETTER:
1713 if (property->emit_store()) {
1714 accessor_table.lookup(key)->second->setter = value;
1720 // Emit code to define accessors, using only a single call to the runtime for
1721 // each pair of corresponding getters and setters.
1722 for (AccessorTable::Iterator it = accessor_table.begin();
1723 it != accessor_table.end();
1725 __ push(Operand(esp, 0)); // Duplicate receiver.
1726 VisitForStackValue(it->first);
1727 EmitAccessor(it->second->getter);
1728 EmitSetHomeObjectIfNeeded(it->second->getter, 2);
1729 EmitAccessor(it->second->setter);
1730 EmitSetHomeObjectIfNeeded(it->second->setter, 3);
1731 __ push(Immediate(Smi::FromInt(NONE)));
1732 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1735 // Object literals have two parts. The "static" part on the left contains no
1736 // computed property names, and so we can compute its map ahead of time; see
1737 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1738 // starts with the first computed property name, and continues with all
1739 // properties to its right. All the code from above initializes the static
1740 // component of the object literal, and arranges for the map of the result to
1741 // reflect the static order in which the keys appear. For the dynamic
1742 // properties, we compile them into a series of "SetOwnProperty" runtime
1743 // calls. This will preserve insertion order.
1744 for (; property_index < expr->properties()->length(); property_index++) {
1745 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1747 Expression* value = property->value();
1748 if (!result_saved) {
1749 __ push(eax); // Save result on the stack
1750 result_saved = true;
1753 __ push(Operand(esp, 0)); // Duplicate receiver.
1755 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1756 DCHECK(!property->is_computed_name());
1757 VisitForStackValue(value);
1758 DCHECK(property->emit_store());
1759 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
// Non-prototype dynamic property: key, value, home object, then define.
1761 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1762 VisitForStackValue(value);
1763 EmitSetHomeObjectIfNeeded(value, 2);
1765 switch (property->kind()) {
1766 case ObjectLiteral::Property::CONSTANT:
1767 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1768 case ObjectLiteral::Property::COMPUTED:
1769 if (property->emit_store()) {
1770 __ push(Immediate(Smi::FromInt(NONE)));
1771 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1777 case ObjectLiteral::Property::PROTOTYPE:
// Handled above; unreachable here (UNREACHABLE line elided).
1781 case ObjectLiteral::Property::GETTER:
1782 __ push(Immediate(Smi::FromInt(NONE)));
1783 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1786 case ObjectLiteral::Property::SETTER:
1787 __ push(Immediate(Smi::FromInt(NONE)));
1788 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1794 if (expr->has_function()) {
1795 DCHECK(result_saved);
1796 __ push(Operand(esp, 0));
1797 __ CallRuntime(Runtime::kToFastProperties, 1);
// Plug from TOS when the result was spilled, from eax otherwise.
1801 context()->PlugTOS();
1803 context()->Plug(eax);
// Materializes an array literal. Creates the backing array (shallow-clone
// stub or runtime), then evaluates each non-compile-time-constant element
// and stores it: directly into the elements FixedArray (with write barrier)
// when the literal has fast object elements, otherwise via
// StoreArrayLiteralElementStub so elements-kind transitions are handled.
1808 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1809 Comment cmnt(masm_, "[ ArrayLiteral");
1811 expr->BuildConstantElements(isolate());
1812 Handle<FixedArray> constant_elements = expr->constant_elements();
1813 bool has_constant_fast_elements =
1814 IsFastObjectElementsKind(expr->constant_elements_kind());
1816 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1817 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1818 // If the only customer of allocation sites is transitioning, then
1819 // we can turn it off if we don't have anywhere else to transition to.
1820 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1823 if (MustCreateArrayLiteralWithRuntime(expr)) {
1824 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1825 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1826 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1827 __ push(Immediate(constant_elements));
1828 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1829 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
// Fast path: FastCloneShallowArrayStub takes its args in registers
// (the CallStub line after the stub declaration is elided here).
1831 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1832 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1833 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1834 __ mov(ecx, Immediate(constant_elements));
1835 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1838 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1840 bool result_saved = false; // Is the result saved to the stack?
1841 ZoneList<Expression*>* subexprs = expr->values();
1842 int length = subexprs->length();
1844 // Emit code to evaluate all the non-constant subexpressions and to store
1845 // them into the newly cloned array.
1846 for (int i = 0; i < length; i++) {
1847 Expression* subexpr = subexprs->at(i);
1848 // If the subexpression is a literal or a simple materialized literal it
1849 // is already set in the cloned array.
1850 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
// Spill both the array and its literal index; the stub path below
// reads the index from the stack.
1852 if (!result_saved) {
1853 __ push(eax); // array literal.
1854 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1855 result_saved = true;
1857 VisitForAccumulatorValue(subexpr);
1859 if (has_constant_fast_elements) {
1860 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1861 // cannot transition and don't need to call the runtime stub.
1862 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1863 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1864 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1865 // Store the subexpression value in the array's elements.
1866 __ mov(FieldOperand(ebx, offset), result_register());
1867 // Update the write barrier for the array store.
1868 __ RecordWriteField(ebx, offset, result_register(), ecx,
1870 EMIT_REMEMBERED_SET,
1873 // Store the subexpression value in the array's elements.
1874 __ mov(ecx, Immediate(Smi::FromInt(i)));
1875 StoreArrayLiteralElementStub stub(isolate());
1879 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
// Drop the spilled literal index before plugging the array from TOS;
// if nothing was spilled, the result is still in eax.
1883 __ add(esp, Immediate(kPointerSize)); // literal index
1884 context()->PlugTOS();
1886 context()->Plug(eax);
// Emits an assignment expression. Three phases: (1) evaluate the LHS
// pieces (receiver/key) according to the assignment target kind, keeping
// extra copies on the stack for compound assignments; (2) for compound
// assignments (`+=` etc.), load the current value, evaluate the RHS, and
// apply the binary op (inline Smi fast path when profitable); (3) perform
// the store, dispatching on the target kind.
1891 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1892 DCHECK(expr->target()->IsValidReferenceExpression());
1894 Comment cmnt(masm_, "[ Assignment");
1896 Property* property = expr->target()->AsProperty();
1897 LhsKind assign_type = GetAssignType(property);
1899 // Evaluate LHS expression.
// (The `switch (assign_type)` header and the VARIABLE case label are
// elided in this listing.)
1900 switch (assign_type) {
1902 // Nothing to do here.
1904 case NAMED_SUPER_PROPERTY:
1905 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1906 EmitLoadHomeObject(property->obj()->AsSuperReference());
1907 __ push(result_register());
1908 if (expr->is_compound()) {
// Re-push this_var and home object so the load below can consume
// one copy while the store keeps the other.
1909 __ push(MemOperand(esp, kPointerSize));
1910 __ push(result_register());
1913 case NAMED_PROPERTY:
1914 if (expr->is_compound()) {
1915 // We need the receiver both on the stack and in the register.
1916 VisitForStackValue(property->obj());
1917 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1919 VisitForStackValue(property->obj());
1922 case KEYED_SUPER_PROPERTY:
1923 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1924 EmitLoadHomeObject(property->obj()->AsSuperReference());
1925 __ Push(result_register());
1926 VisitForAccumulatorValue(property->key());
1927 __ Push(result_register());
1928 if (expr->is_compound()) {
// Duplicate this_var, home object, and key for the compound load.
1929 __ push(MemOperand(esp, 2 * kPointerSize));
1930 __ push(MemOperand(esp, 2 * kPointerSize));
1931 __ push(result_register());
1934 case KEYED_PROPERTY: {
1935 if (expr->is_compound()) {
1936 VisitForStackValue(property->obj());
1937 VisitForStackValue(property->key());
1938 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1939 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1941 VisitForStackValue(property->obj());
1942 VisitForStackValue(property->key());
1948 // For compound assignments we need another deoptimization point after the
1949 // variable/property load.
1950 if (expr->is_compound()) {
1951 AccumulatorValueContext result_context(this);
1952 { AccumulatorValueContext left_operand_context(this);
1953 switch (assign_type) {
// (The VARIABLE case label is elided in this listing.)
1955 EmitVariableLoad(expr->target()->AsVariableProxy());
1956 PrepareForBailout(expr->target(), TOS_REG);
1958 case NAMED_SUPER_PROPERTY:
1959 EmitNamedSuperPropertyLoad(property);
1960 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1962 case NAMED_PROPERTY:
1963 EmitNamedPropertyLoad(property);
1964 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1966 case KEYED_SUPER_PROPERTY:
1967 EmitKeyedSuperPropertyLoad(property);
1968 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1970 case KEYED_PROPERTY:
1971 EmitKeyedPropertyLoad(property);
1972 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1977 Token::Value op = expr->binary_op();
1978 __ push(eax); // Left operand goes on the stack.
1979 VisitForAccumulatorValue(expr->value());
1981 SetSourcePosition(expr->position() + 1);
1982 if (ShouldInlineSmiCase(op)) {
// Inline Smi fast path with a patchable IC fallback
// (remaining arguments of this call are elided in the listing).
1983 EmitInlineSmiBinaryOp(expr->binary_operation(),
1988 EmitBinaryOp(expr->binary_operation(), op);
1991 // Deoptimization point in case the binary operation may have side effects.
1992 PrepareForBailout(expr->binary_operation(), TOS_REG);
// Simple (non-compound) assignment: just evaluate the RHS.
1994 VisitForAccumulatorValue(expr->value());
1997 // Record source position before possible IC call.
1998 SetSourcePosition(expr->position());
// Store the value; each case plugs the result into the context.
2001 switch (assign_type) {
2003 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2005 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2006 context()->Plug(eax);
2008 case NAMED_PROPERTY:
2009 EmitNamedPropertyAssignment(expr);
2011 case NAMED_SUPER_PROPERTY:
2012 EmitNamedSuperPropertyStore(property);
2013 context()->Plug(result_register());
2015 case KEYED_SUPER_PROPERTY:
2016 EmitKeyedSuperPropertyStore(property);
2017 context()->Plug(result_register());
2019 case KEYED_PROPERTY:
2020 EmitKeyedPropertyAssignment(expr);
// Emits code for a generator `yield` expression. Dispatches on the yield
// kind: kSuspend/kInitial suspend the generator (recording the resume
// bytecode position and context in the JSGeneratorObject), kFinal closes
// the generator and returns a done iterator result, and kDelegating
// implements `yield*` as a loop that forwards next()/throw() to the inner
// iterator.
// NOTE(review): this listing appears to elide some lines (case breaks,
// closing braces, some emitted instructions) — verify against the full file.
2026 void FullCodeGenerator::VisitYield(Yield* expr) {
2027 Comment cmnt(masm_, "[ Yield");
2028 // Evaluate yielded value first; the initial iterator definition depends on
2029 // this. It stays on the stack while we update the iterator.
2030 VisitForStackValue(expr->expression());
2032 switch (expr->yield_kind()) {
2033 case Yield::kSuspend:
2034 // Pop value from top-of-stack slot; box result into result register.
2035 EmitCreateIteratorResult(false);
2036 __ push(result_register());
2038 case Yield::kInitial: {
2039 Label suspend, continuation, post_runtime, resume;
2043 __ bind(&continuation);
// Store the resume point (label position as a Smi) and the current
// context into the generator object so it can be resumed later.
2047 VisitForAccumulatorValue(expr->generator_object());
2048 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2049 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2050 Immediate(Smi::FromInt(continuation.pos())));
2051 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
// Write barrier: the context field may point into new space.
2053 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2055 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
// Fast path: if the operand stack is empty we can skip the runtime call.
2057 __ j(equal, &post_runtime);
2058 __ push(eax); // generator object
2059 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2060 __ mov(context_register(),
2061 Operand(ebp, StandardFrameConstants::kContextOffset));
2062 __ bind(&post_runtime);
2063 __ pop(result_register());
2064 EmitReturnSequence();
2067 context()->Plug(result_register());
2071 case Yield::kFinal: {
// `return`-style yield: mark the generator closed, then produce a
// {value, done: true} iterator result and return.
2072 VisitForAccumulatorValue(expr->generator_object());
2073 __ mov(FieldOperand(result_register(),
2074 JSGeneratorObject::kContinuationOffset),
2075 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2076 // Pop value from top-of-stack slot, box result into result register.
2077 EmitCreateIteratorResult(true);
2078 EmitUnwindBeforeReturn();
2079 EmitReturnSequence();
2083 case Yield::kDelegating: {
2084 VisitForStackValue(expr->generator_object());
2086 // Initial stack layout is as follows:
2087 // [sp + 1 * kPointerSize] iter
2088 // [sp + 0 * kPointerSize] g
2090 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2091 Label l_next, l_call, l_loop;
2092 Register load_receiver = LoadDescriptor::ReceiverRegister();
2093 Register load_name = LoadDescriptor::NameRegister();
2095 // Initial send value is undefined.
2096 __ mov(eax, isolate()->factory()->undefined_value());
2099 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
// Register this catch block in the handler table so exceptions thrown
// into the delegating generator are forwarded as iter.throw(e).
2101 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2102 __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2103 __ push(load_name); // "throw"
2104 __ push(Operand(esp, 2 * kPointerSize)); // iter
2105 __ push(eax); // exception
2108 // try { received = %yield result }
2109 // Shuffle the received result above a try handler and yield it without
2112 __ pop(eax); // result
2113 __ PushTryHandler(StackHandler::CATCH, expr->index());
2114 const int handler_size = StackHandlerConstants::kSize;
2115 __ push(eax); // result
2117 __ bind(&l_continuation);
2119 __ bind(&l_suspend);
// The generator object sits above the result slot and the try handler.
2120 const int generator_object_depth = kPointerSize + handler_size;
2121 __ mov(eax, Operand(esp, generator_object_depth));
2123 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2124 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2125 Immediate(Smi::FromInt(l_continuation.pos())));
2126 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2128 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2130 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2131 __ mov(context_register(),
2132 Operand(ebp, StandardFrameConstants::kContextOffset));
2133 __ pop(eax); // result
2134 EmitReturnSequence();
2135 __ bind(&l_resume); // received in eax
2138 // receiver = iter; f = iter.next; arg = received;
2141 __ mov(load_name, isolate()->factory()->next_string());
2142 __ push(load_name); // "next"
2143 __ push(Operand(esp, 2 * kPointerSize)); // iter
2144 __ push(eax); // received
2146 // result = receiver[f](arg);
// Load iter["next"] (or iter["throw"] on the catch path) with a keyed
// load IC, then invoke it as a method with one argument.
2148 __ mov(load_receiver, Operand(esp, kPointerSize));
2149 if (FLAG_vector_ics) {
2150 __ mov(VectorLoadICDescriptor::SlotRegister(),
2151 Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2153 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2154 CallIC(ic, TypeFeedbackId::None());
2156 __ mov(Operand(esp, 2 * kPointerSize), edi);
2157 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2160 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2161 __ Drop(1); // The function is still on the stack; drop it.
2163 // if (!result.done) goto l_try;
2165 __ push(eax); // save result
2166 __ Move(load_receiver, eax); // result
2168 isolate()->factory()->done_string()); // "done"
2169 if (FLAG_vector_ics) {
2170 __ mov(VectorLoadICDescriptor::SlotRegister(),
2171 Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
2173 CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2174 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
// Loop finished: unwrap result.value and drop iter and g.
2180 __ pop(load_receiver); // result
2182 isolate()->factory()->value_string()); // "value"
2183 if (FLAG_vector_ics) {
2184 __ mov(VectorLoadICDescriptor::SlotRegister(),
2185 Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
2187 CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2188 context()->DropAndPlug(2, eax); // drop iter and g
// Resumes a suspended generator. Rebuilds the generator's JS frame (pushing
// hole values for the argument slots), then either jumps straight back into
// the generator code (fast path, NEXT with empty operand stack) or calls
// the runtime to restore the operand stack and handlers.
// NOTE(review): some lines appear elided in this listing — verify the exact
// instruction sequence against the full file.
2195 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2197 JSGeneratorObject::ResumeMode resume_mode) {
2198 // The value stays in eax, and is ultimately read by the resumed generator, as
2199 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2200 // is read to throw the value when the resumed generator is already closed.
2201 // ebx will hold the generator object until the activation has been resumed.
2202 VisitForStackValue(generator);
2203 VisitForAccumulatorValue(value);
2206 // Load suspended function and context.
2207 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2208 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2211 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2213 // Push holes for arguments to generator function.
2214 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2216 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2217 __ mov(ecx, isolate()->factory()->the_hole_value());
2218 Label push_argument_holes, push_frame;
2219 __ bind(&push_argument_holes);
// edx holds a Smi count; subtracting Smi(1) until carry counts it down to
// "below zero", pushing one hole per formal parameter.
2220 __ sub(edx, Immediate(Smi::FromInt(1)));
2221 __ j(carry, &push_frame);
2223 __ jmp(&push_argument_holes);
2225 // Enter a new JavaScript frame, and initialize its slots as they were when
2226 // the generator was suspended.
2227 Label resume_frame, done;
2228 __ bind(&push_frame);
// `call` pushes a return address so the resumed frame looks like a normal
// JS activation; execution continues at resume_frame.
2229 __ call(&resume_frame);
2231 __ bind(&resume_frame);
2232 __ push(ebp); // Caller's frame pointer.
2234 __ push(esi); // Callee's context.
2235 __ push(edi); // Callee's JS Function.
2237 // Load the operand stack size.
2238 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2239 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2242 // If we are sending a value and there is no operand stack, we can jump back
2244 if (resume_mode == JSGeneratorObject::NEXT) {
2246 __ cmp(edx, Immediate(0));
2247 __ j(not_zero, &slow_resume);
// Fast path: compute the resume address from the code entry plus the
// stored continuation offset, mark the generator as executing, and jump.
2248 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2249 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2252 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2253 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2255 __ bind(&slow_resume);
2258 // Otherwise, we push holes for the operand stack and call the runtime to fix
2259 // up the stack and the handlers.
2260 Label push_operand_holes, call_resume;
2261 __ bind(&push_operand_holes);
2262 __ sub(edx, Immediate(1));
2263 __ j(carry, &call_resume);
2265 __ jmp(&push_operand_holes);
2266 __ bind(&call_resume);
2268 __ push(result_register());
2269 __ Push(Smi::FromInt(resume_mode));
2270 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2271 // Not reached: the runtime call returns elsewhere.
2272 __ Abort(kGeneratorFailedToResume);
2275 context()->Plug(result_register());
// Allocates a JSObject of the iterator-result map ({value, done}) in new
// space, falling back to the runtime on allocation failure, and fills in
// the map, empty properties/elements, the popped value, and the `done`
// boolean. Only the value field gets a write barrier.
2279 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2283 const int instance_size = 5 * kPointerSize;
2284 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2287 __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
// Slow path: allocate via the runtime, then restore the context register
// which the call may have clobbered.
2290 __ bind(&gc_required);
2291 __ Push(Smi::FromInt(instance_size));
2292 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2293 __ mov(context_register(),
2294 Operand(ebp, StandardFrameConstants::kContextOffset));
2296 __ bind(&allocated);
// Fetch the iterator-result map from the native context.
2297 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2298 __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2299 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2301 __ mov(edx, isolate()->factory()->ToBoolean(done));
2302 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2303 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2304 isolate()->factory()->empty_fixed_array());
2305 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2306 isolate()->factory()->empty_fixed_array());
2307 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2308 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2310 // Only the value field needs a write barrier, as the other values are in the
2312 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2313 ecx, edx, kDontSaveFPRegs);
// Loads a named (non-super) property via the load IC. The receiver is
// expected in LoadDescriptor::ReceiverRegister(); the literal key is moved
// into the name register. With vector ICs enabled the feedback slot is
// passed in the slot register, otherwise the type-feedback id is attached
// to the IC call.
2317 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2318 SetSourcePosition(prop->position());
2319 Literal* key = prop->key()->AsLiteral();
2320 DCHECK(!key->value()->IsSmi());
2321 DCHECK(!prop->IsSuperAccess());
2323 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2324 if (FLAG_vector_ics) {
2325 __ mov(VectorLoadICDescriptor::SlotRegister(),
2326 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2327 CallLoadIC(NOT_CONTEXTUAL);
2329 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
// Loads a named property off a super reference by calling
// Runtime::kLoadFromSuper with (receiver, home_object, name); the first two
// are already on the stack, the literal key is pushed here.
2334 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2335 // Stack: receiver, home_object.
2336 SetSourcePosition(prop->position());
2337 Literal* key = prop->key()->AsLiteral();
2338 DCHECK(!key->value()->IsSmi());
2339 DCHECK(prop->IsSuperAccess());
2341 __ push(Immediate(key->value()));
2342 __ CallRuntime(Runtime::kLoadFromSuper, 3);
// Loads a keyed (non-super) property via the keyed-load IC. Receiver and
// key are expected in the descriptor registers; with vector ICs the
// feedback slot is materialized in the slot register first.
2346 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2347 SetSourcePosition(prop->position());
2348 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2349 if (FLAG_vector_ics) {
2350 __ mov(VectorLoadICDescriptor::SlotRegister(),
2351 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2354 CallIC(ic, prop->PropertyFeedbackId());
// Loads a keyed property off a super reference by calling
// Runtime::kLoadKeyedFromSuper with the (receiver, home_object, key)
// already on the stack.
2359 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2360 // Stack: receiver, home_object, key.
2361 SetSourcePosition(prop->position());
2363 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
// Emits an inline smi fast path for a binary operation with a patchable
// jump (JumpPatchSite) guarding it; non-smi operands fall through to the
// BinaryOpIC stub call. Left operand comes from the stack (edx), right is
// in eax.
// NOTE(review): several case labels/instructions appear elided in this
// listing (e.g. the shift setup before sar_cl) — verify against the full
// file before relying on the exact sequence.
2367 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2370 Expression* right) {
2371 // Do combined smi check of the operands. Left operand is on the
2372 // stack. Right operand is in eax.
2373 Label smi_case, done, stub_call;
2377 JumpPatchSite patch_site(masm_);
2378 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
// Slow path: generic BinaryOpIC; the patch site records where the smi
// check lives so the IC can patch it once it learns the operand types.
2380 __ bind(&stub_call);
2382 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2383 CallIC(code, expr->BinaryOperationFeedbackId());
2384 patch_site.EmitPatchInfo();
2385 __ jmp(&done, Label::kNear);
2389 __ mov(eax, edx); // Copy left operand in case of a stub call.
2394 __ sar_cl(eax); // No checks of result necessary
// Clear the tag bits shifted into the low end so the result is a valid smi.
2395 __ and_(eax, Immediate(~kSmiTagMask));
2402 // Check that the *signed* result fits in a smi.
2403 __ cmp(eax, 0xc0000000);
2404 __ j(positive, &result_ok);
2407 __ bind(&result_ok);
// SHR case: the *unsigned* result only fits in a smi if the top two bits
// are clear.
2416 __ test(eax, Immediate(0xc0000000));
2417 __ j(zero, &result_ok);
2420 __ bind(&result_ok);
// Arithmetic cases bail out to the stub on overflow.
2426 __ j(overflow, &stub_call);
2430 __ j(overflow, &stub_call);
2435 __ j(overflow, &stub_call);
2437 __ j(not_zero, &done, Label::kNear);
2440 __ j(negative, &stub_call);
2446 case Token::BIT_AND:
2449 case Token::BIT_XOR:
2457 context()->Plug(eax);
// Defines the methods and accessors of a class literal on either the
// prototype or (for static members) the constructor, via per-kind runtime
// calls, then normalizes both objects with %ToFastProperties.
2461 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2462 // Constructor is in eax.
2463 DCHECK(lit != NULL);
2466 // No access check is needed here since the constructor is created by the
2468 Register scratch = ebx;
2469 __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2472 for (int i = 0; i < lit->properties()->length(); i++) {
2473 ObjectLiteral::Property* property = lit->properties()->at(i);
2474 Expression* value = property->value();
// Static members are installed on the constructor itself; everything
// else goes on the prototype.
2476 if (property->is_static()) {
2477 __ push(Operand(esp, kPointerSize)); // constructor
2479 __ push(Operand(esp, 0)); // prototype
2481 EmitPropertyKey(property, lit->GetIdForProperty(i));
2482 VisitForStackValue(value);
// Methods that reference `super` need their [[HomeObject]] set.
2483 EmitSetHomeObjectIfNeeded(value, 2);
2485 switch (property->kind()) {
2486 case ObjectLiteral::Property::CONSTANT:
2487 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2488 case ObjectLiteral::Property::PROTOTYPE:
2490 case ObjectLiteral::Property::COMPUTED:
2491 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2494 case ObjectLiteral::Property::GETTER:
2495 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2496 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2499 case ObjectLiteral::Property::SETTER:
2500 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2501 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
// Normalize prototype and constructor back to fast-properties mode.
2507 __ CallRuntime(Runtime::kToFastProperties, 1);
2510 __ CallRuntime(Runtime::kToFastProperties, 1);
// Emits a generic (non-inlined) binary operation via the BinaryOpIC stub.
// The unbound JumpPatchSite emits patch info signalling that no inlined
// smi code precedes the call; the result lands in eax.
2514 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2516 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2517 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2518 CallIC(code, expr->BinaryOperationFeedbackId());
2519 patch_site.EmitPatchInfo();
2520 context()->Plug(eax);
// Stores the value in eax into an arbitrary reference expression (used for
// destructuring-style assignments). Dispatches on the LHS kind: variable,
// named/keyed property, or named/keyed super property. For the super cases
// the stack is shuffled so the runtime sees (this, home_object[, key]) with
// the value in eax.
// NOTE(review): some break statements/closing braces appear elided in this
// listing — verify against the full file.
2524 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2525 DCHECK(expr->IsValidReferenceExpression());
2527 Property* prop = expr->AsProperty();
2528 LhsKind assign_type = GetAssignType(prop);
2530 switch (assign_type) {
2532 Variable* var = expr->AsVariableProxy()->var();
2533 EffectContext context(this);
2534 EmitVariableAssignment(var, Token::ASSIGN);
2537 case NAMED_PROPERTY: {
2538 __ push(eax); // Preserve value.
2539 VisitForAccumulatorValue(prop->obj());
2540 __ Move(StoreDescriptor::ReceiverRegister(), eax);
2541 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2542 __ mov(StoreDescriptor::NameRegister(),
2543 prop->key()->AsLiteral()->value());
2547 case NAMED_SUPER_PROPERTY: {
2549 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2550 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2551 // stack: value, this; eax: home_object
2552 Register scratch = ecx;
2553 Register scratch2 = edx;
// Swap value and this on the stack, leaving (this, home_object) with the
// value back in eax, as EmitNamedSuperPropertyStore expects.
2554 __ mov(scratch, result_register()); // home_object
2555 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2556 __ mov(scratch2, MemOperand(esp, 0)); // this
2557 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2558 __ mov(MemOperand(esp, 0), scratch); // home_object
2559 // stack: this, home_object. eax: value
2560 EmitNamedSuperPropertyStore(prop);
2563 case KEYED_SUPER_PROPERTY: {
2565 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2566 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2567 __ push(result_register());
2568 VisitForAccumulatorValue(prop->key());
2569 Register scratch = ecx;
2570 Register scratch2 = edx;
// Rotate (value, this, home_object) into (this, home_object, key) with
// the value in eax, as EmitKeyedSuperPropertyStore expects.
2571 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2572 // stack: value, this, home_object; eax: key, edx: value
2573 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2574 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2575 __ mov(scratch, MemOperand(esp, 0)); // home_object
2576 __ mov(MemOperand(esp, kPointerSize), scratch);
2577 __ mov(MemOperand(esp, 0), eax);
2578 __ mov(eax, scratch2);
2579 // stack: this, home_object, key; eax: value.
2580 EmitKeyedSuperPropertyStore(prop);
2583 case KEYED_PROPERTY: {
2584 __ push(eax); // Preserve value.
2585 VisitForStackValue(prop->obj());
2586 VisitForAccumulatorValue(prop->key());
2587 __ Move(StoreDescriptor::NameRegister(), eax);
2588 __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2589 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2591 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2596 context()->Plug(eax);
// Stores eax into a stack local or context slot. Context slots need a
// write barrier because the stored value may be a new-space object.
2600 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2601 Variable* var, MemOperand location) {
2602 __ mov(location, eax);
2603 if (var->IsContextSlot()) {
2605 int offset = Context::SlotOffset(var->index());
2606 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
// Stores eax into a variable, handling each allocation/mode combination:
// globals via the store IC, legacy const initialization (only if still the
// hole), let assignments with a hole check (TDZ ReferenceError), lookup
// slots via the runtime, and ordinary stack/context slots directly.
// Assignments to const in strict-like modes throw via the runtime.
2611 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2613 if (var->IsUnallocated()) {
2614 // Global var, const, or let.
2615 __ mov(StoreDescriptor::NameRegister(), var->name());
2616 __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2619 } else if (op == Token::INIT_CONST_LEGACY) {
2620 // Const initializers need a write barrier.
2621 DCHECK(!var->IsParameter()); // No const parameters.
2622 if (var->IsLookupSlot()) {
2625 __ push(Immediate(var->name()));
2626 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2628 DCHECK(var->IsStackLocal() || var->IsContextSlot());
// Legacy const: only the very first (hole-valued) initialization wins;
// later re-initializations are silently skipped.
2630 MemOperand location = VarOperand(var, ecx);
2631 __ mov(edx, location);
2632 __ cmp(edx, isolate()->factory()->the_hole_value());
2633 __ j(not_equal, &skip, Label::kNear);
2634 EmitStoreToStackLocalOrContextSlot(var, location);
2638 } else if (var->mode() == LET && op != Token::INIT_LET) {
2639 // Non-initializing assignment to let variable needs a write barrier.
2640 DCHECK(!var->IsLookupSlot());
2641 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
// If the slot still holds the hole the binding is uninitialized (TDZ):
// throw a ReferenceError instead of storing.
2643 MemOperand location = VarOperand(var, ecx);
2644 __ mov(edx, location);
2645 __ cmp(edx, isolate()->factory()->the_hole_value());
2646 __ j(not_equal, &assign, Label::kNear);
2647 __ push(Immediate(var->name()));
2648 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2650 EmitStoreToStackLocalOrContextSlot(var, location);
2651 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2652 if (var->IsLookupSlot()) {
2653 // Assignment to var.
2654 __ push(eax); // Value.
2655 __ push(esi); // Context.
2656 __ push(Immediate(var->name()));
2657 __ push(Immediate(Smi::FromInt(language_mode())));
2658 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2660 // Assignment to var or initializing assignment to let/const in harmony
2662 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2663 MemOperand location = VarOperand(var, ecx);
2664 if (generate_debug_code_ && op == Token::INIT_LET) {
2665 // Check for an uninitialized let binding.
2666 __ mov(edx, location);
2667 __ cmp(edx, isolate()->factory()->the_hole_value());
2668 __ Check(equal, kLetBindingReInitialization);
2670 EmitStoreToStackLocalOrContextSlot(var, location);
2672 } else if (IsSignallingAssignmentToConst(var, op, language_mode())) {
2673 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
// Stores the accumulator (eax) into a named property via the store IC.
// The receiver is popped from the stack into the descriptor register and
// the literal key is loaded into the name register.
2678 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2679 // Assignment to a property, using a named store IC.
2681 // esp[0] : receiver
2683 Property* prop = expr->target()->AsProperty();
2684 DCHECK(prop != NULL);
2685 DCHECK(prop->key()->IsLiteral());
2687 // Record source code position before IC call.
2688 SetSourcePosition(expr->position());
2689 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2690 __ pop(StoreDescriptor::ReceiverRegister());
2691 CallStoreIC(expr->AssignmentFeedbackId());
2692 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2693 context()->Plug(eax);
// Stores the value in eax into a named super property by calling the
// strict or sloppy %StoreToSuper runtime function with (this, home_object)
// on the stack plus the pushed literal key.
2697 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2698 // Assignment to named property of super.
2700 // stack : receiver ('this'), home_object
2701 DCHECK(prop != NULL);
2702 Literal* key = prop->key()->AsLiteral();
2703 DCHECK(key != NULL);
2705 __ push(Immediate(key->value()));
2707 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2708 : Runtime::kStoreToSuper_Sloppy),
// Stores the value in eax into a keyed super property by calling the
// strict or sloppy %StoreKeyedToSuper runtime function with
// (this, home_object, key) already on the stack.
2713 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2714 // Assignment to named property of super.
2716 // stack : receiver ('this'), home_object, key
2720 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2721 : Runtime::kStoreKeyedToSuper_Sloppy),
// Stores the accumulator (eax) into a keyed property via the keyed store
// IC. Key and receiver are popped from the stack into the descriptor
// registers; the value is already in the value register.
2726 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2727 // Assignment to a property, using a keyed store IC.
2730 // esp[kPointerSize] : receiver
2732 __ pop(StoreDescriptor::NameRegister()); // Key.
2733 __ pop(StoreDescriptor::ReceiverRegister());
2734 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2735 // Record source code position before IC call.
2736 SetSourcePosition(expr->position());
2738 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2739 CallIC(ic, expr->AssignmentFeedbackId());
2741 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2742 context()->Plug(eax);
// Emits a property read, dispatching on named vs. keyed and plain vs.
// super access, then records a bailout point for the loaded value and
// plugs eax into the current expression context.
2746 void FullCodeGenerator::VisitProperty(Property* expr) {
2747 Comment cmnt(masm_, "[ Property");
2748 Expression* key = expr->key();
2750 if (key->IsPropertyName()) {
2751 if (!expr->IsSuperAccess()) {
2752 VisitForAccumulatorValue(expr->obj());
2753 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2754 EmitNamedPropertyLoad(expr);
// Super access: push `this` and the home object, then go through the
// runtime-based super load.
2756 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2757 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2758 __ push(result_register());
2759 EmitNamedSuperPropertyLoad(expr);
2762 if (!expr->IsSuperAccess()) {
2763 VisitForStackValue(expr->obj());
2764 VisitForAccumulatorValue(expr->key());
2765 __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2766 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2767 EmitKeyedPropertyLoad(expr);
2769 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2770 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2771 __ push(result_register());
2772 VisitForStackValue(expr->key());
2773 EmitKeyedSuperPropertyLoad(expr);
2776 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2777 context()->Plug(eax);
// Emits a call to an IC stub, attaching the given AST id so type feedback
// can be associated with the call site.
2781 void FullCodeGenerator::CallIC(Handle<Code> code,
2782 TypeFeedbackId ast_id) {
2784 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2788 // Code common for calls using the IC.
// Emits a call where the callee is loaded with a (named) load IC: either a
// plain function call (undefined receiver, patched in sloppy mode) or a
// method call where the function is loaded off the receiver and shuffled
// under it on the stack. Finishes via EmitCall.
2789 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2790 Expression* callee = expr->expression();
2792 CallICState::CallType call_type =
2793 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2794 // Get the target function.
2795 if (call_type == CallICState::FUNCTION) {
2796 { StackValueContext context(this);
2797 EmitVariableLoad(callee->AsVariableProxy());
2798 PrepareForBailout(callee, NO_REGISTERS);
2800 // Push undefined as receiver. This is patched in the method prologue if it
2801 // is a sloppy mode method.
2802 __ push(Immediate(isolate()->factory()->undefined_value()));
2804 // Load the function from the receiver.
2805 DCHECK(callee->IsProperty());
2806 DCHECK(!callee->AsProperty()->IsSuperAccess());
2807 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2808 EmitNamedPropertyLoad(callee->AsProperty());
2809 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2810 // Push the target function under the receiver.
2811 __ push(Operand(esp, 0));
2812 __ mov(Operand(esp, kPointerSize), eax);
2815 EmitCall(expr, call_type);
// Emits a named method call on a super reference: loads the target
// function via %LoadFromSuper (receiver, home_object, name), replaces the
// home object on the stack with it, and dispatches through EmitCall as a
// method call with `this` as the receiver.
2819 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2820 Expression* callee = expr->expression();
2821 DCHECK(callee->IsProperty());
2822 Property* prop = callee->AsProperty();
2823 DCHECK(prop->IsSuperAccess());
2825 SetSourcePosition(prop->position());
2826 Literal* key = prop->key()->AsLiteral();
2827 DCHECK(!key->value()->IsSmi());
2828 // Load the function from the receiver.
2829 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2830 EmitLoadHomeObject(super_ref);
2832 VisitForAccumulatorValue(super_ref->this_var());
// Duplicate `this` so one copy survives as the call receiver.
2835 __ push(Operand(esp, kPointerSize * 2));
2836 __ push(Immediate(key->value()));
2839 // - this (receiver)
2840 // - this (receiver) <-- LoadFromSuper will pop here and below.
2843 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2845 // Replace home_object with target function.
2846 __ mov(Operand(esp, kPointerSize), eax);
2849 // - target function
2850 // - this (receiver)
2851 EmitCall(expr, CallICState::METHOD);
2855 // Code common for calls using the IC.
// Emits a keyed method call obj[key](...): evaluates the key, loads the
// function with the keyed-load IC, pushes it under the receiver on the
// stack, and dispatches through EmitCall as a method call.
2856 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2859 VisitForAccumulatorValue(key);
2861 Expression* callee = expr->expression();
2863 // Load the function from the receiver.
2864 DCHECK(callee->IsProperty());
2865 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2866 __ mov(LoadDescriptor::NameRegister(), eax);
2867 EmitKeyedPropertyLoad(callee->AsProperty());
2868 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2870 // Push the target function under the receiver.
2871 __ push(Operand(esp, 0));
2872 __ mov(Operand(esp, kPointerSize), eax);
2874 EmitCall(expr, CallICState::METHOD);
// Emits a keyed method call on a super reference: loads the target via
// %LoadKeyedFromSuper (receiver, home_object, key), replaces the home
// object on the stack with it, and dispatches through EmitCall as a
// method call with `this` as the receiver.
2878 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2879 Expression* callee = expr->expression();
2880 DCHECK(callee->IsProperty());
2881 Property* prop = callee->AsProperty();
2882 DCHECK(prop->IsSuperAccess());
2884 SetSourcePosition(prop->position());
2885 // Load the function from the receiver.
2886 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2887 EmitLoadHomeObject(super_ref);
2889 VisitForAccumulatorValue(super_ref->this_var());
// Duplicate `this` so one copy survives as the call receiver.
2892 __ push(Operand(esp, kPointerSize * 2));
2893 VisitForStackValue(prop->key());
2896 // - this (receiver)
2897 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2900 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2902 // Replace home_object with target function.
2903 __ mov(Operand(esp, kPointerSize), eax);
2906 // - target function
2907 // - this (receiver)
2908 EmitCall(expr, CallICState::METHOD);
// Common tail for IC-based calls: pushes the arguments, loads the feedback
// slot (edx) and the target function (edi, found below the arguments on
// the stack), invokes the CallIC, records the JS return site, restores the
// context register, and drops the function leaving the result in eax.
2912 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2913 // Load the arguments.
2914 ZoneList<Expression*>* args = expr->arguments();
2915 int arg_count = args->length();
2916 { PreservePositionScope scope(masm()->positions_recorder());
2917 for (int i = 0; i < arg_count; i++) {
2918 VisitForStackValue(args->at(i));
2922 // Record source position of the IC call.
2923 SetSourcePosition(expr->position());
2924 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2925 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2926 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2927 // Don't assign a type feedback id to the IC, since type feedback is provided
2928 // by the vector above.
2931 RecordJSReturnSite(expr);
2933 // Restore context register.
2934 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2936 context()->DropAndPlug(1, eax);
// Pushes the six arguments for %ResolvePossiblyDirectEval (first eval
// argument or undefined, enclosing function, receiver, language mode,
// scope start position) and makes the runtime call that resolves whether
// this is a direct eval and what function to invoke.
2940 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2941 // Push copy of the first argument or undefined if it doesn't exist.
2942 if (arg_count > 0) {
2943 __ push(Operand(esp, arg_count * kPointerSize));
2945 __ push(Immediate(isolate()->factory()->undefined_value()));
2948 // Push the enclosing function.
2949 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2950 // Push the receiver of the enclosing function.
2951 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2952 // Push the language mode.
2953 __ push(Immediate(Smi::FromInt(language_mode())));
2955 // Push the start position of the scope the calls resides in.
2956 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2958 // Do the runtime call.
2959 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
// Loads the super constructor (the prototype of the current function)
// via %GetPrototype; the result is left where the runtime call puts it.
2963 void FullCodeGenerator::EmitLoadSuperConstructor() {
2964 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2965 __ CallRuntime(Runtime::kGetPrototype, 1);
// Top-level dispatcher for call expressions. Handles possibly-direct eval
// (resolve via the runtime, then CallFunctionStub), global calls, lookup
// slots (with an eval-shadowing fast path), property calls (named/keyed,
// plain/super), super constructor calls, and arbitrary other callees.
// In debug builds, asserts every path recorded a JS return site.
// NOTE(review): some lines (labels, breaks, a stub call) appear elided in
// this listing — verify against the full file.
2969 void FullCodeGenerator::VisitCall(Call* expr) {
2971 // We want to verify that RecordJSReturnSite gets called on all paths
2972 // through this function. Avoid early returns.
2973 expr->return_is_recorded_ = false;
2976 Comment cmnt(masm_, "[ Call");
2977 Expression* callee = expr->expression();
2978 Call::CallType call_type = expr->GetCallType(isolate());
2980 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2981 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2982 // to resolve the function we need to call and the receiver of the call.
2983 // Then we call the resolved function using the given arguments.
2984 ZoneList<Expression*>* args = expr->arguments();
2985 int arg_count = args->length();
2986 { PreservePositionScope pos_scope(masm()->positions_recorder());
2987 VisitForStackValue(callee);
2988 // Reserved receiver slot.
2989 __ push(Immediate(isolate()->factory()->undefined_value()));
2990 // Push the arguments.
2991 for (int i = 0; i < arg_count; i++) {
2992 VisitForStackValue(args->at(i));
2995 // Push a copy of the function (found below the arguments) and
2997 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2998 EmitResolvePossiblyDirectEval(arg_count);
3000 // The runtime call returns a pair of values in eax (function) and
3001 // edx (receiver). Touch up the stack with the right values.
3002 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
3003 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
3005 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3007 // Record source position for debugger.
3008 SetSourcePosition(expr->position());
3009 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3010 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3012 RecordJSReturnSite(expr);
3013 // Restore context register.
3014 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3015 context()->DropAndPlug(1, eax);
3017 } else if (call_type == Call::GLOBAL_CALL) {
3018 EmitCallWithLoadIC(expr);
3019 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3020 // Call to a lookup slot (dynamically introduced variable).
3021 VariableProxy* proxy = callee->AsVariableProxy();
3023 { PreservePositionScope scope(masm()->positions_recorder());
3024 // Generate code for loading from variables potentially shadowed by
3025 // eval-introduced variables.
3026 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3029 // Call the runtime to find the function to call (returned in eax) and
3030 // the object holding it (returned in edx).
3031 __ push(context_register());
3032 __ push(Immediate(proxy->name()));
3033 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3034 __ push(eax); // Function.
3035 __ push(edx); // Receiver.
3036 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3038 // If fast case code has been generated, emit code to push the function
3039 // and receiver and have the slow path jump around this code.
3040 if (done.is_linked()) {
3042 __ jmp(&call, Label::kNear);
3046 // The receiver is implicitly the global receiver. Indicate this by
3047 // passing the hole to the call function stub.
3048 __ push(Immediate(isolate()->factory()->undefined_value()));
3052 // The receiver is either the global receiver or an object found by
3056 } else if (call_type == Call::PROPERTY_CALL) {
3057 Property* property = callee->AsProperty();
3058 bool is_named_call = property->key()->IsPropertyName();
3059 if (property->IsSuperAccess()) {
3060 if (is_named_call) {
3061 EmitSuperCallWithLoadIC(expr);
3063 EmitKeyedSuperCallWithLoadIC(expr);
3067 PreservePositionScope scope(masm()->positions_recorder());
3068 VisitForStackValue(property->obj());
3070 if (is_named_call) {
3071 EmitCallWithLoadIC(expr);
3073 EmitKeyedCallWithLoadIC(expr, property->key());
3076 } else if (call_type == Call::SUPER_CALL) {
3077 EmitSuperConstructorCall(expr);
3079 DCHECK(call_type == Call::OTHER_CALL);
3080 // Call to an arbitrary expression not handled specially above.
3081 { PreservePositionScope scope(masm()->positions_recorder());
3082 VisitForStackValue(callee);
3084 __ push(Immediate(isolate()->factory()->undefined_value()));
3085 // Emit function call.
3090 // RecordJSReturnSite should have been called.
3091 DCHECK(expr->return_is_recorded_);
// Emits a `new` expression: evaluates the constructor and arguments onto
// the stack, loads argument count (eax) and constructor (edi), records
// call targets in the feedback vector (ebx/edx), and invokes the
// CallConstructStub. The result is plugged into the context from eax.
3096 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3097 Comment cmnt(masm_, "[ CallNew");
3098 // According to ECMA-262, section 11.2.2, page 44, the function
3099 // expression in new calls must be evaluated before the
3102 // Push constructor on the stack. If it's not a function it's used as
3103 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3105 DCHECK(!expr->expression()->IsSuperReference());
3106 VisitForStackValue(expr->expression());
3108 // Push the arguments ("left-to-right") on the stack.
3109 ZoneList<Expression*>* args = expr->arguments();
3110 int arg_count = args->length();
3111 for (int i = 0; i < arg_count; i++) {
3112 VisitForStackValue(args->at(i));
3115 // Call the construct call builtin that handles allocation and
3116 // constructor invocation.
3117 SetSourcePosition(expr->position());
3119 // Load function and argument count into edi and eax.
3120 __ Move(eax, Immediate(arg_count));
3121 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3123 // Record call targets in unoptimized code.
3124 if (FLAG_pretenuring_call_new) {
// Pretenuring relies on the allocation-site slot directly following the
// call-new feedback slot.
3125 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3126 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3127 expr->CallNewFeedbackSlot().ToInt() + 1);
3130 __ LoadHeapObject(ebx, FeedbackVector());
3131 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
3133 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3134 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3135 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3136 context()->Plug(eax);
// Emits code for a super(...) constructor call: loads new.target, pushes the
// super constructor and arguments, invokes CallConstructStub with
// SUPER_CALL_RECORD_TARGET, then verifies 'this' is still the hole (i.e. not
// yet initialized) before binding the construction result to it.
3140 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3141 if (!ValidateSuperCall(expr)) return;
3143 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3144 GetVar(eax, new_target_var);
3147 EmitLoadSuperConstructor();
3148 __ push(result_register());
3150 // Push the arguments ("left-to-right") on the stack.
3151 ZoneList<Expression*>* args = expr->arguments();
3152 int arg_count = args->length();
3153 for (int i = 0; i < arg_count; i++) {
3154 VisitForStackValue(args->at(i));
3157 // Call the construct call builtin that handles allocation and
3158 // constructor invocation.
3159 SetSourcePosition(expr->position());
3161 // Load function and argument count into edi and eax.
3162 __ Move(eax, Immediate(arg_count));
3163 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3165 // Record call targets in unoptimized code.
3166 if (FLAG_pretenuring_call_new) {
3168 /* TODO(dslomov): support pretenuring.
3169 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3170 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3171 expr->CallNewFeedbackSlot().ToInt() + 1);
3175 __ LoadHeapObject(ebx, FeedbackVector());
3176 __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
3178 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3179 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3183 RecordJSReturnSite(expr);
3185 SuperReference* super_ref = expr->expression()->AsSuperReference();
3186 Variable* this_var = super_ref->this_var()->var();
3187 GetVar(ecx, this_var);
// 'this' must still be the hole here; a non-hole value means super() was
// already called, which is a ReferenceError.
3188 __ cmp(ecx, isolate()->factory()->the_hole_value());
3189 Label uninitialized_this;
3190 __ j(equal, &uninitialized_this);
3191 __ push(Immediate(this_var->name()));
3192 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3193 __ bind(&uninitialized_this);
3195 EmitVariableAssignment(this_var, Token::INIT_CONST);
3196 context()->Plug(eax);
// Intrinsic %_IsSmi: true iff the argument in eax has the smi tag bit clear.
3200 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3201 ZoneList<Expression*>* args = expr->arguments();
3202 DCHECK(args->length() == 1);
3204 VisitForAccumulatorValue(args->at(0));
3206 Label materialize_true, materialize_false;
3207 Label* if_true = NULL;
3208 Label* if_false = NULL;
3209 Label* fall_through = NULL;
3210 context()->PrepareTest(&materialize_true, &materialize_false,
3211 &if_true, &if_false, &fall_through);
3213 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// TEST sets ZF when the tag bit is 0, i.e. the value is a smi.
3214 __ test(eax, Immediate(kSmiTagMask));
3215 Split(zero, if_true, if_false, fall_through);
3217 context()->Plug(if_true, if_false);
// Intrinsic %_IsNonNegativeSmi: true iff eax is a smi (tag bit clear) AND its
// sign bit (0x80000000) is clear — both checked with a single TEST.
3221 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3222 ZoneList<Expression*>* args = expr->arguments();
3223 DCHECK(args->length() == 1);
3225 VisitForAccumulatorValue(args->at(0));
3227 Label materialize_true, materialize_false;
3228 Label* if_true = NULL;
3229 Label* if_false = NULL;
3230 Label* fall_through = NULL;
3231 context()->PrepareTest(&materialize_true, &materialize_false,
3232 &if_true, &if_false, &fall_through);
3234 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3235 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
3236 Split(zero, if_true, if_false, fall_through);
3238 context()->Plug(if_true, if_false);
// Intrinsic %_IsObject: true for null and for non-undetectable heap objects
// whose instance type lies in the non-callable spec-object range.
3242 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3243 ZoneList<Expression*>* args = expr->arguments();
3244 DCHECK(args->length() == 1);
3246 VisitForAccumulatorValue(args->at(0));
3248 Label materialize_true, materialize_false;
3249 Label* if_true = NULL;
3250 Label* if_false = NULL;
3251 Label* fall_through = NULL;
3252 context()->PrepareTest(&materialize_true, &materialize_false,
3253 &if_true, &if_false, &fall_through);
3255 __ JumpIfSmi(eax, if_false);
// null is an object per this intrinsic's contract.
3256 __ cmp(eax, isolate()->factory()->null_value());
3257 __ j(equal, if_true);
3258 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3259 // Undetectable objects behave like undefined when tested with typeof.
3260 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
3261 __ test(ecx, Immediate(1 << Map::kIsUndetectable))
3262 __ j(not_zero, if_false);
// Range check: FIRST_NONCALLABLE <= instance type <= LAST_NONCALLABLE.
3263 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3264 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3265 __ j(below, if_false);
3266 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3267 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3268 Split(below_equal, if_true, if_false, fall_through);
3270 context()->Plug(if_true, if_false);
// Intrinsic %_IsSpecObject: true iff eax is a heap object whose instance type
// is at or above FIRST_SPEC_OBJECT_TYPE.
3274 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3275 ZoneList<Expression*>* args = expr->arguments();
3276 DCHECK(args->length() == 1);
3278 VisitForAccumulatorValue(args->at(0));
3280 Label materialize_true, materialize_false;
3281 Label* if_true = NULL;
3282 Label* if_false = NULL;
3283 Label* fall_through = NULL;
3284 context()->PrepareTest(&materialize_true, &materialize_false,
3285 &if_true, &if_false, &fall_through);
3287 __ JumpIfSmi(eax, if_false);
3288 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3289 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3290 Split(above_equal, if_true, if_false, fall_through);
3292 context()->Plug(if_true, if_false);
// Intrinsic %_IsUndetectableObject: true iff eax is a heap object whose map
// has the kIsUndetectable bit set in its bit field.
3296 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3297 ZoneList<Expression*>* args = expr->arguments();
3298 DCHECK(args->length() == 1);
3300 VisitForAccumulatorValue(args->at(0));
3302 Label materialize_true, materialize_false;
3303 Label* if_true = NULL;
3304 Label* if_false = NULL;
3305 Label* fall_through = NULL;
3306 context()->PrepareTest(&materialize_true, &materialize_false,
3307 &if_true, &if_false, &fall_through);
3309 __ JumpIfSmi(eax, if_false);
3310 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3311 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3312 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3313 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3314 Split(not_zero, if_true, if_false, fall_through);
3316 context()->Plug(if_true, if_false);
// Intrinsic used by Object.prototype.toString's fast path: decides whether a
// String wrapper object can use the default valueOf. Scans the object's own
// descriptors for a "valueOf" key, caches a positive result in a map bit, and
// finally checks that the prototype is the unmodified String prototype.
3320 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3321 CallRuntime* expr) {
3322 ZoneList<Expression*>* args = expr->arguments();
3323 DCHECK(args->length() == 1);
3325 VisitForAccumulatorValue(args->at(0));
3327 Label materialize_true, materialize_false, skip_lookup;
3328 Label* if_true = NULL;
3329 Label* if_false = NULL;
3330 Label* fall_through = NULL;
3331 context()->PrepareTest(&materialize_true, &materialize_false,
3332 &if_true, &if_false, &fall_through);
3334 __ AssertNotSmi(eax);
3336 // Check whether this map has already been checked to be safe for default
3338 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3339 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3340 1 << Map::kStringWrapperSafeForDefaultValueOf);
3341 __ j(not_zero, &skip_lookup);
3343 // Check for fast case object. Return false for slow case objects.
3344 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3345 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3346 __ cmp(ecx, isolate()->factory()->hash_table_map());
3347 __ j(equal, if_false);
3349 // Look for valueOf string in the descriptor array, and indicate false if
3350 // found. Since we omit an enumeration index check, if it is added via a
3351 // transition that shares its descriptor array, this is a false positive.
3352 Label entry, loop, done;
3354 // Skip loop if no descriptors are valid.
3355 __ NumberOfOwnDescriptors(ecx, ebx);
3359 __ LoadInstanceDescriptors(ebx, ebx);
3360 // ebx: descriptor array.
3361 // ecx: valid entries in the descriptor array.
3362 // Calculate the end of the descriptor array.
3363 STATIC_ASSERT(kSmiTag == 0);
3364 STATIC_ASSERT(kSmiTagSize == 1);
3365 STATIC_ASSERT(kPointerSize == 4);
3366 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3367 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3368 // Calculate location of the first key name.
3369 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3370 // Loop through all the keys in the descriptor array. If one of these is the
3371 // internalized string "valueOf" the result is false.
3374 __ mov(edx, FieldOperand(ebx, 0));
3375 __ cmp(edx, isolate()->factory()->value_of_string());
3376 __ j(equal, if_false);
// Advance to the next descriptor key (one descriptor = kDescriptorSize words).
3377 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3380 __ j(not_equal, &loop);
3384 // Reload map as register ebx was used as temporary above.
3385 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3387 // Set the bit in the map to indicate that there is no local valueOf field.
3388 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3389 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3391 __ bind(&skip_lookup);
3393 // If a valueOf property is not found on the object check that its
3394 // prototype is the un-modified String prototype. If not result is false.
3395 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3396 __ JumpIfSmi(ecx, if_false);
3397 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3398 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3400 FieldOperand(edx, GlobalObject::kNativeContextOffset));
3403 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3404 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3405 Split(equal, if_true, if_false, fall_through);
3407 context()->Plug(if_true, if_false);
// Intrinsic %_IsFunction: true iff eax is a heap object of JS_FUNCTION_TYPE.
3411 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3412 ZoneList<Expression*>* args = expr->arguments();
3413 DCHECK(args->length() == 1);
3415 VisitForAccumulatorValue(args->at(0));
3417 Label materialize_true, materialize_false;
3418 Label* if_true = NULL;
3419 Label* if_false = NULL;
3420 Label* fall_through = NULL;
3421 context()->PrepareTest(&materialize_true, &materialize_false,
3422 &if_true, &if_false, &fall_through);
3424 __ JumpIfSmi(eax, if_false);
3425 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3426 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3427 Split(equal, if_true, if_false, fall_through);
3429 context()->Plug(if_true, if_false);
// Intrinsic %_IsMinusZero: true iff eax is a HeapNumber whose bit pattern is
// exactly -0.0 (exponent half 0x80000000, mantissa half 0).
3433 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3434 ZoneList<Expression*>* args = expr->arguments();
3435 DCHECK(args->length() == 1);
3437 VisitForAccumulatorValue(args->at(0));
3439 Label materialize_true, materialize_false;
3440 Label* if_true = NULL;
3441 Label* if_false = NULL;
3442 Label* fall_through = NULL;
3443 context()->PrepareTest(&materialize_true, &materialize_false,
3444 &if_true, &if_false, &fall_through);
3446 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3447 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3448 // Check if the exponent half is 0x80000000. Comparing against 1 and
3449 // checking for overflow is the shortest possible encoding.
// 0x80000000 - 1 overflows (INT_MIN minus 1), so OF is set exactly when the
// high word equals 0x80000000.
3450 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3451 __ j(no_overflow, if_false);
3452 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3453 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3454 Split(equal, if_true, if_false, fall_through);
3456 context()->Plug(if_true, if_false);
// Intrinsic %_IsArray: true iff eax is a heap object of JS_ARRAY_TYPE.
3461 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3462 ZoneList<Expression*>* args = expr->arguments();
3463 DCHECK(args->length() == 1);
3465 VisitForAccumulatorValue(args->at(0));
3467 Label materialize_true, materialize_false;
3468 Label* if_true = NULL;
3469 Label* if_false = NULL;
3470 Label* fall_through = NULL;
3471 context()->PrepareTest(&materialize_true, &materialize_false,
3472 &if_true, &if_false, &fall_through);
3474 __ JumpIfSmi(eax, if_false);
3475 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3476 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3477 Split(equal, if_true, if_false, fall_through);
3479 context()->Plug(if_true, if_false);
// Intrinsic %_IsRegExp: true iff eax is a heap object of JS_REGEXP_TYPE.
3483 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3485 DCHECK(args->length() == 1);
3487 VisitForAccumulatorValue(args->at(0));
3489 Label materialize_true, materialize_false;
3490 Label* if_true = NULL;
3491 Label* if_false = NULL;
3492 Label* fall_through = NULL;
3493 context()->PrepareTest(&materialize_true, &materialize_false,
3494 &if_true, &if_false, &fall_through);
3496 __ JumpIfSmi(eax, if_false);
3497 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3498 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3499 Split(equal, if_true, if_false, fall_through);
3501 context()->Plug(if_true, if_false);
// Intrinsic %_IsJSProxy: true iff eax's instance type is within the proxy
// range [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].
// NOTE(review): the declaration of `map` is elided in this excerpt — it is
// presumably a Register alias declared just above the first use; confirm.
3505 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3506 ZoneList<Expression*>* args = expr->arguments();
3507 DCHECK(args->length() == 1);
3509 VisitForAccumulatorValue(args->at(0));
3511 Label materialize_true, materialize_false;
3512 Label* if_true = NULL;
3513 Label* if_false = NULL;
3514 Label* fall_through = NULL;
3515 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3516 &if_false, &fall_through);
3518 __ JumpIfSmi(eax, if_false);
3520 __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
3521 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3522 __ j(less, if_false);
3523 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3524 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3525 Split(less_equal, if_true, if_false, fall_through);
3527 context()->Plug(if_true, if_false);
// Intrinsic %_IsConstructCall: true iff the calling frame (skipping an
// arguments-adaptor frame if present) carries the CONSTRUCT frame marker.
3531 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3532 DCHECK(expr->arguments()->length() == 0);
3534 Label materialize_true, materialize_false;
3535 Label* if_true = NULL;
3536 Label* if_false = NULL;
3537 Label* fall_through = NULL;
3538 context()->PrepareTest(&materialize_true, &materialize_false,
3539 &if_true, &if_false, &fall_through);
3541 // Get the frame pointer for the calling frame.
3542 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3544 // Skip the arguments adaptor frame if it exists.
3545 Label check_frame_marker;
3546 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3547 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3548 __ j(not_equal, &check_frame_marker);
3549 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3551 // Check the marker in the calling frame.
3552 __ bind(&check_frame_marker);
3553 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3554 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3555 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3556 Split(equal, if_true, if_false, fall_through);
3558 context()->Plug(if_true, if_false);
// Intrinsic %_ObjectEquals: pointer-identity comparison of two values.
// NOTE(review): the pop/cmp of the first operand is elided in this excerpt
// (between PrepareTest and the bailout) — verify against the original file.
3562 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 DCHECK(args->length() == 2);
3566 // Load the two objects into registers and perform the comparison.
3567 VisitForStackValue(args->at(0));
3568 VisitForAccumulatorValue(args->at(1));
3570 Label materialize_true, materialize_false;
3571 Label* if_true = NULL;
3572 Label* if_false = NULL;
3573 Label* fall_through = NULL;
3574 context()->PrepareTest(&materialize_true, &materialize_false,
3575 &if_true, &if_false, &fall_through);
3579 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3580 Split(equal, if_true, if_false, fall_through);
3582 context()->Plug(if_true, if_false);
// Intrinsic %_Arguments(index): reads one element of the current function's
// arguments via ArgumentsAccessStub (key in edx per stub contract, formal
// parameter count in eax).
3586 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3587 ZoneList<Expression*>* args = expr->arguments();
3588 DCHECK(args->length() == 1);
3590 // ArgumentsAccessStub expects the key in edx and the formal
3591 // parameter count in eax.
3592 VisitForAccumulatorValue(args->at(0));
3594 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3595 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3597 context()->Plug(eax);
// Intrinsic %_ArgumentsLength: yields the formal parameter count, or — when
// the caller went through an arguments adaptor frame — the actual argument
// count read from that frame.
// NOTE(review): the declaration/bind of the `exit` label is elided here.
3601 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3602 DCHECK(expr->arguments()->length() == 0);
3605 // Get the number of formal parameters.
3606 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3608 // Check if the calling frame is an arguments adaptor frame.
3609 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3610 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3611 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3612 __ j(not_equal, &exit);
3614 // Arguments adaptor case: Read the arguments length from the
3616 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3620 context()->Plug(eax);
// Intrinsic %_ClassOf: returns the [[Class]]-style name of the argument —
// null for smis/non-objects, "Function" for callables, "Object" for objects
// with a non-function constructor, else the constructor's instance class name.
3624 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3625 ZoneList<Expression*>* args = expr->arguments();
3626 DCHECK(args->length() == 1);
3627 Label done, null, function, non_function_constructor;
3629 VisitForAccumulatorValue(args->at(0));
3631 // If the object is a smi, we return null.
3632 __ JumpIfSmi(eax, &null);
3634 // Check that the object is a JS object but take special care of JS
3635 // functions to make sure they have 'Function' as their class.
3636 // Assume that there are only two callable types, and one of them is at
3637 // either end of the type range for JS object types. Saves extra comparisons.
3638 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3639 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3640 // Map is now in eax.
3642 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3643 FIRST_SPEC_OBJECT_TYPE + 1);
3644 __ j(equal, &function);
3646 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3647 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3648 LAST_SPEC_OBJECT_TYPE - 1);
3649 __ j(equal, &function);
3650 // Assume that there is no larger type.
3651 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3653 // Check if the constructor in the map is a JS function.
3654 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
3655 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3656 __ j(not_equal, &non_function_constructor);
3658 // eax now contains the constructor function. Grab the
3659 // instance class name from there.
3660 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3661 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3664 // Functions have class 'Function'.
3666 __ mov(eax, isolate()->factory()->Function_string());
3669 // Objects with a non-function constructor have class 'Object'.
3670 __ bind(&non_function_constructor);
3671 __ mov(eax, isolate()->factory()->Object_string());
3674 // Non-JS objects have class null.
3676 __ mov(eax, isolate()->factory()->null_value());
3681 context()->Plug(eax);
// Intrinsic %_SubString(string, from, to): pushes the three arguments and
// delegates to SubStringStub; result lands in eax.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
3685 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3686 // Load the arguments on the stack and call the stub.
3687 SubStringStub stub(isolate());
3688 ZoneList<Expression*>* args = expr->arguments();
3689 DCHECK(args->length() == 3);
3690 VisitForStackValue(args->at(0));
3691 VisitForStackValue(args->at(1));
3692 VisitForStackValue(args->at(2));
3694 context()->Plug(eax);
// Intrinsic %_RegExpExec: pushes the four arguments and delegates to
// RegExpExecStub; result lands in eax.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
3698 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3699 // Load the arguments on the stack and call the stub.
3700 RegExpExecStub stub(isolate());
3701 ZoneList<Expression*>* args = expr->arguments();
3702 DCHECK(args->length() == 4);
3703 VisitForStackValue(args->at(0));
3704 VisitForStackValue(args->at(1));
3705 VisitForStackValue(args->at(2));
3706 VisitForStackValue(args->at(3));
3708 context()->Plug(eax);
// Intrinsic %_ValueOf: unwraps a JSValue wrapper to its primitive value;
// smis and non-JSValue objects are returned unchanged.
3712 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3713 ZoneList<Expression*>* args = expr->arguments();
3714 DCHECK(args->length() == 1);
3716 VisitForAccumulatorValue(args->at(0)); // Load the object.
3719 // If the object is a smi return the object.
3720 __ JumpIfSmi(eax, &done, Label::kNear);
3721 // If the object is not a value type, return the object.
3722 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3723 __ j(not_equal, &done, Label::kNear);
3724 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3727 context()->Plug(eax);
// Intrinsic %_DateField(date, index): reads a field from a JSDate. Index 0
// (the time value) is read directly; other cached fields are read inline when
// the date-cache stamp matches, otherwise (and for uncached fields) the C
// function get_date_field_function is called. Non-JSDate receivers throw.
// Fix: the label references were mojibake ("¬_date_object") — restored to
// "&not_date_object", matching the Label declared below.
3731 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3732 ZoneList<Expression*>* args = expr->arguments();
3733 DCHECK(args->length() == 2);
3734 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3735 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3737 VisitForAccumulatorValue(args->at(0)); // Load the object.
3739 Label runtime, done, not_date_object;
3740 Register object = eax;
3741 Register result = eax;
3742 Register scratch = ecx;
3744 __ JumpIfSmi(object, &not_date_object);
3745 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3746 __ j(not_equal, &not_date_object);
3748 if (index->value() == 0) {
// Field 0 is the date's time value, always stored directly.
3749 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3752 if (index->value() < JSDate::kFirstUncachedField) {
// Cached fields are valid only while the isolate-wide stamp matches.
3753 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3754 __ mov(scratch, Operand::StaticVariable(stamp));
3755 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3756 __ j(not_equal, &runtime, Label::kNear);
3757 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3758 kPointerSize * index->value()));
// Slow path: compute the field via the C date library.
3762 __ PrepareCallCFunction(2, scratch);
3763 __ mov(Operand(esp, 0), object);
3764 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3765 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3769 __ bind(&not_date_object);
3770 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3772 context()->Plug(result);
// Intrinsic %_OneByteSeqStringSetChar(index, value, string): stores a char
// into a sequential one-byte string. Debug builds verify both operands are
// smis and that the string really is a one-byte sequential string.
// Fix: the debug Check after testing `index` aborted with kNonSmiValue; the
// operand under test is the index, so the abort reason is kNonSmiIndex (the
// preceding check on `value` correctly uses kNonSmiValue).
3776 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3777 ZoneList<Expression*>* args = expr->arguments();
3778 DCHECK_EQ(3, args->length());
3780 Register string = eax;
3781 Register index = ebx;
3782 Register value = ecx;
3784 VisitForStackValue(args->at(0)); // index
3785 VisitForStackValue(args->at(1)); // value
3786 VisitForAccumulatorValue(args->at(2)); // string
3791 if (FLAG_debug_code) {
3792 __ test(value, Immediate(kSmiTagMask));
3793 __ Check(zero, kNonSmiValue);
3794 __ test(index, Immediate(kSmiTagMask));
3795 __ Check(zero, kNonSmiIndex);
3801 if (FLAG_debug_code) {
3802 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3803 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
// One-byte store: index scales by 1 byte.
3806 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3808 context()->Plug(string);
// Intrinsic %_TwoByteSeqStringSetChar(index, value, string): stores a char
// into a sequential two-byte string. Debug builds verify both operands are
// smis and that the string really is a two-byte sequential string.
// Fix: same swapped abort reason as the one-byte variant — the Check on
// `index` must report kNonSmiIndex, not kNonSmiValue.
3812 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3813 ZoneList<Expression*>* args = expr->arguments();
3814 DCHECK_EQ(3, args->length());
3816 Register string = eax;
3817 Register index = ebx;
3818 Register value = ecx;
3820 VisitForStackValue(args->at(0)); // index
3821 VisitForStackValue(args->at(1)); // value
3822 VisitForAccumulatorValue(args->at(2)); // string
3826 if (FLAG_debug_code) {
3827 __ test(value, Immediate(kSmiTagMask));
3828 __ Check(zero, kNonSmiValue);
3829 __ test(index, Immediate(kSmiTagMask));
3830 __ Check(zero, kNonSmiIndex);
3832 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3833 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3838 // No need to untag a smi for two-byte addressing.
3839 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3841 context()->Plug(string);
// Intrinsic %_MathPow(base, exponent): delegates to MathPowStub with both
// operands on the stack; result lands in eax.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
3845 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3846 // Load the arguments on the stack and call the runtime function.
3847 ZoneList<Expression*>* args = expr->arguments();
3848 DCHECK(args->length() == 2);
3849 VisitForStackValue(args->at(0));
3850 VisitForStackValue(args->at(1));
3852 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3854 context()->Plug(eax);
// Intrinsic %_SetValueOf(object, value): stores `value` into a JSValue
// wrapper (with write barrier) and yields `value`; non-JSValue receivers and
// smis are left untouched and `value` is returned as-is.
3858 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3859 ZoneList<Expression*>* args = expr->arguments();
3860 DCHECK(args->length() == 2);
3862 VisitForStackValue(args->at(0)); // Load the object.
3863 VisitForAccumulatorValue(args->at(1)); // Load the value.
3864 __ pop(ebx); // eax = value. ebx = object.
3867 // If the object is a smi, return the value.
3868 __ JumpIfSmi(ebx, &done, Label::kNear);
3870 // If the object is not a value type, return the value.
3871 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3872 __ j(not_equal, &done, Label::kNear);
3875 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3877 // Update the write barrier. Save the value as it will be
3878 // overwritten by the write barrier code and is needed afterward.
3880 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3883 context()->Plug(eax);
// Intrinsic %_NumberToString: converts the accumulator via NumberToStringStub.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
3887 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3888 ZoneList<Expression*>* args = expr->arguments();
3889 DCHECK_EQ(args->length(), 1);
3891 // Load the argument into eax and call the stub.
3892 VisitForAccumulatorValue(args->at(0));
3894 NumberToStringStub stub(isolate());
3896 context()->Plug(eax);
// Intrinsic %_StringCharFromCode: converts a char code (eax) to a one-char
// string in ebx via StringCharFromCodeGenerator's fast path, with a slow-path
// fallback emitted through GenerateSlow.
3900 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3901 ZoneList<Expression*>* args = expr->arguments();
3902 DCHECK(args->length() == 1);
3904 VisitForAccumulatorValue(args->at(0));
3907 StringCharFromCodeGenerator generator(eax, ebx);
3908 generator.GenerateFast(masm_);
3911 NopRuntimeCallHelper call_helper;
3912 generator.GenerateSlow(masm_, call_helper);
3915 context()->Plug(ebx);
// Intrinsic %_StringCharCodeAt(string, index): fast-path char-code load via
// StringCharCodeAtGenerator; out-of-range indices yield NaN, non-smi indices
// trigger conversion by loading undefined into the result.
3919 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3920 ZoneList<Expression*>* args = expr->arguments();
3921 DCHECK(args->length() == 2);
3923 VisitForStackValue(args->at(0));
3924 VisitForAccumulatorValue(args->at(1));
3926 Register object = ebx;
3927 Register index = eax;
3928 Register result = edx;
3932 Label need_conversion;
3933 Label index_out_of_range;
3935 StringCharCodeAtGenerator generator(object,
3940 &index_out_of_range,
3941 STRING_INDEX_IS_NUMBER);
3942 generator.GenerateFast(masm_);
3945 __ bind(&index_out_of_range);
3946 // When the index is out of range, the spec requires us to return
3948 __ Move(result, Immediate(isolate()->factory()->nan_value()));
3951 __ bind(&need_conversion);
3952 // Move the undefined value into the result register, which will
3953 // trigger conversion.
3954 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3957 NopRuntimeCallHelper call_helper;
3958 generator.GenerateSlow(masm_, call_helper);
3961 context()->Plug(result);
// Intrinsic %_StringCharAt(string, index): like EmitStringCharCodeAt but
// produces a one-char string; out-of-range indices yield the empty string,
// non-smi indices trigger conversion via smi zero.
3965 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3966 ZoneList<Expression*>* args = expr->arguments();
3967 DCHECK(args->length() == 2);
3969 VisitForStackValue(args->at(0));
3970 VisitForAccumulatorValue(args->at(1));
3972 Register object = ebx;
3973 Register index = eax;
3974 Register scratch = edx;
3975 Register result = eax;
3979 Label need_conversion;
3980 Label index_out_of_range;
3982 StringCharAtGenerator generator(object,
3988 &index_out_of_range,
3989 STRING_INDEX_IS_NUMBER);
3990 generator.GenerateFast(masm_);
3993 __ bind(&index_out_of_range);
3994 // When the index is out of range, the spec requires us to return
3995 // the empty string.
3996 __ Move(result, Immediate(isolate()->factory()->empty_string()));
3999 __ bind(&need_conversion);
4000 // Move smi zero into the result register, which will trigger
4002 __ Move(result, Immediate(Smi::FromInt(0)));
4005 NopRuntimeCallHelper call_helper;
4006 generator.GenerateSlow(masm_, call_helper);
4009 context()->Plug(result);
// Intrinsic %_StringAdd(left, right): concatenation via StringAddStub with
// both-operand checking and non-tenured allocation.
// NOTE(review): the stub invocation line is elided in this excerpt.
4013 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4014 ZoneList<Expression*>* args = expr->arguments();
4015 DCHECK_EQ(2, args->length());
4016 VisitForStackValue(args->at(0));
4017 VisitForAccumulatorValue(args->at(1));
4020 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4022 context()->Plug(eax);
// Intrinsic %_StringCompare(left, right): delegates to StringCompareStub with
// both operands on the stack.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
4026 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4027 ZoneList<Expression*>* args = expr->arguments();
4028 DCHECK_EQ(2, args->length());
4030 VisitForStackValue(args->at(0));
4031 VisitForStackValue(args->at(1));
4033 StringCompareStub stub(isolate());
4035 context()->Plug(eax);
// Intrinsic %_CallFunction(receiver, args..., function): invokes the function
// directly when it is a real JSFunction, otherwise falls back to the kCall
// runtime (which also handles proxies).
4039 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4040 ZoneList<Expression*>* args = expr->arguments();
4041 DCHECK(args->length() >= 2);
4043 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4044 for (int i = 0; i < arg_count + 1; ++i) {
4045 VisitForStackValue(args->at(i));
4047 VisitForAccumulatorValue(args->last()); // Function.
4049 Label runtime, done;
4050 // Check for non-function argument (including proxy).
4051 __ JumpIfSmi(eax, &runtime);
4052 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4053 __ j(not_equal, &runtime);
4055 // InvokeFunction requires the function in edi. Move it in there.
4056 __ mov(edi, result_register());
4057 ParameterCount count(arg_count);
4058 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
// Restore the context register after the call.
4059 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4064 __ CallRuntime(Runtime::kCall, args->length());
4067 context()->Plug(eax);
// Intrinsic used by synthesized default constructors: forwards the caller's
// arguments (copied from the arguments adaptor frame, if any) to the super
// constructor via CallConstructStub(SUPER_CONSTRUCTOR_CALL).
4071 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4072 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4073 GetVar(eax, new_target_var);
4076 EmitLoadSuperConstructor();
4077 __ push(result_register());
4079 // Check if the calling frame is an arguments adaptor frame.
4080 Label adaptor_frame, args_set_up, runtime;
4081 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4082 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
4083 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4084 __ j(equal, &adaptor_frame);
4085 // default constructor has no arguments, so no adaptor frame means no args.
4086 __ mov(eax, Immediate(0));
4087 __ jmp(&args_set_up);
4089 // Copy arguments from adaptor frame.
4091 __ bind(&adaptor_frame);
4092 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4095 // Subtract 1 from arguments count, for new.target.
4096 __ sub(ecx, Immediate(1));
4098 __ lea(edx, Operand(edx, ecx, times_pointer_size,
4099 StandardFrameConstants::kCallerSPOffset));
// Copy loop: push each caller argument, walking edx downward.
// NOTE(review): the loop label bind and the counter decrement appear elided
// in this excerpt — the j(not_zero, &loop) below targets that hidden bind.
4102 __ push(Operand(edx, -1 * kPointerSize));
4103 __ sub(edx, Immediate(kPointerSize));
4105 __ j(not_zero, &loop);
4108 __ bind(&args_set_up);
4110 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4111 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4112 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4113 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4117 context()->Plug(eax);
// Intrinsic %_RegExpConstructResult: builds a regexp match result object via
// RegExpConstructResultStub.
// NOTE(review): the `__ CallStub(&stub);` line is elided in this excerpt.
4121 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4122 // Load the arguments on the stack and call the stub.
4123 RegExpConstructResultStub stub(isolate());
4124 ZoneList<Expression*>* args = expr->arguments();
4125 DCHECK(args->length() == 3);
4126 VisitForStackValue(args->at(0));
4127 VisitForStackValue(args->at(1));
4128 VisitForAccumulatorValue(args->at(2));
4132 context()->Plug(eax);
// Intrinsic %_GetFromCache(cache_id, key): probes the JSFunctionResultCache
// at the cache's current finger; on a hit the cached value is returned, on a
// miss the kGetFromCache runtime performs the lookup. An out-of-range
// cache_id aborts (debug) and yields undefined.
// Fix: the label references were mojibake ("¬_found") — restored to
// "&not_found", matching `Label done, not_found;` declared below.
// NOTE(review): the declarations of the `key`/`tmp` register aliases are
// elided in this excerpt — confirm against the original file.
4136 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4137 ZoneList<Expression*>* args = expr->arguments();
4138 DCHECK_EQ(2, args->length());
4140 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4141 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4143 Handle<FixedArray> jsfunction_result_caches(
4144 isolate()->native_context()->jsfunction_result_caches());
4145 if (jsfunction_result_caches->length() <= cache_id) {
4146 __ Abort(kAttemptToUseUndefinedCache);
4147 __ mov(eax, isolate()->factory()->undefined_value());
4148 context()->Plug(eax);
4152 VisitForAccumulatorValue(args->at(1));
4155 Register cache = ebx;
4157 __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
4159 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4160 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4162 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4164 Label done, not_found;
4165 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4166 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4167 // tmp now holds finger offset as a smi.
4168 __ cmp(key, FixedArrayElementOperand(cache, tmp));
4169 __ j(not_equal, &not_found);
// Hit: the value is stored in the slot following the key.
4171 __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
4174 __ bind(&not_found);
4175 // Call runtime to perform the lookup.
4178 __ CallRuntime(Runtime::kGetFromCache, 2);
4181 context()->Plug(eax);
// Inline intrinsic %_HasCachedArrayIndex: tests whether the string in eax
// carries a cached array index in its hash field. Materializes a boolean in
// whatever context the expression is evaluated in, using the PrepareTest /
// Split branch-plumbing machinery.
4185 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4186 ZoneList<Expression*>* args = expr->arguments();
4187 DCHECK(args->length() == 1);
4189 VisitForAccumulatorValue(args->at(0));
4191 __ AssertString(eax);
4193 Label materialize_true, materialize_false;
4194 Label* if_true = NULL;
4195 Label* if_false = NULL;
4196 Label* fall_through = NULL;
4197 context()->PrepareTest(&materialize_true, &materialize_false,
4198 &if_true, &if_false, &fall_through);
// The mask bit being clear means "contains a cached array index", hence
// Split on `zero`.
4200 __ test(FieldOperand(eax, String::kHashFieldOffset),
4201 Immediate(String::kContainsCachedArrayIndexMask));
4202 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4203 Split(zero, if_true, if_false, fall_through);
4205 context()->Plug(if_true, if_false);
// Inline intrinsic %_GetCachedArrayIndex: extracts the array index cached in
// a string's hash field. Callers must have checked HasCachedArrayIndex first;
// IndexFromHash decodes the index (as a smi) from the raw hash-field bits.
4209 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4210 ZoneList<Expression*>* args = expr->arguments();
4211 DCHECK(args->length() == 1);
4212 VisitForAccumulatorValue(args->at(0));
4214 __ AssertString(eax);
4216 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
4217 __ IndexFromHash(eax, eax);
4219 context()->Plug(eax);
// Inline fast path for Array.prototype.join on arrays of flat one-byte
// strings with a flat one-byte separator. Layout: the separator is pushed
// first (args->at(1)); two extra stack slots are reserved for the result
// and the untagged array length. The code bails out to the `bailout` label
// (which, in the elided portion, falls back to writing undefined / the
// generic path) whenever any element or the separator is not a sequential
// one-byte string, or a length computation overflows.
//
// Three copy loops are emitted depending on separator length:
//   loop_1 — empty separator, plain concatenation;
//   loop_2 — single-character separator, stored as a raw byte;
//   loop_3 — multi-character separator, copied with CopyBytes.
//
// FIX(review): two label references had been mojibake-corrupted by an
// HTML-entity pass ("&not" decoded to ¬), turning "&not_size_one_array"
// into "¬_size_one_array". Restored the correct address-of-label syntax,
// matching the declared label "not_size_one_array".
4223 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4224 Label bailout, done, one_char_separator, long_separator,
4225 non_trivial_array, not_size_one_array, loop,
4226 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4228 ZoneList<Expression*>* args = expr->arguments();
4229 DCHECK(args->length() == 2);
4230 // We will leave the separator on the stack until the end of the function.
4231 VisitForStackValue(args->at(1));
4232 // Load this to eax (= array)
4233 VisitForAccumulatorValue(args->at(0));
4234 // All aliases of the same register have disjoint lifetimes.
4235 Register array = eax;
4236 Register elements = no_reg; // Will be eax.
4238 Register index = edx;
4240 Register string_length = ecx;
4242 Register string = esi;
4244 Register scratch = ebx;
4246 Register array_length = edi;
4247 Register result_pos = no_reg; // Will be edi.
4249 // Separator operand is already pushed.
4250 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4251 Operand result_operand = Operand(esp, 1 * kPointerSize);
4252 Operand array_length_operand = Operand(esp, 0);
4253 __ sub(esp, Immediate(2 * kPointerSize));
4255 // Check that the array is a JSArray
4256 __ JumpIfSmi(array, &bailout);
4257 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4258 __ j(not_equal, &bailout);
4260 // Check that the array has fast elements.
4261 __ CheckFastElements(scratch, &bailout);
4263 // If the array has length zero, return the empty string.
4264 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4265 __ SmiUntag(array_length);
4266 __ j(not_zero, &non_trivial_array);
4267 __ mov(result_operand, isolate()->factory()->empty_string());
4270 // Save the array length.
4271 __ bind(&non_trivial_array);
4272 __ mov(array_length_operand, array_length);
4274 // Save the FixedArray containing array's elements.
4275 // End of array's live range.
4277 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4281 // Check that all array elements are sequential one-byte strings, and
4282 // accumulate the sum of their lengths, as a smi-encoded value.
4283 __ Move(index, Immediate(0));
4284 __ Move(string_length, Immediate(0));
4285 // Loop condition: while (index < length).
4286 // Live loop registers: index, array_length, string,
4287 // scratch, string_length, elements.
4288 if (generate_debug_code_) {
4289 __ cmp(index, array_length);
4290 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4293 __ mov(string, FieldOperand(elements,
4296 FixedArray::kHeaderSize));
4297 __ JumpIfSmi(string, &bailout);
4298 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4299 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
// Require exactly: string, one-byte encoding, sequential representation.
4300 __ and_(scratch, Immediate(
4301 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4302 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4303 __ j(not_equal, &bailout);
4304 __ add(string_length,
4305 FieldOperand(string, SeqOneByteString::kLengthOffset));
4306 __ j(overflow, &bailout);
4307 __ add(index, Immediate(1));
4308 __ cmp(index, array_length);
4311 // If array_length is 1, return elements[0], a string.
4312 __ cmp(array_length, 1);
4313 __ j(not_equal, &not_size_one_array);
4314 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4315 __ mov(result_operand, scratch);
4318 __ bind(&not_size_one_array);
4320 // End of array_length live range.
4321 result_pos = array_length;
4322 array_length = no_reg;
4325 // string_length: Sum of string lengths, as a smi.
4326 // elements: FixedArray of strings.
4328 // Check that the separator is a flat one-byte string.
4329 __ mov(string, separator_operand);
4330 __ JumpIfSmi(string, &bailout);
4331 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4332 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4333 __ and_(scratch, Immediate(
4334 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4335 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4336 __ j(not_equal, &bailout);
4338 // Add (separator length times array_length) - separator length
4339 // to string_length.
4340 __ mov(scratch, separator_operand);
4341 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4342 __ sub(string_length, scratch); // May be negative, temporarily.
4343 __ imul(scratch, array_length_operand);
4344 __ j(overflow, &bailout);
4345 __ add(string_length, scratch);
4346 __ j(overflow, &bailout);
// Untag the smi total length (smis are value << 1 on ia32).
4348 __ shr(string_length, 1);
4349 // Live registers and stack values:
4352 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4354 __ mov(result_operand, result_pos);
4355 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
// Dispatch on separator length: 0 (fall through), 1, or >1.
4358 __ mov(string, separator_operand);
4359 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4360 Immediate(Smi::FromInt(1)));
4361 __ j(equal, &one_char_separator);
4362 __ j(greater, &long_separator);
4365 // Empty separator case
4366 __ mov(index, Immediate(0));
4367 __ jmp(&loop_1_condition);
4368 // Loop condition: while (index < length).
4370 // Each iteration of the loop concatenates one string to the result.
4371 // Live values in registers:
4372 // index: which element of the elements array we are adding to the result.
4373 // result_pos: the position to which we are currently copying characters.
4374 // elements: the FixedArray of strings we are joining.
4376 // Get string = array[index].
4377 __ mov(string, FieldOperand(elements, index,
4379 FixedArray::kHeaderSize));
4380 __ mov(string_length,
4381 FieldOperand(string, String::kLengthOffset));
4382 __ shr(string_length, 1);
4384 FieldOperand(string, SeqOneByteString::kHeaderSize));
4385 __ CopyBytes(string, result_pos, string_length, scratch);
4386 __ add(index, Immediate(1));
4387 __ bind(&loop_1_condition);
4388 __ cmp(index, array_length_operand);
4389 __ j(less, &loop_1); // End while (index < length).
4394 // One-character separator case
4395 __ bind(&one_char_separator);
4396 // Replace separator with its one-byte character value.
4397 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4398 __ mov_b(separator_operand, scratch);
4400 __ Move(index, Immediate(0));
4401 // Jump into the loop after the code that copies the separator, so the first
4402 // element is not preceded by a separator
4403 __ jmp(&loop_2_entry);
4404 // Loop condition: while (index < length).
4406 // Each iteration of the loop concatenates one string to the result.
4407 // Live values in registers:
4408 // index: which element of the elements array we are adding to the result.
4409 // result_pos: the position to which we are currently copying characters.
4411 // Copy the separator character to the result.
4412 __ mov_b(scratch, separator_operand);
4413 __ mov_b(Operand(result_pos, 0), scratch);
4416 __ bind(&loop_2_entry);
4417 // Get string = array[index].
4418 __ mov(string, FieldOperand(elements, index,
4420 FixedArray::kHeaderSize));
4421 __ mov(string_length,
4422 FieldOperand(string, String::kLengthOffset));
4423 __ shr(string_length, 1);
4425 FieldOperand(string, SeqOneByteString::kHeaderSize));
4426 __ CopyBytes(string, result_pos, string_length, scratch);
4427 __ add(index, Immediate(1));
4429 __ cmp(index, array_length_operand);
4430 __ j(less, &loop_2); // End while (index < length).
4434 // Long separator case (separator is more than one character).
4435 __ bind(&long_separator);
4437 __ Move(index, Immediate(0));
4438 // Jump into the loop after the code that copies the separator, so the first
4439 // element is not preceded by a separator
4440 __ jmp(&loop_3_entry);
4441 // Loop condition: while (index < length).
4443 // Each iteration of the loop concatenates one string to the result.
4444 // Live values in registers:
4445 // index: which element of the elements array we are adding to the result.
4446 // result_pos: the position to which we are currently copying characters.
4448 // Copy the separator to the result.
4449 __ mov(string, separator_operand);
4450 __ mov(string_length,
4451 FieldOperand(string, String::kLengthOffset));
4452 __ shr(string_length, 1);
4454 FieldOperand(string, SeqOneByteString::kHeaderSize));
4455 __ CopyBytes(string, result_pos, string_length, scratch);
4457 __ bind(&loop_3_entry);
4458 // Get string = array[index].
4459 __ mov(string, FieldOperand(elements, index,
4461 FixedArray::kHeaderSize));
4462 __ mov(string_length,
4463 FieldOperand(string, String::kLengthOffset));
4464 __ shr(string_length, 1);
4466 FieldOperand(string, SeqOneByteString::kHeaderSize));
4467 __ CopyBytes(string, result_pos, string_length, scratch);
4468 __ add(index, Immediate(1));
4470 __ cmp(index, array_length_operand);
4471 __ j(less, &loop_3); // End while (index < length).
// Bailout path (label bind elided in this excerpt): the join could not be
// done inline, so produce undefined as the intrinsic's result.
4476 __ mov(result_operand, isolate()->factory()->undefined_value());
4478 __ mov(eax, result_operand);
4479 // Drop temp values from the stack, and restore context register.
4480 __ add(esp, Immediate(3 * kPointerSize));
4482 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4483 context()->Plug(eax);
// Inline intrinsic %_DebugIsActive: loads the isolate's debug-is-active byte
// flag (zero-extended) into eax. The raw 0/1 value is plugged into the
// expression context; the elided line presumably smi-tags it — confirm
// against the original file.
4487 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4488 DCHECK(expr->arguments()->length() == 0);
4489 ExternalReference debug_is_active =
4490 ExternalReference::debug_is_active_address(isolate());
4491 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4493 context()->Plug(eax);
// Compiles a runtime call node. Three dispatch paths:
//   1. INLINE intrinsics (%_Foo) — generated by EmitInlineRuntimeCall.
//   2. is_jsruntime() — a call to a JS builtin: load the function off the
//      builtins object via a load IC, then call through CallFunctionStub.
//   3. Otherwise — a plain C++ runtime function via __ CallRuntime.
4497 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4498 if (expr->function() != NULL &&
4499 expr->function()->intrinsic_type == Runtime::INLINE) {
4500 Comment cmnt(masm_, "[ InlineRuntimeCall");
4501 EmitInlineRuntimeCall(expr);
4505 Comment cmnt(masm_, "[ CallRuntime");
4506 ZoneList<Expression*>* args = expr->arguments();
4508 if (expr->is_jsruntime()) {
4509 // Push the builtins object as receiver.
4510 __ mov(eax, GlobalObjectOperand());
4511 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4513 // Load the function from the receiver.
4514 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4515 __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
// With vector ICs, the feedback slot travels in a dedicated register.
4516 if (FLAG_vector_ics) {
4517 __ mov(VectorLoadICDescriptor::SlotRegister(),
4518 Immediate(SmiFromSlot(expr->CallRuntimeFeedbackSlot())))
4519 CallLoadIC(NOT_CONTEXTUAL);
4521 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4524 // Push the target function under the receiver.
4525 __ push(Operand(esp, 0));
4526 __ mov(Operand(esp, kPointerSize), eax);
4528 // Code common for calls using the IC.
4529 ZoneList<Expression*>* args = expr->arguments();
4530 int arg_count = args->length();
4531 for (int i = 0; i < arg_count; i++) {
4532 VisitForStackValue(args->at(i));
4535 // Record source position of the IC call.
4536 SetSourcePosition(expr->position());
4537 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
// edi must hold the function object for CallFunctionStub.
4538 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4540 // Restore context register.
4541 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Drop the receiver that is still on the stack and plug the result.
4542 context()->DropAndPlug(1, eax);
4545 // Push the arguments ("left-to-right").
4546 int arg_count = args->length();
4547 for (int i = 0; i < arg_count; i++) {
4548 VisitForStackValue(args->at(i));
4551 // Call the C runtime function.
4552 __ CallRuntime(expr->function(), arg_count);
4554 context()->Plug(eax);
// Compiles the unary operators: delete, void, !, typeof. Each case plugs
// its result into the current expression context. Several else-branches and
// closing braces are elided from this excerpt.
4559 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4560 switch (expr->op()) {
4561 case Token::DELETE: {
4562 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4563 Property* property = expr->expression()->AsProperty();
4564 VariableProxy* proxy = expr->expression()->AsVariableProxy();
// delete obj[key]: dispatch to the DELETE builtin with the current
// language mode.
4566 if (property != NULL) {
4567 VisitForStackValue(property->obj());
4568 VisitForStackValue(property->key());
4569 __ push(Immediate(Smi::FromInt(language_mode())));
4570 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4571 context()->Plug(eax);
4572 } else if (proxy != NULL) {
4573 Variable* var = proxy->var();
4574 // Delete of an unqualified identifier is disallowed in strict mode
4575 // but "delete this" is allowed.
4576 DCHECK(is_sloppy(language_mode()) || var->is_this());
// Global (unallocated) variable: delete through the builtin, always
// in sloppy mode.
4577 if (var->IsUnallocated()) {
4578 __ push(GlobalObjectOperand());
4579 __ push(Immediate(var->name()));
4580 __ push(Immediate(Smi::FromInt(SLOPPY)));
4581 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4582 context()->Plug(eax);
4583 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4584 // Result of deleting non-global variables is false. 'this' is
4585 // not really a variable, though we implement it as one. The
4586 // subexpression does not have side effects.
4587 context()->Plug(var->is_this());
4589 // Non-global variable. Call the runtime to try to delete from the
4590 // context where the variable was introduced.
4591 __ push(context_register());
4592 __ push(Immediate(var->name()));
4593 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4594 context()->Plug(eax);
4597 // Result of deleting non-property, non-variable reference is true.
4598 // The subexpression may have side effects.
4599 VisitForEffect(expr->expression());
4600 context()->Plug(true);
// Token::VOID (case label elided): evaluate for effect, yield undefined.
4606 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4607 VisitForEffect(expr->expression());
4608 context()->Plug(isolate()->factory()->undefined_value());
// Token::NOT (case label elided): handled per-context to avoid
// materializing a boolean when not needed.
4613 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4614 if (context()->IsEffect()) {
4615 // Unary NOT has no side effects so it's only necessary to visit the
4616 // subexpression. Match the optimizing compiler by not branching.
4617 VisitForEffect(expr->expression());
4618 } else if (context()->IsTest()) {
4619 const TestContext* test = TestContext::cast(context());
4620 // The labels are swapped for the recursive call.
4621 VisitForControl(expr->expression(),
4622 test->false_label(),
4624 test->fall_through());
4625 context()->Plug(test->true_label(), test->false_label());
4627 // We handle value contexts explicitly rather than simply visiting
4628 // for control and plugging the control flow into the context,
4629 // because we need to prepare a pair of extra administrative AST ids
4630 // for the optimizing compiler.
4631 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4632 Label materialize_true, materialize_false, done;
4633 VisitForControl(expr->expression(),
4637 __ bind(&materialize_true);
4638 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4639 if (context()->IsAccumulatorValue()) {
4640 __ mov(eax, isolate()->factory()->true_value());
4642 __ Push(isolate()->factory()->true_value());
4644 __ jmp(&done, Label::kNear);
4645 __ bind(&materialize_false);
4646 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4647 if (context()->IsAccumulatorValue()) {
4648 __ mov(eax, isolate()->factory()->false_value());
4650 __ Push(isolate()->factory()->false_value());
4657 case Token::TYPEOF: {
4658 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
// Evaluate in a StackValueContext so the operand is pushed as the
// runtime call's argument.
4659 { StackValueContext context(this);
4660 VisitForTypeofValue(expr->expression());
4662 __ CallRuntime(Runtime::kTypeof, 1);
4663 context()->Plug(eax);
// Compiles ++/-- (prefix and postfix) on variables and all four property
// kinds (named/keyed, plain/super). Phases:
//   1. Load the current value into eax, leaving the receiver/key on the
//      stack for the later store.
//   2. Inline smi fast path (add/sub of Smi 1 with overflow check), patched
//      via JumpPatchSite; slow path converts with ToNumberStub and calls the
//      BinaryOpIC.
//   3. Store the new value back and plug the correct (old or new) value
//      depending on postfix-ness and context.
// Many case labels, else-branches and braces are elided in this excerpt.
4673 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4674 DCHECK(expr->expression()->IsValidReferenceExpression());
4676 Comment cmnt(masm_, "[ CountOperation");
4677 SetSourcePosition(expr->position());
4679 Property* prop = expr->expression()->AsProperty();
4680 LhsKind assign_type = GetAssignType(prop);
4682 // Evaluate expression and get value.
4683 if (assign_type == VARIABLE) {
4684 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4685 AccumulatorValueContext context(this);
4686 EmitVariableLoad(expr->expression()->AsVariableProxy());
4688 // Reserve space for result of postfix operation.
4689 if (expr->is_postfix() && !context()->IsEffect()) {
4690 __ push(Immediate(Smi::FromInt(0)));
4692 switch (assign_type) {
4693 case NAMED_PROPERTY: {
4694 // Put the object both on the stack and in the register.
4695 VisitForStackValue(prop->obj());
4696 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4697 EmitNamedPropertyLoad(prop);
4701 case NAMED_SUPER_PROPERTY: {
// Stack after this case: this, home_object (duplicated for the load).
4702 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4703 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4704 __ push(result_register());
4705 __ push(MemOperand(esp, kPointerSize));
4706 __ push(result_register());
4707 EmitNamedSuperPropertyLoad(prop);
4711 case KEYED_SUPER_PROPERTY: {
// Stack after this case: this, home_object, key (plus duplicates
// consumed by the load).
4712 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4713 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4714 __ push(result_register());
4715 VisitForAccumulatorValue(prop->key());
4716 __ push(result_register());
4717 __ push(MemOperand(esp, 2 * kPointerSize));
4718 __ push(MemOperand(esp, 2 * kPointerSize));
4719 __ push(result_register());
4720 EmitKeyedSuperPropertyLoad(prop);
4724 case KEYED_PROPERTY: {
4725 VisitForStackValue(prop->obj());
4726 VisitForStackValue(prop->key());
4727 __ mov(LoadDescriptor::ReceiverRegister(),
4728 Operand(esp, kPointerSize)); // Object.
4729 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4730 EmitKeyedPropertyLoad(prop);
4739 // We need a second deoptimization point after loading the value
4740 // in case evaluating the property load my have a side effect.
4741 if (assign_type == VARIABLE) {
4742 PrepareForBailout(expr->expression(), TOS_REG);
4744 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4747 // Inline smi case if we are in a loop.
4748 Label done, stub_call;
4749 JumpPatchSite patch_site(masm_);
4750 if (ShouldInlineSmiCase(expr->op())) {
4752 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4754 // Save result for postfix expressions.
4755 if (expr->is_postfix()) {
4756 if (!context()->IsEffect()) {
4757 // Save the result on the stack. If we have a named or keyed property
4758 // we store the result under the receiver that is currently on top
4760 switch (assign_type) {
4764 case NAMED_PROPERTY:
4765 __ mov(Operand(esp, kPointerSize), eax);
4767 case NAMED_SUPER_PROPERTY:
4768 __ mov(Operand(esp, 2 * kPointerSize), eax);
4770 case KEYED_PROPERTY:
4771 __ mov(Operand(esp, 2 * kPointerSize), eax);
4773 case KEYED_SUPER_PROPERTY:
4774 __ mov(Operand(esp, 3 * kPointerSize), eax);
// Smi fast path: add/sub a tagged 1; overflow falls through to undo.
4780 if (expr->op() == Token::INC) {
4781 __ add(eax, Immediate(Smi::FromInt(1)));
4783 __ sub(eax, Immediate(Smi::FromInt(1)));
4785 __ j(no_overflow, &done, Label::kNear);
4786 // Call stub. Undo operation first.
4787 if (expr->op() == Token::INC) {
4788 __ sub(eax, Immediate(Smi::FromInt(1)));
4790 __ add(eax, Immediate(Smi::FromInt(1)));
4792 __ jmp(&stub_call, Label::kNear);
// Slow path (label bind elided): coerce the operand to a number first.
4795 ToNumberStub convert_stub(isolate());
4796 __ CallStub(&convert_stub);
4797 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4799 // Save result for postfix expressions.
4800 if (expr->is_postfix()) {
4801 if (!context()->IsEffect()) {
4802 // Save the result on the stack. If we have a named or keyed property
4803 // we store the result under the receiver that is currently on top
4805 switch (assign_type) {
4809 case NAMED_PROPERTY:
4810 __ mov(Operand(esp, kPointerSize), eax);
4812 case NAMED_SUPER_PROPERTY:
4813 __ mov(Operand(esp, 2 * kPointerSize), eax);
4815 case KEYED_PROPERTY:
4816 __ mov(Operand(esp, 2 * kPointerSize), eax);
4818 case KEYED_SUPER_PROPERTY:
4819 __ mov(Operand(esp, 3 * kPointerSize), eax);
4825 // Record position before stub call.
4826 SetSourcePosition(expr->position());
4828 // Call stub for +1/-1.
4829 __ bind(&stub_call);
// Operands for the BinaryOpIC: left in edx (elided line), right = Smi 1.
4831 __ mov(eax, Immediate(Smi::FromInt(1)));
4833 CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
4834 CallIC(code, expr->CountBinOpFeedbackId());
4835 patch_site.EmitPatchInfo();
4838 // Store the value returned in eax.
4839 switch (assign_type) {
// VARIABLE case (label elided):
4841 if (expr->is_postfix()) {
4842 // Perform the assignment as if via '='.
4843 { EffectContext context(this);
4844 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4846 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4849 // For all contexts except EffectContext We have the result on
4850 // top of the stack.
4851 if (!context()->IsEffect()) {
4852 context()->PlugTOS();
4855 // Perform the assignment as if via '='.
4856 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4858 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4859 context()->Plug(eax);
4862 case NAMED_PROPERTY: {
4863 __ mov(StoreDescriptor::NameRegister(),
4864 prop->key()->AsLiteral()->value());
4865 __ pop(StoreDescriptor::ReceiverRegister());
4866 CallStoreIC(expr->CountStoreFeedbackId());
4867 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4868 if (expr->is_postfix()) {
4869 if (!context()->IsEffect()) {
4870 context()->PlugTOS();
4873 context()->Plug(eax);
4877 case NAMED_SUPER_PROPERTY: {
4878 EmitNamedSuperPropertyStore(prop);
4879 if (expr->is_postfix()) {
4880 if (!context()->IsEffect()) {
4881 context()->PlugTOS();
4884 context()->Plug(eax);
4888 case KEYED_SUPER_PROPERTY: {
4889 EmitKeyedSuperPropertyStore(prop);
4890 if (expr->is_postfix()) {
4891 if (!context()->IsEffect()) {
4892 context()->PlugTOS();
4895 context()->Plug(eax);
4899 case KEYED_PROPERTY: {
4900 __ pop(StoreDescriptor::NameRegister());
4901 __ pop(StoreDescriptor::ReceiverRegister());
4903 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4904 CallIC(ic, expr->CountStoreFeedbackId());
4905 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4906 if (expr->is_postfix()) {
4907 // Result is on the stack
4908 if (!context()->IsEffect()) {
4909 context()->PlugTOS();
4912 context()->Plug(eax);
// Loads the operand of a typeof expression without ever throwing a
// ReferenceError: global variables use a non-contextual load IC, lookup
// slots use the no-reference-error runtime entry, and everything else is
// compiled normally.
4920 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4921 VariableProxy* proxy = expr->AsVariableProxy();
4922 DCHECK(!context()->IsEffect());
4923 DCHECK(!context()->IsTest());
4925 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4926 Comment cmnt(masm_, "[ Global variable");
4927 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4928 __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
4929 if (FLAG_vector_ics) {
4930 __ mov(VectorLoadICDescriptor::SlotRegister(),
4931 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
4933 // Use a regular load, not a contextual load, to avoid a reference
4935 CallLoadIC(NOT_CONTEXTUAL);
4936 PrepareForBailout(expr, TOS_REG);
4937 context()->Plug(eax);
4938 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4939 Comment cmnt(masm_, "[ Lookup slot");
4942 // Generate code for loading from variables potentially shadowed
4943 // by eval-introduced variables.
4944 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
// Slow path (label bind elided): full dynamic lookup that yields
// undefined instead of throwing for unresolvable names.
4948 __ push(Immediate(proxy->name()));
4949 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4950 PrepareForBailout(expr, TOS_REG);
4953 context()->Plug(eax);
4955 // This expression cannot throw a reference error at the top level.
4956 VisitInDuplicateContext(expr);
// Fast path for `typeof x == "literal"` comparisons: instead of computing
// the typeof string, branch directly on the value's type. One arm per
// recognized literal ("number", "string", "symbol", "boolean", "undefined",
// "function", "object"); an unrecognized literal falls through to if_false.
4961 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4962 Expression* sub_expr,
4963 Handle<String> check) {
4964 Label materialize_true, materialize_false;
4965 Label* if_true = NULL;
4966 Label* if_false = NULL;
4967 Label* fall_through = NULL;
4968 context()->PrepareTest(&materialize_true, &materialize_false,
4969 &if_true, &if_false, &fall_through);
4971 { AccumulatorValueContext context(this);
4972 VisitForTypeofValue(sub_expr);
4974 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4976 Factory* factory = isolate()->factory();
4977 if (String::Equals(check, factory->number_string())) {
4978 __ JumpIfSmi(eax, if_true);
4979 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4980 isolate()->factory()->heap_number_map());
4981 Split(equal, if_true, if_false, fall_through);
4982 } else if (String::Equals(check, factory->string_string())) {
4983 __ JumpIfSmi(eax, if_false);
4984 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4985 __ j(above_equal, if_false);
4986 // Check for undetectable objects => false.
4987 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4988 1 << Map::kIsUndetectable);
4989 Split(zero, if_true, if_false, fall_through);
4990 } else if (String::Equals(check, factory->symbol_string())) {
4991 __ JumpIfSmi(eax, if_false);
4992 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4993 Split(equal, if_true, if_false, fall_through);
4994 } else if (String::Equals(check, factory->boolean_string())) {
4995 __ cmp(eax, isolate()->factory()->true_value());
4996 __ j(equal, if_true);
4997 __ cmp(eax, isolate()->factory()->false_value());
4998 Split(equal, if_true, if_false, fall_through);
4999 } else if (String::Equals(check, factory->undefined_string())) {
5000 __ cmp(eax, isolate()->factory()->undefined_value());
5001 __ j(equal, if_true);
5002 __ JumpIfSmi(eax, if_false);
5003 // Check for undetectable objects => true.
5004 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
5005 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
5006 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
5007 Split(not_zero, if_true, if_false, fall_through);
5008 } else if (String::Equals(check, factory->function_string())) {
5009 __ JumpIfSmi(eax, if_false);
// Both JSFunction and JSFunctionProxy report typeof "function".
5010 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5011 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
5012 __ j(equal, if_true);
5013 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
5014 Split(equal, if_true, if_false, fall_through);
5015 } else if (String::Equals(check, factory->object_string())) {
5016 __ JumpIfSmi(eax, if_false);
// typeof null is "object"; then require a non-callable spec object
// instance type.
5017 __ cmp(eax, isolate()->factory()->null_value());
5018 __ j(equal, if_true);
5019 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
5020 __ j(below, if_false);
5021 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5022 __ j(above, if_false);
5023 // Check for undetectable objects => false.
5024 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5025 1 << Map::kIsUndetectable);
5026 Split(zero, if_true, if_false, fall_through);
5028 if (if_false != fall_through) __ jmp(if_false);
5030 context()->Plug(if_true, if_false);
// Compiles comparison operators. Literal comparisons (typeof / null /
// undefined against a literal) take the specialized fast path first.
// Token::IN and Token::INSTANCEOF are dispatched to their builtin/stub;
// everything else goes through the inline smi fast path (patched via
// JumpPatchSite) plus the CompareIC. Switch/case framing lines are elided
// from this excerpt.
5034 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5035 Comment cmnt(masm_, "[ CompareOperation");
5036 SetSourcePosition(expr->position());
5038 // First we try a fast inlined version of the compare when one of
5039 // the operands is a literal.
5040 if (TryLiteralCompare(expr)) return;
5042 // Always perform the comparison for its control flow. Pack the result
5043 // into the expression's context after the comparison is performed.
5044 Label materialize_true, materialize_false;
5045 Label* if_true = NULL;
5046 Label* if_false = NULL;
5047 Label* fall_through = NULL;
5048 context()->PrepareTest(&materialize_true, &materialize_false,
5049 &if_true, &if_false, &fall_through);
5051 Token::Value op = expr->op();
5052 VisitForStackValue(expr->left());
// Token::IN case (label elided): result is the boolean returned by the
// IN builtin.
5055 VisitForStackValue(expr->right());
5056 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5057 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5058 __ cmp(eax, isolate()->factory()->true_value());
5059 Split(equal, if_true, if_false, fall_through);
5062 case Token::INSTANCEOF: {
5063 VisitForStackValue(expr->right());
5064 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5066 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5068 // The stub returns 0 for true.
5069 Split(zero, if_true, if_false, fall_through);
// Default case (label elided): generic comparison.
5074 VisitForAccumulatorValue(expr->right());
5075 Condition cc = CompareIC::ComputeCondition(op);
5078 bool inline_smi_code = ShouldInlineSmiCase(op);
5079 JumpPatchSite patch_site(masm_);
// Fast path: if both operands are smis, compare directly.
5080 if (inline_smi_code) {
5084 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5086 Split(cc, if_true, if_false, NULL);
5087 __ bind(&slow_case);
5090 // Record position and call the compare IC.
5091 SetSourcePosition(expr->position());
5092 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5093 CallIC(ic, expr->CompareOperationFeedbackId());
5094 patch_site.EmitPatchInfo();
5096 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5098 Split(cc, if_true, if_false, fall_through);
5102 // Convert the result of the comparison into one expected for this
5103 // expression's context.
5104 context()->Plug(if_true, if_false);
// Fast path for comparisons against null/undefined literals. Strict
// equality is a direct pointer compare against the nil value; loose
// equality goes through the CompareNilIC (which also accepts the other
// nil and undetectable objects).
5108 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5109 Expression* sub_expr,
5111 Label materialize_true, materialize_false;
5112 Label* if_true = NULL;
5113 Label* if_false = NULL;
5114 Label* fall_through = NULL;
5115 context()->PrepareTest(&materialize_true, &materialize_false,
5116 &if_true, &if_false, &fall_through);
5118 VisitForAccumulatorValue(sub_expr);
5119 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5121 Handle<Object> nil_value = nil == kNullValue
5122 ? isolate()->factory()->null_value()
5123 : isolate()->factory()->undefined_value();
5124 if (expr->op() == Token::EQ_STRICT) {
5125 __ cmp(eax, nil_value);
5126 Split(equal, if_true, if_false, fall_through);
5128 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5129 CallIC(ic, expr->CompareOperationFeedbackId());
// The IC leaves a truthy/falsy value in eax (test elided in excerpt).
5131 Split(not_zero, if_true, if_false, fall_through);
5133 context()->Plug(if_true, if_false);
// Loads the closure of the current frame (the function being executed)
// from its fixed frame slot and plugs it into the context.
5137 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5138 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5139 context()->Plug(eax);
// Accumulator register used for expression results (body elided in this
// excerpt; on ia32 the result register is eax, as the surrounding code's
// Plug(eax) calls indicate).
5143 Register FullCodeGenerator::result_register() {
// Register holding the current JS context (body elided in this excerpt;
// the surrounding code consistently uses esi as the context register).
5148 Register FullCodeGenerator::context_register() {
// Stores `value` into the current frame at the given byte offset from ebp.
// The offset must be pointer-size aligned.
5153 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5154 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5155 __ mov(Operand(ebp, frame_offset), value);
// Loads slot `context_index` of the current context (esi) into `dst`.
5159 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5160 __ mov(dst, ContextOperand(esi, context_index));
// Pushes the closure argument needed when allocating a new context,
// choosing it by the kind of the declaration scope:
//   - script/module scope: a Smi 0 sentinel (runtime substitutes the
//     canonical empty function),
//   - eval scope: the closure of the calling context,
//   - function scope: the function in the current frame.
// NOTE(review): listing gap — the '} else {' before the function-scope
// branch and the closing braces are not visible.
5164 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5165 Scope* declaration_scope = scope()->DeclarationScope();
5166 if (declaration_scope->is_script_scope() ||
5167 declaration_scope->is_module_scope()) {
5168 // Contexts nested in the native context have a canonical empty function
5169 // as their closure, not the anonymous closure containing the global
5170 // code. Pass a smi sentinel and let the runtime look up the empty
5172 __ push(Immediate(Smi::FromInt(0)));
5173 } else if (declaration_scope->is_eval_scope()) {
5174 // Contexts nested inside eval code have the same closure as the context
5175 // calling eval, not the anonymous closure containing the eval code.
5176 // Fetch it from the context.
5177 __ push(ContextOperand(esi, Context::CLOSURE_INDEX))
5179 DCHECK(declaration_scope->is_function_scope());
5180 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5185 // ----------------------------------------------------------------------------
5186 // Non-local control flow support.
// Emitted on entry to a finally block. "Cooks" the return address on top of
// the stack into a code-object-relative, smi-encoded delta (so it survives
// GC code moves), then saves the result register and the isolate's pending
// message state (object, has-message flag, script) so the finally body can
// clobber them.
// NOTE(review): many lines are missing from this listing — the pop/push of
// edx around the cooking, the smi-encode shift, and the pushes of each
// loaded message value are not visible.
5188 void FullCodeGenerator::EnterFinallyBlock() {
5189 // Cook return address on top of stack (smi encoded Code* delta)
5190 DCHECK(!result_register().is(edx));
// Make the return address relative to this code object's start.
5192 __ sub(edx, Immediate(masm_->CodeObject()));
// Smi-encoding on ia32 shifts by exactly one bit with tag 0.
5193 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5194 STATIC_ASSERT(kSmiTag == 0);
5198 // Store result register while executing finally block.
5199 __ push(result_register());
5201 // Store pending message while executing finally block.
5202 ExternalReference pending_message_obj =
5203 ExternalReference::address_of_pending_message_obj(isolate());
5204 __ mov(edx, Operand::StaticVariable(pending_message_obj));
// Save the has-pending-message flag as well.
5207 ExternalReference has_pending_message =
5208 ExternalReference::address_of_has_pending_message(isolate());
5209 __ mov(edx, Operand::StaticVariable(has_pending_message));
// And the script associated with the pending message.
5213 ExternalReference pending_message_script =
5214 ExternalReference::address_of_pending_message_script(isolate());
5215 __ mov(edx, Operand::StaticVariable(pending_message_script));
// Emitted on exit from a finally block: the inverse of EnterFinallyBlock.
// Restores the pending message state (script, flag, object) and the result
// register from the stack, then "uncooks" the saved return address by adding
// the code object's address back.
// NOTE(review): listing gaps — the pops into edx before each store and
// around the uncooking are not visible here.
5220 void FullCodeGenerator::ExitFinallyBlock() {
5221 DCHECK(!result_register().is(edx));
5222 // Restore pending message from stack.
5224 ExternalReference pending_message_script =
5225 ExternalReference::address_of_pending_message_script(isolate());
5226 __ mov(Operand::StaticVariable(pending_message_script), edx);
// Restore the has-pending-message flag.
5230 ExternalReference has_pending_message =
5231 ExternalReference::address_of_has_pending_message(isolate());
5232 __ mov(Operand::StaticVariable(has_pending_message), edx);
// Restore the pending message object itself.
5235 ExternalReference pending_message_obj =
5236 ExternalReference::address_of_pending_message_obj(isolate());
5237 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5239 // Restore result register from stack.
5240 __ pop(result_register());
5242 // Uncook return address.
// Reverses the sub(CodeObject) done in EnterFinallyBlock (the smi-decode
// shift between these lines is not visible in this listing).
5245 __ add(edx, Immediate(masm_->CodeObject()));
5252 #define __ ACCESS_MASM(masm())
// Unwinds the stack past this try/finally on a non-local exit (break,
// continue, return): drops down to the handler block, restores the context
// register and its frame slot if contexts were pushed, and calls the finally
// entry so the finally body runs before control leaves.
// NOTE(review): listing gaps — the 'int* stack_depth' parameter line, the
// stack-depth adjustment, the return statement, and closing braces are not
// visible here.
5254 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5256 int* context_length) {
5257 // The macros used here must preserve the result register.
5259 // Because the handler block contains the context of the finally
5260 // code, we can restore it directly from there for the finally code
5261 // rather than iteratively unwinding contexts via their previous
5263 __ Drop(*stack_depth); // Down to the handler block.
5264 if (*context_length > 0) {
5265 // Restore the context to its dedicated register and the stack.
5266 __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset))
5267 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
// Run the finally body before leaving the statement.
5270 __ call(finally_entry_);
5273 *context_length = 0;
// Opcode bytes used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// patch the back-edge check sequence in place.
5280 static const byte kJnsInstruction = 0x79;  // short 'jns rel8' opcode
5281 static const byte kJnsOffset = 0x11;  // rel8 displacement used by the jns
5282 static const byte kNopByteOne = 0x66;  // operand-size prefix of 2-byte nop
5283 static const byte kNopByteTwo = 0x90;  // 'nop' — together: 66 90
// 'call rel32' opcode; its 4-byte target is what gets retargeted.
5285 static const byte kCallInstruction = 0xe8;
// Patches the back-edge check at |pc| in |unoptimized_code|. The sequence is
//   sub <counter>, <delta> ; jns <skip> ; call <stub>
// For INTERRUPT the jns is (re)written so the call is skipped on a
// non-negative counter; for OSR states the jns is overwritten with a 2-byte
// nop (66 90) so the call always executes. The call's 4-byte target is then
// retargeted to |replacement_code| and the incremental marker is notified.
// NOTE(review): listing gaps — the 'Address pc' parameter, the 'case
// INTERRUPT:' label, the 'break;'s, and the closing braces are not visible.
5289 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5291 BackEdgeState target_state,
5292 Code* replacement_code) {
// Layout relative to pc: ... jns(1) off(1) call(1) target(4) <- pc
5293 Address call_target_address = pc - kIntSize;
5294 Address jns_instr_address = call_target_address - 3;
5295 Address jns_offset_address = call_target_address - 2;
5297 switch (target_state) {
5299 // sub <profiling_counter>, <delta> ;; Not changed
5301 // call <interrupt stub>
// Restore the jns so the interrupt call is skipped while counter >= 0.
5303 *jns_instr_address = kJnsInstruction;
5304 *jns_offset_address = kJnsOffset;
5306 case ON_STACK_REPLACEMENT:
5307 case OSR_AFTER_STACK_CHECK:
5308 // sub <profiling_counter>, <delta> ;; Not changed
5311 // call <on-stack replacement>
// Overwrite the jns with a 2-byte nop so the OSR call is unconditional.
5313 *jns_instr_address = kNopByteOne;
5314 *jns_offset_address = kNopByteTwo;
// Retarget the call and tell the incremental marker about the new target.
5318 Assembler::set_target_address_at(call_target_address,
5320 replacement_code->entry());
5321 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5322 unoptimized_code, call_target_address, replacement_code);
// Decodes the current state of the back-edge sequence at |pc| (the inverse
// of PatchAt): a live jns means INTERRUPT; a 2-byte nop means the call was
// retargeted, and the call target distinguishes ON_STACK_REPLACEMENT from
// OSR_AFTER_STACK_CHECK.
// NOTE(review): listing gaps — the 'Address pc' parameter, the 'return
// INTERRUPT;', closing parens of the DCHECK_EQs, and closing braces are not
// visible here.
5326 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5328 Code* unoptimized_code,
// Same layout as in PatchAt: the call's 4-byte target precedes pc.
5330 Address call_target_address = pc - kIntSize;
5331 Address jns_instr_address = call_target_address - 3;
// The call opcode itself is never patched.
5332 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5334 if (*jns_instr_address == kJnsInstruction) {
// jns still present: the interrupt check is live.
5335 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5336 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5337 Assembler::target_address_at(call_target_address,
// Otherwise the jns was replaced by the 66 90 nop pair.
5342 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5343 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5345 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5346 isolate->builtins()->OnStackReplacement()->entry()) {
5347 return ON_STACK_REPLACEMENT;
// Only one retargeted state remains; assert it and report it.
5350 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5351 Assembler::target_address_at(call_target_address,
5353 return OSR_AFTER_STACK_CHECK;
5357 } } // namespace v8::internal
5359 #endif // V8_TARGET_ARCH_IA32