1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_X64
32 #include "code-stubs.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
40 #include "stub-cache.h"
45 #define __ ACCESS_MASM(masm_)
// Records the position of a patchable smi-check jump so the IC machinery
// can later rewrite it when type feedback changes: jc is patched to jz and
// jnc to jnz (see the comment before EmitJump below).
// NOTE(review): interior lines of this class (access specifiers, destructor,
// closing braces, info_emitted_ declaration) are elided from this view.
48 class JumpPatchSite BASE_EMBEDDED {
50 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
52 info_emitted_ = false;
// Destructor-style invariant: the patch info must have been emitted
// if and only if the patch site was bound.
57 ASSERT(patch_site_.is_bound() == info_emitted_);
// Emits a smi test on |reg| followed by a patchable jump taken when the
// value is NOT a smi (kSmiTagMask bit set -> carry after testb).
60 void EmitJumpIfNotSmi(Register reg,
62 Label::Distance near_jump = Label::kFar) {
63 __ testb(reg, Immediate(kSmiTagMask));
64 EmitJump(not_carry, target, near_jump); // Always taken before patched.
// Emits a smi test on |reg| followed by a patchable jump taken when the
// value IS a smi.
67 void EmitJumpIfSmi(Register reg,
69 Label::Distance near_jump = Label::kFar) {
70 __ testb(reg, Immediate(kSmiTagMask));
71 EmitJump(carry, target, near_jump); // Never taken before patched.
// Emits the patch-site marker that tells the IC system where the
// patchable jump lives: a testl on rax encoding the (int8) distance back
// to the jump, or a single nop when no smi check was inlined.
74 void EmitPatchInfo() {
75 if (patch_site_.is_bound()) {
76 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77 ASSERT(is_int8(delta_to_patch_site));
// The testl on rax has no semantic effect; its immediate carries the
// delta used later to locate the patch site.
78 __ testl(rax, Immediate(delta_to_patch_site));
83 __ nop(); // Signals no inlined code.
88 // jc will be patched with jz, jnc will become jnz.
89 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
90 ASSERT(!patch_site_.is_bound() && !info_emitted_);
91 ASSERT(cc == carry || cc == not_carry);
92 __ bind(&patch_site_);
93 __ j(cc, target, near_jump);
96 MacroAssembler* masm_;
// Emits a stack-limit check and calls the StackCheck builtin on overflow.
// When |scratch| is rsp (and the elided |pointers| parameter is 0) the stack
// pointer itself is compared against the limit; otherwise rsp minus
// |pointers| * kPointerSize is checked so a following bulk allocation of
// locals cannot overflow. NOTE(review): the |pointers| parameter line, the
// ok label declaration, its bind, and the closing brace are elided here.
104 static void EmitStackCheck(MacroAssembler* masm_,
106 Register scratch = rsp) {
107 Isolate* isolate = masm_->isolate();
109 ASSERT(scratch.is(rsp) == (pointers == 0));
111 __ movq(scratch, rsp);
112 __ subq(scratch, Immediate(pointers * kPointerSize));
114 __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
115 __ j(above_equal, &ok, Label::kNear);
116 __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
121 // Generate code for a JS function. On entry to the function the receiver
122 // and arguments have been pushed on the stack left to right, with the
123 // return address on top of them. The actual argument count matches the
124 // formal parameter count expected by the function.
126 // The live registers are:
127 // o rdi: the JS function object being called (i.e. ourselves)
128 // o rsi: our context
129 // o rbp: our caller's frame pointer
130 // o rsp: stack pointer (pointing to return address)
132 // The function builds a JS frame. Please see JavaScriptFrameConstants in
133 // frames-x64.h for its layout.
// Emits the complete unoptimized code for one JS function: prologue, local
// allocation, context and arguments-object setup, declarations, body, and
// an implicit 'return undefined'. See the block comment above for register
// assignments on entry. NOTE(review): many interior lines (labels, else
// branches, closing braces) are elided from this view.
134 void FullCodeGenerator::Generate() {
135 CompilationInfo* info = info_;
137 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
139 InitializeFeedbackVector();
// Allocate the profiling counter cell used by the interrupt budget /
// back-edge bookkeeping below.
141 profiling_counter_ = isolate()->factory()->NewCell(
142 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
143 SetFunctionPosition(function());
144 Comment cmnt(masm_, "[ function compiled by full code generator");
146 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
// Debugging aid: break at entry when --stop-at names this function.
149 if (strlen(FLAG_stop_at) > 0 &&
150 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
155 // Sloppy mode functions and builtins need to replace the receiver with the
156 // global proxy when called as functions (without an explicit receiver
158 if (info->strict_mode() == SLOPPY && !info->is_native()) {
160 // +1 for return address.
161 StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
162 __ movp(rcx, args.GetReceiverOperand());
// Only swap in the global receiver when the receiver slot currently
// holds undefined (i.e. no explicit receiver was supplied).
164 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
165 __ j(not_equal, &ok, Label::kNear);
167 __ movp(rcx, GlobalObjectOperand());
168 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
170 __ movp(args.GetReceiverOperand(), rcx);
175 // Open a frame scope to indicate that there is a frame on the stack. The
176 // MANUAL indicates that the scope shouldn't actually generate code to set up
177 // the frame (that is done below).
178 FrameScope frame_scope(masm_, StackFrame::MANUAL);
180 info->set_prologue_offset(masm_->pc_offset());
181 __ Prologue(BUILD_FUNCTION_FRAME);
182 info->AddNoFrameRange(0, masm_->pc_offset());
184 { Comment cmnt(masm_, "[ Allocate locals");
185 int locals_count = info->scope()->num_stack_slots();
186 // Generators allocate locals, if any, in context slots.
187 ASSERT(!info->function()->is_generator() || locals_count == 0);
188 if (locals_count == 1) {
189 __ PushRoot(Heap::kUndefinedValueRootIndex);
190 } else if (locals_count > 1) {
// For many locals, first verify the pushes cannot run past the stack
// limit (rcx is used as the scratch register here).
191 if (locals_count >= 128) {
192 EmitStackCheck(masm_, locals_count, rcx);
194 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
// Push undefined in chunks of kMaxPushes inside a counted loop (rcx is
// the loop counter), then emit the remainder as straight-line pushes.
195 const int kMaxPushes = 32;
196 if (locals_count >= kMaxPushes) {
197 int loop_iterations = locals_count / kMaxPushes;
198 __ movq(rcx, Immediate(loop_iterations));
200 __ bind(&loop_header);
202 for (int i = 0; i < kMaxPushes; i++) {
205 // Continue loop if not done.
207 __ j(not_zero, &loop_header, Label::kNear);
209 int remaining = locals_count % kMaxPushes;
210 // Emit the remaining pushes.
211 for (int i = 0; i < remaining; i++) {
// Tracks whether the closure is still live in rdi; runtime/stub calls
// below clobber it.
217 bool function_in_register = true;
219 // Possibly allocate a local context.
220 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
221 if (heap_slots > 0) {
222 Comment cmnt(masm_, "[ Allocate context");
223 // Argument to NewContext is the function, which is still in rdi.
224 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
226 __ Push(info->scope()->GetScopeInfo());
227 __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
228 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
229 FastNewContextStub stub(heap_slots);
233 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
235 function_in_register = false;
236 // Context is returned in rax. It replaces the context passed to us.
237 // It's saved in the stack and kept live in rsi.
239 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
241 // Copy any necessary parameters into the context.
242 int num_parameters = info->scope()->num_parameters();
243 for (int i = 0; i < num_parameters; i++) {
244 Variable* var = scope()->parameter(i);
245 if (var->IsContextSlot()) {
246 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
247 (num_parameters - 1 - i) * kPointerSize;
248 // Load parameter from stack.
249 __ movp(rax, Operand(rbp, parameter_offset));
250 // Store it in the context.
251 int context_offset = Context::SlotOffset(var->index());
252 __ movp(Operand(rsi, context_offset), rax);
253 // Update the write barrier. This clobbers rax and rbx.
254 __ RecordWriteContextSlot(
255 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
260 // Possibly allocate an arguments object.
261 Variable* arguments = scope()->arguments();
262 if (arguments != NULL) {
263 // Arguments object must be allocated after the context object, in
264 // case the "arguments" or ".arguments" variables are in the context.
265 Comment cmnt(masm_, "[ Allocate arguments object");
266 if (function_in_register) {
// Closure no longer in rdi; reload it from the frame.
269 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
271 // The receiver is just before the parameters on the caller's stack.
272 int num_parameters = info->scope()->num_parameters();
273 int offset = num_parameters * kPointerSize;
275 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
277 __ Push(Smi::FromInt(num_parameters));
278 // Arguments to ArgumentsAccessStub:
279 // function, receiver address, parameter count.
280 // The stub will rewrite receiver and parameter count if the previous
281 // stack frame was an arguments adapter frame.
282 ArgumentsAccessStub::Type type;
283 if (strict_mode() == STRICT) {
284 type = ArgumentsAccessStub::NEW_STRICT;
285 } else if (function()->has_duplicate_parameters()) {
286 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
288 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
290 ArgumentsAccessStub stub(type);
// Store the stub's result (in rax) into the 'arguments' variable;
// rbx/rdx are scratch registers for SetVar's write barrier.
293 SetVar(arguments, rax, rbx, rdx);
297 __ CallRuntime(Runtime::kTraceEnter, 0);
300 // Visit the declarations and body unless there is an illegal
302 if (scope()->HasIllegalRedeclaration()) {
303 Comment cmnt(masm_, "[ Declarations");
304 scope()->VisitIllegalRedeclaration(this);
307 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
308 { Comment cmnt(masm_, "[ Declarations");
309 // For named function expressions, declare the function name as a
311 if (scope()->is_function_scope() && scope()->function() != NULL) {
312 VariableDeclaration* function = scope()->function();
313 ASSERT(function->proxy()->var()->mode() == CONST ||
314 function->proxy()->var()->mode() == CONST_LEGACY);
315 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
316 VisitVariableDeclaration(function);
318 VisitDeclarations(scope()->declarations());
321 { Comment cmnt(masm_, "[ Stack check");
322 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
323 EmitStackCheck(masm_);
326 { Comment cmnt(masm_, "[ Body");
327 ASSERT(loop_depth() == 0);
328 VisitStatements(function()->body());
329 ASSERT(loop_depth() == 0);
333 // Always emit a 'return undefined' in case control fell off the end of
335 { Comment cmnt(masm_, "[ return <undefined>;");
336 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
337 EmitReturnSequence();
// Resets the accumulator register (rax). NOTE(review): the body and closing
// brace are elided from this view; presumably it loads a safe value such as
// Smi zero into rax — confirm against the full source.
342 void FullCodeGenerator::ClearAccumulator() {
// Decrements the profiling counter cell by |delta| (as a smi), clobbering
// rbx. Callers branch on the resulting sign to decide whether the interrupt
// budget is exhausted.
347 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
348 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
349 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
350 Smi::FromInt(-delta));
// Restores the profiling counter cell to the full interrupt budget.
// Clobbers rbx and kScratchRegister.
354 void FullCodeGenerator::EmitProfilingCounterReset() {
355 int reset_value = FLAG_interrupt_budget;
356 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
357 __ Move(kScratchRegister, Smi::FromInt(reset_value));
358 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
// Emitted at each loop back edge: decrements the profiling counter by a
// weight proportional to the loop body's code size and, when the budget is
// exhausted, calls the InterruptCheck builtin (which may trigger OSR).
// NOTE(review): the ok label declaration/bind and closing brace are elided.
362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
363 Label* back_edge_target) {
364 Comment cmnt(masm_, "[ Back edge bookkeeping");
367 ASSERT(back_edge_target->is_bound());
// Weight the decrement by the generated-code distance of the loop body,
// capped at kMaxBackEdgeWeight and floored at 1.
368 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
369 int weight = Min(kMaxBackEdgeWeight,
370 Max(1, distance / kCodeSizeMultiplier));
371 EmitProfilingCounterDecrement(weight);
372 __ j(positive, &ok, Label::kNear);
373 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
375 // Record a mapping of this PC offset to the OSR id. This is used to find
376 // the AST id from the unoptimized code in order to use it as a key into
377 // the deoptimization input data found in the optimized code.
378 RecordBackEdge(stmt->OsrEntryId());
380 EmitProfilingCounterReset();
383 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
384 // Record a mapping of the OSR id to this PC. This is used if the OSR
385 // entry becomes the target of a bailout. We don't expect it to be, but
386 // we want it to work if it is.
387 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
// Emits (or jumps to) the function's single return sequence: profiling
// counter bookkeeping, then a frame teardown and Ret that pops the receiver
// plus parameters. The exact sequence length is asserted so the debugger can
// patch it. NOTE(review): several interior lines (else branches, label
// declarations, the frame-teardown instructions) are elided from this view.
391 void FullCodeGenerator::EmitReturnSequence() {
392 Comment cmnt(masm_, "[ Return sequence");
// All returns share one sequence: later returns jump to the first one.
393 if (return_label_.is_bound()) {
394 __ jmp(&return_label_);
396 __ bind(&return_label_);
399 __ CallRuntime(Runtime::kTraceExit, 1);
401 // Pretend that the exit is a backwards jump to the entry.
403 if (info_->ShouldSelfOptimize()) {
404 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
406 int distance = masm_->pc_offset();
407 weight = Min(kMaxBackEdgeWeight,
408 Max(1, distance / kCodeSizeMultiplier));
410 EmitProfilingCounterDecrement(weight);
412 __ j(positive, &ok, Label::kNear);
414 __ call(isolate()->builtins()->InterruptCheck(),
415 RelocInfo::CODE_TARGET);
417 EmitProfilingCounterReset();
420 // Add a label for checking the size of the code used for returning.
421 Label check_exit_codesize;
422 masm_->bind(&check_exit_codesize);
424 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
426 // Do not use the leave instruction here because it is too short to
427 // patch with the code required by the debugger.
430 int no_frame_start = masm_->pc_offset();
// Ret pops the return address plus receiver and parameters
// (+1 for the receiver); rcx is a scratch register for the Ret macro.
432 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
433 __ Ret(arguments_bytes, rcx);
435 #ifdef ENABLE_DEBUGGER_SUPPORT
436 // Add padding that will be overwritten by a debugger breakpoint. We
437 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
439 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
440 for (int i = 0; i < kPadding; ++i) {
443 // Check that the size of the code used for returning is large enough
444 // for the debugger's requirements.
445 ASSERT(Assembler::kJSReturnSequenceLength <=
446 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
448 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
// Effect context: the variable's value is discarded, so no code is emitted;
// only sanity-check that the variable has a materialized location.
453 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
454 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
// Accumulator context: load the variable's value into the result register.
458 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
459 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
460 codegen()->GetVar(result_register(), var);
// Stack context: materialize the variable's operand so its value can be
// pushed. NOTE(review): the push instruction itself is elided from this view.
464 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
465 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
466 MemOperand operand = codegen()->VarOperand(var, result_register());
// Test context: load the variable into the result register and branch on
// its truth value via DoTest.
471 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
472 codegen()->GetVar(result_register(), var);
473 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
474 codegen()->DoTest(this);
// Effect context: a root constant has no side effects, so nothing to emit.
478 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
// Accumulator context: load the root-list constant into the result register.
482 void FullCodeGenerator::AccumulatorValueContext::Plug(
483 Heap::RootListIndex index) const {
484 __ LoadRoot(result_register(), index);
// Stack context: push the root-list constant. NOTE(review): the PushRoot
// instruction is elided from this view.
488 void FullCodeGenerator::StackValueContext::Plug(
489 Heap::RootListIndex index) const {
// Test context: for the known-falsy roots (undefined, null, false) jump
// straight to the false label, for true jump to the true label; any other
// root falls back to a full DoTest on the loaded value.
494 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
495 codegen()->PrepareForBailoutBeforeSplit(condition(),
499 if (index == Heap::kUndefinedValueRootIndex ||
500 index == Heap::kNullValueRootIndex ||
501 index == Heap::kFalseValueRootIndex) {
502 if (false_label_ != fall_through_) __ jmp(false_label_);
503 } else if (index == Heap::kTrueValueRootIndex) {
504 if (true_label_ != fall_through_) __ jmp(true_label_);
506 __ LoadRoot(result_register(), index);
507 codegen()->DoTest(this);
// Effect context: a literal has no side effects, so nothing to emit.
512 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
// Accumulator context: load the literal into the result register. Smis use
// SafeMove so the immediate can be masked when --debug-code hardening is on.
516 void FullCodeGenerator::AccumulatorValueContext::Plug(
517 Handle<Object> lit) const {
519 __ SafeMove(result_register(), Smi::cast(*lit));
521 __ Move(result_register(), lit);
// Stack context: push the literal; smis use SafePush. NOTE(review): the
// non-smi push branch is elided from this view.
526 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
528 __ SafePush(Smi::cast(*lit));
// Test context: branch directly on the literal's statically-known truth
// value (ECMA ToBoolean) where possible; otherwise load it and DoTest.
535 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
536 codegen()->PrepareForBailoutBeforeSplit(condition(),
540 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
541 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
542 if (false_label_ != fall_through_) __ jmp(false_label_);
543 } else if (lit->IsTrue() || lit->IsJSObject()) {
544 if (true_label_ != fall_through_) __ jmp(true_label_);
545 } else if (lit->IsString()) {
// Empty string is falsy; any other string is truthy.
546 if (String::cast(*lit)->length() == 0) {
547 if (false_label_ != fall_through_) __ jmp(false_label_);
549 if (true_label_ != fall_through_) __ jmp(true_label_);
551 } else if (lit->IsSmi()) {
// Smi zero is falsy; any other smi is truthy.
552 if (Smi::cast(*lit)->value() == 0) {
553 if (false_label_ != fall_through_) __ jmp(false_label_);
555 if (true_label_ != fall_through_) __ jmp(true_label_);
558 // For simplicity we always test the accumulator register.
559 __ Move(result_register(), lit);
560 codegen()->DoTest(this);
// Effect context: the value in |reg| is unused; the |count| stack slots are
// simply dropped. NOTE(review): the Drop call is elided from this view.
565 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
566 Register reg) const {
// Accumulator context: drop the |count| stack slots (elided from this view)
// and move |reg| into the result register.
572 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
574 Register reg) const {
577 __ Move(result_register(), reg);
// Stack context: collapse |count| stack slots into one by dropping all but
// the last and overwriting the top of stack with |reg|.
581 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
582 Register reg) const {
584 if (count > 1) __ Drop(count - 1);
585 __ movp(Operand(rsp, 0), reg);
// Test context: drop the slots (elided from this view), move |reg| into the
// accumulator, and branch on its truth value via DoTest.
589 void FullCodeGenerator::TestContext::DropAndPlug(int count,
590 Register reg) const {
592 // For simplicity we always test the accumulator register.
594 __ Move(result_register(), reg);
595 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
596 codegen()->DoTest(this);
// Effect context: true and false materialization points must coincide since
// no value is produced; just bind the shared label.
600 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
601 Label* materialize_false) const {
602 ASSERT(materialize_true == materialize_false);
603 __ bind(materialize_true);
// Accumulator context: materialize true/false into the result register at
// the respective labels, converging on a shared done label (declaration and
// final bind elided from this view).
607 void FullCodeGenerator::AccumulatorValueContext::Plug(
608 Label* materialize_true,
609 Label* materialize_false) const {
611 __ bind(materialize_true);
612 __ Move(result_register(), isolate()->factory()->true_value());
613 __ jmp(&done, Label::kNear);
614 __ bind(materialize_false);
615 __ Move(result_register(), isolate()->factory()->false_value());
// Stack context: push true/false at the respective materialization labels,
// converging on a shared done label (declaration and final bind elided from
// this view).
620 void FullCodeGenerator::StackValueContext::Plug(
621 Label* materialize_true,
622 Label* materialize_false) const {
624 __ bind(materialize_true);
625 __ Push(isolate()->factory()->true_value());
626 __ jmp(&done, Label::kNear);
627 __ bind(materialize_false);
628 __ Push(isolate()->factory()->false_value());
// Test context: nothing to materialize — control flow already targets the
// context's own true/false labels; just assert they match.
633 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
634 Label* materialize_false) const {
635 ASSERT(materialize_true == true_label_);
636 ASSERT(materialize_false == false_label_);
// Effect context: a boolean constant has no side effects, nothing to emit.
640 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
// Accumulator context: load the true/false root into the result register.
644 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
645 Heap::RootListIndex value_root_index =
646 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
647 __ LoadRoot(result_register(), value_root_index);
// Stack context: push the true/false root value.
651 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
652 Heap::RootListIndex value_root_index =
653 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
654 __ PushRoot(value_root_index);
// Test context: a constant condition becomes an unconditional jump to the
// appropriate label (omitted when it is the fall-through).
658 void FullCodeGenerator::TestContext::Plug(bool flag) const {
659 codegen()->PrepareForBailoutBeforeSplit(condition(),
664 if (true_label_ != fall_through_) __ jmp(true_label_);
666 if (false_label_ != fall_through_) __ jmp(false_label_);
// Emits a ToBoolean stub call on the accumulator and splits control flow on
// the stub's result (nonzero rax means true).
671 void FullCodeGenerator::DoTest(Expression* condition,
674 Label* fall_through) {
675 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
676 CallIC(ic, condition->test_id());
677 __ testp(result_register(), result_register());
678 // The stub returns nonzero for true.
679 Split(not_zero, if_true, if_false, fall_through);
// Emits the minimal branch sequence for condition |cc|: jump on cc when the
// false target falls through, jump on the negated condition when the true
// target falls through. NOTE(review): the general two-jump case and the
// if_true parameter line are elided from this view.
683 void FullCodeGenerator::Split(Condition cc,
686 Label* fall_through) {
687 if (if_false == fall_through) {
689 } else if (if_true == fall_through) {
690 __ j(NegateCondition(cc), if_false);
// Returns the rbp-relative operand for a stack-allocated variable:
// parameters live above the saved fp/return address, locals below.
698 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
699 ASSERT(var->IsStackAllocated());
700 // Offset is negative because higher indexes are at lower addresses.
701 int offset = -var->index() * kPointerSize;
702 // Adjust by a (parameter or local) base offset.
703 if (var->IsParameter()) {
704 offset += kFPOnStackSize + kPCOnStackSize +
705 (info_->scope()->num_parameters() - 1) * kPointerSize;
707 offset += JavaScriptFrameConstants::kLocal0Offset;
709 return Operand(rbp, offset);
// Returns a memory operand for |var|: for context slots, walks the context
// chain into |scratch| first (clobbering it); otherwise the frame slot.
713 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
714 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
715 if (var->IsContextSlot()) {
716 int context_chain_length = scope()->ContextChainLength(var->scope());
717 __ LoadContext(scratch, context_chain_length);
718 return ContextOperand(scratch, var->index());
720 return StackOperand(var);
// Loads |var|'s value into |dest|; |dest| doubles as the scratch register
// for the context-chain walk in VarOperand.
725 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
726 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
727 MemOperand location = VarOperand(var, dest);
728 __ movp(dest, location);
// Stores |src| into |var|, emitting a write barrier when the target is a
// context slot. scratch0 holds the context during the store; scratch1 is
// used by the barrier. The scratch registers must be distinct from each
// other and from src (asserted below).
732 void FullCodeGenerator::SetVar(Variable* var,
736 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
737 ASSERT(!scratch0.is(src));
738 ASSERT(!scratch0.is(scratch1));
739 ASSERT(!scratch1.is(src));
740 MemOperand location = VarOperand(var, scratch0);
741 __ movp(location, src);
743 // Emit the write barrier code if the location is in the heap.
744 if (var->IsContextSlot()) {
745 int offset = Context::SlotOffset(var->index());
746 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
// Records a bailout point for |expr| with the value on top of stack, and
// when |should_normalize| is set, normalizes the TOS value to the true/false
// split expected after deoptimization. NOTE(review): the skip label
// declaration/bind and the if_true/if_false parameter lines are elided.
751 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
752 bool should_normalize,
755 // Only prepare for bailouts before splits if we're in a test
756 // context. Otherwise, we let the Visit function deal with the
757 // preparation to avoid preparing with the same AST id twice.
758 if (!context()->IsTest() || !info_->IsOptimizable()) return;
761 if (should_normalize) __ jmp(&skip, Label::kNear);
762 PrepareForBailout(expr, TOS_REG);
763 if (should_normalize) {
764 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
765 Split(equal, if_true, if_false, NULL);
// Debug-mode check that the current context (rsi) is a legal declaration
// site: aborts if it is a with or catch context. Clobbers rbx.
771 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
772 // The variable in the declaration always resides in the current context.
773 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
774 if (generate_debug_code_) {
775 // Check that we're not inside a with or catch context.
776 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
777 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
778 __ Check(not_equal, kDeclarationInWithContext);
779 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
780 __ Check(not_equal, kDeclarationInCatchContext);
// Emits code (or records global data) for a variable declaration, dispatched
// on the variable's allocation: globals are collected into globals_ for a
// later DeclareGlobals call; stack/context slots are hole-initialized when
// the mode requires it; LOOKUP slots go through the runtime. NOTE(review):
// several case bodies, the hole_init guards, and break statements are
// elided from this view.
785 void FullCodeGenerator::VisitVariableDeclaration(
786 VariableDeclaration* declaration) {
787 // If it was not possible to allocate the variable at compile time, we
788 // need to "declare" it at runtime to make sure it actually exists in the
790 VariableProxy* proxy = declaration->proxy();
791 VariableMode mode = declaration->mode();
792 Variable* variable = proxy->var();
// LET / CONST / CONST_LEGACY bindings start out holding the hole so TDZ
// and const semantics can be checked at use sites.
793 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
794 switch (variable->location()) {
795 case Variable::UNALLOCATED:
796 globals_->Add(variable->name(), zone());
797 globals_->Add(variable->binding_needs_init()
798 ? isolate()->factory()->the_hole_value()
799 : isolate()->factory()->undefined_value(),
803 case Variable::PARAMETER:
804 case Variable::LOCAL:
806 Comment cmnt(masm_, "[ VariableDeclaration");
807 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
808 __ movp(StackOperand(variable), kScratchRegister);
812 case Variable::CONTEXT:
814 Comment cmnt(masm_, "[ VariableDeclaration");
815 EmitDebugCheckDeclarationContext(variable);
816 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
817 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
818 // No write barrier since the hole value is in old space.
819 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
823 case Variable::LOOKUP: {
824 Comment cmnt(masm_, "[ VariableDeclaration");
826 __ Push(variable->name());
827 // Declaration nodes are always introduced in one of four modes.
828 ASSERT(IsDeclaredVariableMode(mode));
829 PropertyAttributes attr =
830 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
831 __ Push(Smi::FromInt(attr));
832 // Push initial value, if any.
833 // Note: For variables we must not push an initial value (such as
834 // 'undefined') because we may have a (legal) redeclaration and we
835 // must not destroy the current value.
837 __ PushRoot(Heap::kTheHoleValueRootIndex);
839 __ Push(Smi::FromInt(0)); // Indicates no initial value.
841 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
// Emits code for a function declaration: globals get a compiled
// SharedFunctionInfo recorded in globals_; stack/context slots evaluate the
// function expression into the accumulator and store it (with a write
// barrier for context slots); LOOKUP slots go through the runtime.
// NOTE(review): case-closing braces, break statements, and the trailing
// RecordWriteContextSlot arguments are elided from this view.
848 void FullCodeGenerator::VisitFunctionDeclaration(
849 FunctionDeclaration* declaration) {
850 VariableProxy* proxy = declaration->proxy();
851 Variable* variable = proxy->var();
852 switch (variable->location()) {
853 case Variable::UNALLOCATED: {
854 globals_->Add(variable->name(), zone());
855 Handle<SharedFunctionInfo> function =
856 Compiler::BuildFunctionInfo(declaration->fun(), script());
857 // Check for stack-overflow exception.
858 if (function.is_null()) return SetStackOverflow();
859 globals_->Add(function, zone());
863 case Variable::PARAMETER:
864 case Variable::LOCAL: {
865 Comment cmnt(masm_, "[ FunctionDeclaration");
866 VisitForAccumulatorValue(declaration->fun());
867 __ movp(StackOperand(variable), result_register());
871 case Variable::CONTEXT: {
872 Comment cmnt(masm_, "[ FunctionDeclaration");
873 EmitDebugCheckDeclarationContext(variable);
874 VisitForAccumulatorValue(declaration->fun());
875 __ movp(ContextOperand(rsi, variable->index()), result_register());
876 int offset = Context::SlotOffset(variable->index());
877 // We know that we have written a function, which is not a smi.
878 __ RecordWriteContextSlot(rsi,
885 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
889 case Variable::LOOKUP: {
890 Comment cmnt(masm_, "[ FunctionDeclaration");
892 __ Push(variable->name());
// NONE attributes: function declarations are writable/enumerable.
893 __ Push(Smi::FromInt(NONE));
894 VisitForStackValue(declaration->fun());
895 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
// Emits code for a module declaration: loads the module's instance object
// out of the global context's interface slot, stores it into this scope's
// context slot (with a write barrier), then compiles the module body.
// NOTE(review): the trailing RecordWriteContextSlot arguments are elided
// from this view.
902 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
903 Variable* variable = declaration->proxy()->var();
904 ASSERT(variable->location() == Variable::CONTEXT);
905 ASSERT(variable->interface()->IsFrozen());
907 Comment cmnt(masm_, "[ ModuleDeclaration");
908 EmitDebugCheckDeclarationContext(variable);
910 // Load instance object.
911 __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
912 __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
913 __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
916 __ movp(ContextOperand(rsi, variable->index()), rax);
917 // We know that we have written a module, which is not a smi.
918 __ RecordWriteContextSlot(rsi,
919 Context::SlotOffset(variable->index()),
925 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
927 // Traverse into body.
928 Visit(declaration->module());
// Emits code for an import declaration. Only CONTEXT-allocated imports emit
// anything here (a declaration-context debug check); the remaining cases'
// bodies (UNREACHABLE/no-op) are elided from this view.
932 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
933 VariableProxy* proxy = declaration->proxy();
934 Variable* variable = proxy->var();
935 switch (variable->location()) {
936 case Variable::UNALLOCATED:
940 case Variable::CONTEXT: {
941 Comment cmnt(masm_, "[ ImportDeclaration");
942 EmitDebugCheckDeclarationContext(variable);
947 case Variable::PARAMETER:
948 case Variable::LOCAL:
949 case Variable::LOOKUP:
// Export declarations generate no code here; the body (elided from this
// view) is presumably empty or a TODO — confirm against the full source.
955 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
// Declares the accumulated global variables/functions in one runtime call:
// context, the pairs array (push elided from this view), and the flags smi.
960 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
961 // Call the runtime to declare the globals.
962 __ Push(rsi); // The context is the first argument.
964 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
965 __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
966 // Return value is ignored.
// Declares all modules described by |descriptions| via a single runtime call.
970 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
971 // Call the runtime to declare the modules.
972 __ Push(descriptions);
973 __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
974 // Return value is ignored.
// Compiles a switch statement as a chain of strict-equality tests against
// the tag value kept on the stack, followed by all the case bodies. Each
// comparison uses an inline smi fast path (patchable via JumpPatchSite)
// backed by a CompareIC stub call. NOTE(review): interior lines — the smi
// fast-path compare, label declarations, closing braces — are elided from
// this view.
978 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
979 Comment cmnt(masm_, "[ SwitchStatement");
980 Breakable nested_statement(this, stmt);
981 SetStatementPosition(stmt);
983 // Keep the switch value on the stack until a case matches.
984 VisitForStackValue(stmt->tag());
985 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
987 ZoneList<CaseClause*>* clauses = stmt->cases();
988 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
990 Label next_test; // Recycled for each test.
991 // Compile all the tests with branches to their bodies.
992 for (int i = 0; i < clauses->length(); i++) {
993 CaseClause* clause = clauses->at(i);
994 clause->body_target()->Unuse();
996 // The default is not a test, but remember it as final fall through.
997 if (clause->is_default()) {
998 default_clause = clause;
1002 Comment cmnt(masm_, "[ Case comparison");
1003 __ bind(&next_test);
1006 // Compile the label expression.
1007 VisitForAccumulatorValue(clause->label());
1009 // Perform the comparison as if via '==='.
1010 __ movp(rdx, Operand(rsp, 0)); // Switch value.
1011 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1012 JumpPatchSite patch_site(masm_);
1013 if (inline_smi_code) {
// Fast path: if both operands are smis, compare directly; otherwise
// fall through to the IC call at slow_case.
1017 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1020 __ j(not_equal, &next_test);
1021 __ Drop(1); // Switch value is no longer needed.
1022 __ jmp(clause->body_target());
1023 __ bind(&slow_case);
1026 // Record position before stub call for type feedback.
1027 SetSourcePosition(clause->position());
1028 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1029 CallIC(ic, clause->CompareId());
1030 patch_site.EmitPatchInfo();
1033 __ jmp(&skip, Label::kNear);
1034 PrepareForBailout(clause, TOS_REG);
1035 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1036 __ j(not_equal, &next_test);
1038 __ jmp(clause->body_target());
1042 __ j(not_equal, &next_test);
1043 __ Drop(1); // Switch value is no longer needed.
1044 __ jmp(clause->body_target());
1047 // Discard the test value and jump to the default if present, otherwise to
1048 // the end of the statement.
1049 __ bind(&next_test);
1050 __ Drop(1); // Switch value is no longer needed.
1051 if (default_clause == NULL) {
1052 __ jmp(nested_statement.break_label());
1054 __ jmp(default_clause->body_target());
1057 // Compile all the case bodies.
1058 for (int i = 0; i < clauses->length(); i++) {
1059 Comment cmnt(masm_, "[ Case body");
1060 CaseClause* clause = clauses->at(i);
1061 __ bind(clause->body_target());
1062 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1063 VisitStatements(clause->statements());
1066 __ bind(nested_statement.break_label());
1067 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
// --- VisitForInStatement -------------------------------------------------
// Emits x64 full-codegen for `for (each in enumerable) body`:
//   1. Skip the whole loop when the enumerable is undefined or null.
//   2. Convert the enumerable to a JS object (TO_OBJECT builtin).
//   3. Fast path: use the map's enum cache; slow path: ask the runtime for
//      a fixed array of property names (proxies always take the slow path).
//   4. Per iteration: re-check the expected map, FILTER_KEY properties
//      deleted during iteration, assign to `each`, run the body, and bump
//      the smi index kept on top of the stack.
// NOTE(review): this chunk elides several original lines (e.g. the local
// Label declarations for call_runtime/use_cache/fixed_array/non_proxy/loop
// and some branches/closing braces) — consult the full file before editing.
1071 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1072 Comment cmnt(masm_, "[ ForInStatement");
1073 int slot = stmt->ForInFeedbackSlot();
1074 SetStatementPosition(stmt);
1077 ForIn loop_statement(this, stmt);
1078 increment_loop_depth();
1080 // Get the object to enumerate over. If the object is null or undefined, skip
1081 // over the loop. See ECMA-262 version 5, section 12.6.4.
1082 VisitForAccumulatorValue(stmt->enumerable());
1083 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1085 Register null_value = rdi;
1086 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1087 __ cmpp(rax, null_value);
1090 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1092 // Convert the object to a JS object.
1093 Label convert, done_convert;
1094 __ JumpIfSmi(rax, &convert);
1095 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1096 __ j(above_equal, &done_convert);
1099 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1100 __ bind(&done_convert);
1103 // Check for proxies.
1105 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1106 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1107 __ j(below_equal, &call_runtime);
1109 // Check cache validity in generated code. This is a fast case for
1110 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1111 // guarantee cache validity, call the runtime system to check cache
1112 // validity or get the property names in a fixed array.
1113 __ CheckEnumCache(null_value, &call_runtime);
1115 // The enum cache is valid. Load the map of the object being
1116 // iterated over and use the cache for the iteration.
1118 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1119 __ jmp(&use_cache, Label::kNear);
1121 // Get the set of properties to enumerate.
1122 __ bind(&call_runtime);
1123 __ Push(rax); // Duplicate the enumerable object on the stack.
1124 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1126 // If we got a map from the runtime call, we can do a fast
1127 // modification check. Otherwise, we got a fixed array, and we have
1128 // to do a slow check.
1130 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1131 Heap::kMetaMapRootIndex);
1132 __ j(not_equal, &fixed_array);
1134 // We got a map in register rax. Get the enumeration cache from it.
1135 __ bind(&use_cache);
1137 Label no_descriptors;
1139 __ EnumLength(rdx, rax);
1140 __ Cmp(rdx, Smi::FromInt(0));
1141 __ j(equal, &no_descriptors);
1143 __ LoadInstanceDescriptors(rax, rcx);
1144 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1145 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1147 // Set up the four remaining stack slots.
1148 __ Push(rax); // Map.
1149 __ Push(rcx); // Enumeration cache.
1150 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1151 __ Push(Smi::FromInt(0)); // Initial index.
1154 __ bind(&no_descriptors);
// Zero descriptors: drop the slot pushed for this path and leave the loop
// (the jump target is in elided lines — TODO confirm against full file).
1155 __ addp(rsp, Immediate(kPointerSize));
1158 // We got a fixed array in register rax. Iterate through that.
1160 __ bind(&fixed_array);
1162 Handle<Object> feedback = Handle<Object>(
1163 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1165 StoreFeedbackVectorSlot(slot, feedback);
1167 // No need for a write barrier, we are storing a Smi in the feedback vector.
1168 __ Move(rbx, FeedbackVector());
1169 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1170 Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
1171 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1172 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1173 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1174 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1175 __ j(above, &non_proxy);
1176 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1177 __ bind(&non_proxy);
1178 __ Push(rbx); // Smi
1179 __ Push(rax); // Array
1180 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1181 __ Push(rax); // Fixed array length (as smi).
1182 __ Push(Smi::FromInt(0)); // Initial index.
1184 // Generate code for doing the condition check.
1185 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
// Loop head: the body below reads five stack slots —
//   0: index, 1: length, 2: cache/array, 3: expected map (or smi flag),
//   4: enumerable — matching the pushes above (TODO confirm; some pushes
// are in elided lines).
1187 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1188 __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1189 __ j(above_equal, loop_statement.break_label());
1191 // Get the current entry of the array into register rbx.
1192 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1193 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1194 __ movp(rbx, FieldOperand(rbx,
1197 FixedArray::kHeaderSize));
1199 // Get the expected map from the stack or a smi in the
1200 // permanent slow case into register rdx.
1201 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1203 // Check if the expected map still matches that of the enumerable.
1204 // If not, we may have to filter the key.
1206 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1207 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1208 __ j(equal, &update_each, Label::kNear);
1210 // For proxies, no filtering is done.
1211 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1212 __ Cmp(rdx, Smi::FromInt(0));
1213 __ j(equal, &update_each, Label::kNear);
1215 // Convert the entry to a string or null if it isn't a property
1216 // anymore. If the property has been removed while iterating, we
1218 __ Push(rcx); // Enumerable.
1219 __ Push(rbx); // Current entry.
1220 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1221 __ Cmp(rax, Smi::FromInt(0));
1222 __ j(equal, loop_statement.continue_label());
1225 // Update the 'each' property or variable from the possibly filtered
1226 // entry in register rbx.
1227 __ bind(&update_each);
1228 __ movp(result_register(), rbx);
1229 // Perform the assignment as if via '='.
1230 { EffectContext context(this);
1231 EmitAssignment(stmt->each());
1234 // Generate code for the body of the loop.
1235 Visit(stmt->body());
1237 // Generate code for going to the next element by incrementing the
1238 // index (smi) stored on top of the stack.
1239 __ bind(loop_statement.continue_label());
1240 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
// NOTE(review): `loop` is declared/bound in elided lines; the back edge
// returns to the condition check above.
1242 EmitBackEdgeBookkeeping(stmt, &loop);
1245 // Remove the pointers stored on the stack.
1246 __ bind(loop_statement.break_label());
1247 __ addp(rsp, Immediate(5 * kPointerSize));
1249 // Exit and decrement the loop depth.
1250 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1252 decrement_loop_depth();
// --- VisitForOfStatement -------------------------------------------------
// Emits `for (each of iterable)`: skips the loop entirely when the
// iterator is null or undefined, converts it to a JS object, then per
// iteration calls iterator.next(), breaks when result.done is true,
// assigns result.value to `each`, and runs the body with back-edge
// bookkeeping. The AST supplies the sub-expressions (assign_iterator,
// next_result, result_done, assign_each) that this function stitches
// together.
1256 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1257 Comment cmnt(masm_, "[ ForOfStatement");
1258 SetStatementPosition(stmt);
1260 Iteration loop_statement(this, stmt);
1261 increment_loop_depth();
1263 // var iterator = iterable[@@iterator]()
1264 VisitForAccumulatorValue(stmt->assign_iterator());
1266 // As with for-in, skip the loop if the iterator is null or undefined.
1267 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1268 __ j(equal, loop_statement.break_label());
1269 __ CompareRoot(rax, Heap::kNullValueRootIndex);
1270 __ j(equal, loop_statement.break_label());
1272 // Convert the iterator to a JS object.
1273 Label convert, done_convert;
1274 __ JumpIfSmi(rax, &convert);
1275 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1276 __ j(above_equal, &done_convert);
1279 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1280 __ bind(&done_convert);
// Loop head: each iteration re-enters here via the continue label.
1283 __ bind(loop_statement.continue_label());
1285 // result = iterator.next()
1286 VisitForEffect(stmt->next_result());
1288 // if (result.done) break;
1289 Label result_not_done;
// NOTE(review): the remaining VisitForControl arguments (fall-through
// target) are in elided lines — confirm against the full file.
1290 VisitForControl(stmt->result_done(),
1291 loop_statement.break_label(),
1294 __ bind(&result_not_done);
1296 // each = result.value
1297 VisitForEffect(stmt->assign_each());
1299 // Generate code for the body of the loop.
1300 Visit(stmt->body());
1302 // Check stack before looping.
1303 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1304 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1305 __ jmp(loop_statement.continue_label());
1307 // Exit and decrement the loop depth.
1308 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1309 __ bind(loop_statement.break_label());
1310 decrement_loop_depth();
// --- EmitNewClosure ------------------------------------------------------
// Materializes a closure for `info` and plugs the result (rax) into the
// current expression context. Fast path: FastNewClosureStub when neither
// --always-opt nor --prepare-always-opt is set, the enclosing scope is a
// function scope, and the function has no literals. Otherwise the runtime
// function kHiddenNewClosure is used (with a pretenure flag pushed as the
// true/false value) so the new function can still be optimized rather
// than inheriting a copy of the unoptimized code.
// NOTE(review): the second parameter of this function and several
// statements (stub call, runtime-path pushes) are elided in this chunk.
1314 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1316 // Use the fast case closure allocation code that allocates in new
1317 // space for nested functions that don't need literals cloning. If
1318 // we're running with the --always-opt or the --prepare-always-opt
1319 // flag, we need to use the runtime function so that the new function
1320 // we are creating here gets a chance to have its code optimized and
1321 // doesn't just get a copy of the existing unoptimized code.
1322 if (!FLAG_always_opt &&
1323 !FLAG_prepare_always_opt &&
1325 scope()->is_function_scope() &&
1326 info->num_literals() == 0) {
1327 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1334 ? isolate()->factory()->true_value()
1335 : isolate()->factory()->false_value());
1336 __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1338 context()->Plug(rax);
// Loads the value of a variable reference into the current expression
// context by delegating to EmitVariableLoad.
1342 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1343 Comment cmnt(masm_, "[ VariableProxy");
1344 EmitVariableLoad(expr);
// --- EmitLoadGlobalCheckExtensions ---------------------------------------
// Fast global load used for DYNAMIC_GLOBAL variables: walks the context
// chain from the current scope outward, jumping to `slow` if any context
// on the way has a non-NULL extension object (introduced by sloppy eval).
// If an eval scope is reached, the remaining chain is checked in a
// generated loop up to the native context. Finally loads the global
// object and the variable name for a contextual load IC.
// Registers: rsi holds the current context, rdx is scratch,
// kScratchRegister holds the native-context map for the loop comparison.
// NOTE(review): loop headers, the `fast` label declaration/bind, and the
// closing braces are elided in this chunk.
1348 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1349 TypeofState typeof_state,
1351 Register context = rsi;
1352 Register temp = rdx;
1356 if (s->num_heap_slots() > 0) {
1357 if (s->calls_sloppy_eval()) {
1358 // Check that extension is NULL.
1359 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1361 __ j(not_equal, slow);
1363 // Load next context in chain.
1364 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1365 // Walk the rest of the chain without clobbering rsi.
1368 // If no outer scope calls eval, we do not need to check more
1369 // context extensions. If we have reached an eval scope, we check
1370 // all extensions from this point.
1371 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1372 s = s->outer_scope();
1375 if (s != NULL && s->is_eval_scope()) {
1376 // Loop up the context chain. There is no frame effect so it is
1377 // safe to use raw labels here.
1379 if (!context.is(temp)) {
1380 __ movp(temp, context);
1382 // Load map for comparison into register, outside loop.
1383 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1385 // Terminate at native context.
1386 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1387 __ j(equal, &fast, Label::kNear);
1388 // Check that extension is NULL.
1389 __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1390 __ j(not_equal, slow);
1391 // Load next context in chain.
1392 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1397 // All extension objects were empty and it is safe to use a global
1399 __ movp(rax, GlobalObjectOperand());
1400 __ Move(rcx, var->name());
1401 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
// --- ContextSlotOperandCheckExtensions -----------------------------------
// Returns the MemOperand for `var`'s context slot, walking the context
// chain from the current scope to var->scope() and jumping to `slow` if
// any context on the way (including the last one) carries an extension
// object created by sloppy eval. Load-only: the returned operand is based
// on rsi, which a store's write barrier would clobber.
// Registers: rsi holds the walked context, rbx is scratch.
1408 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1410 ASSERT(var->IsContextSlot());
1411 Register context = rsi;
1412 Register temp = rbx;
1414 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1415 if (s->num_heap_slots() > 0) {
1416 if (s->calls_sloppy_eval()) {
1417 // Check that extension is NULL.
1418 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1420 __ j(not_equal, slow);
1422 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1423 // Walk the rest of the chain without clobbering rsi.
1427 // Check that last extension is NULL.
1428 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1429 __ j(not_equal, slow);
1431 // This function is used only for loads, not stores, so it's safe to
1432 // return an rsi-based operand (the write barrier cannot be allowed to
1433 // destroy the rsi register).
1434 return ContextOperand(context, var->index());
// --- EmitDynamicLookupFastCase -------------------------------------------
// Emits the fast-case load for a variable that might be shadowed by
// eval-introduced bindings, avoiding a runtime call when no shadowing
// extension object exists. DYNAMIC_GLOBAL delegates to
// EmitLoadGlobalCheckExtensions; DYNAMIC_LOCAL loads the unshadowed
// local's context slot and, for let/const bindings still holding the
// hole, either throws a reference error (LET/CONST) or yields undefined
// (CONST_LEGACY). Falls through to the slow path via `slow` on any
// extension-object hit.
// NOTE(review): the `done` jump after the DYNAMIC_GLOBAL case and the
// trailing braces are elided in this chunk.
1438 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1439 TypeofState typeof_state,
1442 // Generate fast-case code for variables that might be shadowed by
1443 // eval-introduced variables. Eval is used a lot without
1444 // introducing variables. In those cases, we do not want to
1445 // perform a runtime call for all variables in the scope
1446 // containing the eval.
1447 if (var->mode() == DYNAMIC_GLOBAL) {
1448 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1450 } else if (var->mode() == DYNAMIC_LOCAL) {
1451 Variable* local = var->local_if_not_shadowed();
1452 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1453 if (local->mode() == LET || local->mode() == CONST ||
1454 local->mode() == CONST_LEGACY) {
1455 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1456 __ j(not_equal, done);
1457 if (local->mode() == CONST_LEGACY) {
1458 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1459 } else { // LET || CONST
1460 __ Push(var->name());
1461 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
// --- EmitVariableLoad ----------------------------------------------------
// Loads the variable referenced by `proxy` into the current expression
// context, dispatching on the variable's location:
//  * UNALLOCATED: contextual load IC on the global object (name in rcx).
//  * PARAMETER/LOCAL/CONTEXT: direct slot load; let/const bindings get a
//    hole check (read barrier) unless static analysis proves the use
//    follows initialization in the same declaration scope.
//  * LOOKUP: dynamic lookup with a fast case, falling back to
//    Runtime::kHiddenLoadContextSlot.
1469 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1470 // Record position before possible IC call.
1471 SetSourcePosition(proxy->position());
1472 Variable* var = proxy->var();
1474 // Three cases: global variables, lookup variables, and all other types of
1476 switch (var->location()) {
1477 case Variable::UNALLOCATED: {
1478 Comment cmnt(masm_, "[ Global variable");
1479 // Use inline caching. Variable name is passed in rcx and the global
1480 // object on the stack.
1481 __ Move(rcx, var->name());
1482 __ movp(rax, GlobalObjectOperand());
1483 CallLoadIC(CONTEXTUAL);
1484 context()->Plug(rax);
1488 case Variable::PARAMETER:
1489 case Variable::LOCAL:
1490 case Variable::CONTEXT: {
1491 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1493 if (var->binding_needs_init()) {
1494 // var->scope() may be NULL when the proxy is located in eval code and
1495 // refers to a potential outside binding. Currently those bindings are
1496 // always looked up dynamically, i.e. in that case
1497 // var->location() == LOOKUP.
1499 ASSERT(var->scope() != NULL);
1501 // Check if the binding really needs an initialization check. The check
1502 // can be skipped in the following situation: we have a LET or CONST
1503 // binding in harmony mode, both the Variable and the VariableProxy have
1504 // the same declaration scope (i.e. they are both in global code, in the
1505 // same function or in the same eval code) and the VariableProxy is in
1506 // the source physically located after the initializer of the variable.
1508 // We cannot skip any initialization checks for CONST in non-harmony
1509 // mode because const variables may be declared but never initialized:
1510 // if (false) { const x; }; var y = x;
1512 // The condition on the declaration scopes is a conservative check for
1513 // nested functions that access a binding and are called before the
1514 // binding is initialized:
1515 // function() { f(); let x = 1; function f() { x = 2; } }
1517 bool skip_init_check;
1518 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1519 skip_init_check = false;
1521 // Check that we always have valid source position.
1522 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1523 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1524 skip_init_check = var->mode() != CONST_LEGACY &&
1525 var->initializer_position() < proxy->position();
1528 if (!skip_init_check) {
1529 // Let and const need a read barrier.
// The actual slot load (GetVar) is in elided lines; rax holds the value
// by the time of the hole comparison below — TODO confirm against full
// file.
1532 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1533 __ j(not_equal, &done, Label::kNear);
1534 if (var->mode() == LET || var->mode() == CONST) {
1535 // Throw a reference error when using an uninitialized let/const
1536 // binding in harmony mode.
1537 __ Push(var->name());
1538 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1540 // Uninitalized const bindings outside of harmony mode are unholed.
1541 ASSERT(var->mode() == CONST_LEGACY);
1542 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1545 context()->Plug(rax);
1549 context()->Plug(var);
1553 case Variable::LOOKUP: {
1554 Comment cmnt(masm_, "[ Lookup slot");
1556 // Generate code for loading from variables potentially shadowed
1557 // by eval-introduced variables.
1558 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
// Slow path: full runtime lookup of the name in the context chain.
// NOTE(review): the slow/done label declarations and binds are in elided
// lines.
1560 __ Push(rsi); // Context.
1561 __ Push(var->name());
1562 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1564 context()->Plug(rax);
// --- VisitRegExpLiteral --------------------------------------------------
// Loads the cached regexp literal from the function's literals array,
// materializing it via Runtime::kHiddenMaterializeRegExpLiteral on first
// use (literal slot holds undefined), then makes a shallow copy of the
// JSRegExp — allocating in new space with a runtime fallback — so each
// evaluation yields a fresh object. Result is plugged from rax.
1571 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1572 Comment cmnt(masm_, "[ RegExpLiteral");
1574 // Registers will be used as follows:
1575 // rdi = JS function.
1576 // rcx = literals array.
1577 // rbx = regexp literal.
1578 // rax = regexp literal clone.
1579 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1580 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1581 int literal_offset =
1582 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1583 __ movp(rbx, FieldOperand(rcx, literal_offset));
1584 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1585 __ j(not_equal, &materialized, Label::kNear);
1587 // Create regexp literal using runtime function
1588 // Result will be in rax.
// First runtime argument (the literals array) is pushed in an elided
// line — TODO confirm against full file.
1590 __ Push(Smi::FromInt(expr->literal_index()));
1591 __ Push(expr->pattern());
1592 __ Push(expr->flags());
1593 __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1596 __ bind(&materialized);
1597 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1598 Label allocated, runtime_allocate;
1599 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1602 __ bind(&runtime_allocate);
// Fallback when inline allocation fails: ask the runtime for the space.
1604 __ Push(Smi::FromInt(size));
1605 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1608 __ bind(&allocated);
1609 // Copy the content into the newly allocated memory.
1610 // (Unroll copy loop once for better throughput).
1611 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1612 __ movp(rdx, FieldOperand(rbx, i));
1613 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1614 __ movp(FieldOperand(rax, i), rdx);
1615 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1617 if ((size % (2 * kPointerSize)) != 0) {
// Odd pointer count: copy the final word the unrolled loop skipped.
1618 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1619 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1621 context()->Plug(rax);
// Pushes an accessor function (getter or setter) on the stack, or the
// null value when that half of the accessor pair is absent.
// NOTE(review): the `} else {` between the two branches and the closing
// braces are elided in this chunk.
1625 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1626 if (expression == NULL) {
1627 __ PushRoot(Heap::kNullValueRootIndex);
1629 VisitForStackValue(expression);
// --- VisitObjectLiteral --------------------------------------------------
// Creates an object literal and fills in its non-compile-time properties.
// Creation: FastCloneShallowObjectStub for shallow, fast-elements
// literals within the property-count limit; otherwise
// Runtime::kHiddenCreateObjectLiteral (also when doubles may be stored or
// the serializer is enabled). Stores: named constants go through the
// store IC, computed keys through Runtime::kSetProperty, __proto__
// through kSetPrototype, and getter/setter pairs are batched per key into
// single kDefineOrRedefineAccessorProperty calls via accessor_table.
1634 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1635 Comment cmnt(masm_, "[ ObjectLiteral");
1637 expr->BuildConstantProperties(isolate());
1638 Handle<FixedArray> constant_properties = expr->constant_properties();
1639 int flags = expr->fast_elements()
1640 ? ObjectLiteral::kFastElements
1641 : ObjectLiteral::kNoFlags;
1642 flags |= expr->has_function()
1643 ? ObjectLiteral::kHasFunction
1644 : ObjectLiteral::kNoFlags;
// Each property occupies two slots (key, value) in the constant array.
1645 int properties_count = constant_properties->length() / 2;
1646 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1647 flags != ObjectLiteral::kFastElements ||
1648 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1649 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1650 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1651 __ Push(Smi::FromInt(expr->literal_index()));
1652 __ Push(constant_properties);
1653 __ Push(Smi::FromInt(flags));
1654 __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1656 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1657 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1658 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1659 __ Move(rcx, constant_properties);
1660 __ Move(rdx, Smi::FromInt(flags));
1661 FastCloneShallowObjectStub stub(properties_count);
1665 // If result_saved is true the result is on top of the stack. If
1666 // result_saved is false the result is in rax.
1667 bool result_saved = false;
1669 // Mark all computed expressions that are bound to a key that
1670 // is shadowed by a later occurrence of the same key. For the
1671 // marked expressions, no store code is emitted.
1672 expr->CalculateEmitStore(zone());
1674 AccessorTable accessor_table(zone());
1675 for (int i = 0; i < expr->properties()->length(); i++) {
1676 ObjectLiteral::Property* property = expr->properties()->at(i);
1677 if (property->IsCompileTimeValue()) continue;
1679 Literal* key = property->key();
1680 Expression* value = property->value();
1681 if (!result_saved) {
1682 __ Push(rax); // Save result on the stack
1683 result_saved = true;
1685 switch (property->kind()) {
1686 case ObjectLiteral::Property::CONSTANT:
1688 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1689 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1691 case ObjectLiteral::Property::COMPUTED:
1692 if (key->value()->IsInternalizedString()) {
1693 if (property->emit_store()) {
1694 VisitForAccumulatorValue(value);
1695 __ Move(rcx, key->value());
1696 __ movp(rdx, Operand(rsp, 0));
1697 CallStoreIC(key->LiteralFeedbackId());
1698 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1700 VisitForEffect(value);
1704 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1705 VisitForStackValue(key);
1706 VisitForStackValue(value);
1707 if (property->emit_store()) {
1708 __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1709 __ CallRuntime(Runtime::kSetProperty, 4);
1714 case ObjectLiteral::Property::PROTOTYPE:
1715 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1716 VisitForStackValue(value);
1717 if (property->emit_store()) {
1718 __ CallRuntime(Runtime::kSetPrototype, 2);
1723 case ObjectLiteral::Property::GETTER:
1724 accessor_table.lookup(key)->second->getter = value;
1726 case ObjectLiteral::Property::SETTER:
1727 accessor_table.lookup(key)->second->setter = value;
1732 // Emit code to define accessors, using only a single call to the runtime for
1733 // each pair of corresponding getters and setters.
1734 for (AccessorTable::Iterator it = accessor_table.begin();
1735 it != accessor_table.end();
1737 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1738 VisitForStackValue(it->first);
1739 EmitAccessor(it->second->getter);
1740 EmitAccessor(it->second->setter);
1741 __ Push(Smi::FromInt(NONE));
1742 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1745 if (expr->has_function()) {
1746 ASSERT(result_saved);
1747 __ Push(Operand(rsp, 0));
1748 __ CallRuntime(Runtime::kToFastProperties, 1);
// Plug from TOS when the literal was spilled during property stores,
// otherwise straight from rax.
1752 context()->PlugTOS();
1754 context()->Plug(rax);
// --- VisitArrayLiteral ---------------------------------------------------
// Creates an array literal and fills in its non-constant elements.
// Creation: copy-on-write fast path when the constant elements share the
// fixed_cow_array_map; runtime call (kHiddenCreateArrayLiteral) for deep
// or over-long literals or when the serializer is enabled; otherwise
// FastCloneShallowArrayStub. Element stores: direct field store plus
// write barrier for fast object elements (no transition possible), else
// StoreArrayLiteralElementStub.
1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1760 Comment cmnt(masm_, "[ ArrayLiteral");
1762 expr->BuildConstantElements(isolate());
1763 int flags = expr->depth() == 1
1764 ? ArrayLiteral::kShallowElements
1765 : ArrayLiteral::kNoFlags;
1767 ZoneList<Expression*>* subexprs = expr->values();
1768 int length = subexprs->length();
1769 Handle<FixedArray> constant_elements = expr->constant_elements();
1770 ASSERT_EQ(2, constant_elements->length());
1771 ElementsKind constant_elements_kind =
1772 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1773 bool has_constant_fast_elements =
1774 IsFastObjectElementsKind(constant_elements_kind);
1775 Handle<FixedArrayBase> constant_elements_values(
1776 FixedArrayBase::cast(constant_elements->get(1)));
1778 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1779 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1780 // If the only customer of allocation sites is transitioning, then
1781 // we can turn it off if we don't have anywhere else to transition to.
1782 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1785 Heap* heap = isolate()->heap();
1786 if (has_constant_fast_elements &&
1787 constant_elements_values->map() == heap->fixed_cow_array_map()) {
1788 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1789 // change, so it's possible to specialize the stub in advance.
1790 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1791 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1792 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1793 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1794 __ Move(rcx, constant_elements);
1795 FastCloneShallowArrayStub stub(
1796 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1797 allocation_site_mode,
1800 } else if (expr->depth() > 1 || Serializer::enabled() ||
1801 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1802 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1803 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1804 __ Push(Smi::FromInt(expr->literal_index()));
1805 __ Push(constant_elements);
1806 __ Push(Smi::FromInt(flags));
1807 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1809 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1810 FLAG_smi_only_arrays);
1811 FastCloneShallowArrayStub::Mode mode =
1812 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1814 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1815 // change, so it's possible to specialize the stub in advance.
1816 if (has_constant_fast_elements) {
1817 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1820 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1821 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1822 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1823 __ Move(rcx, constant_elements);
1824 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1828 bool result_saved = false; // Is the result saved to the stack?
1830 // Emit code to evaluate all the non-constant subexpressions and to store
1831 // them into the newly cloned array.
1832 for (int i = 0; i < length; i++) {
1833 Expression* subexpr = subexprs->at(i);
1834 // If the subexpression is a literal or a simple materialized literal it
1835 // is already set in the cloned array.
1836 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1838 if (!result_saved) {
// Spill the array and its literal index; the index is consumed by
// StoreArrayLiteralElementStub in the slow store path.
1839 __ Push(rax); // array literal
1840 __ Push(Smi::FromInt(expr->literal_index()));
1841 result_saved = true;
1843 VisitForAccumulatorValue(subexpr);
1845 if (IsFastObjectElementsKind(constant_elements_kind)) {
1846 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1847 // cannot transition and don't need to call the runtime stub.
1848 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1849 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1850 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1851 // Store the subexpression value in the array's elements.
1852 __ movp(FieldOperand(rbx, offset), result_register());
1853 // Update the write barrier for the array store.
1854 __ RecordWriteField(rbx, offset, result_register(), rcx,
1856 EMIT_REMEMBERED_SET,
1859 // Store the subexpression value in the array's elements.
1860 __ Move(rcx, Smi::FromInt(i));
1861 StoreArrayLiteralElementStub stub;
1865 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1869 __ addp(rsp, Immediate(kPointerSize)); // literal index
1870 context()->PlugTOS();
1872 context()->Plug(rax);
// --- VisitAssignment -----------------------------------------------------
// Compiles `target = value` and compound `target op= value` for the three
// LHS kinds (variable, named property, keyed property). For compound
// assignments it first evaluates the LHS operands, loads the old value
// (with a bailout point after the load), applies the binary op — inline
// smi fast path when ShouldInlineSmiCase(op) — and then dispatches to the
// matching EmitXXXAssignment helper for the store.
1877 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1878 ASSERT(expr->target()->IsValidLeftHandSide());
1880 Comment cmnt(masm_, "[ Assignment");
1882 // Left-hand side can only be a property, a global or a (parameter or local)
1884 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1885 LhsKind assign_type = VARIABLE;
1886 Property* property = expr->target()->AsProperty();
1887 if (property != NULL) {
// Distinguishes obj.name (NAMED_PROPERTY) from obj[key]
// (KEYED_PROPERTY); the second branch arm is in elided lines.
1888 assign_type = (property->key()->IsPropertyName())
1893 // Evaluate LHS expression.
1894 switch (assign_type) {
1896 // Nothing to do here.
1898 case NAMED_PROPERTY:
1899 if (expr->is_compound()) {
1900 // We need the receiver both on the stack and in the accumulator.
1901 VisitForAccumulatorValue(property->obj());
1902 __ Push(result_register());
1904 VisitForStackValue(property->obj());
1907 case KEYED_PROPERTY: {
1908 if (expr->is_compound()) {
1909 VisitForStackValue(property->obj());
1910 VisitForAccumulatorValue(property->key());
1911 __ movp(rdx, Operand(rsp, 0));
1914 VisitForStackValue(property->obj());
1915 VisitForStackValue(property->key());
1921 // For compound assignments we need another deoptimization point after the
1922 // variable/property load.
1923 if (expr->is_compound()) {
1924 { AccumulatorValueContext context(this);
1925 switch (assign_type) {
1927 EmitVariableLoad(expr->target()->AsVariableProxy());
1928 PrepareForBailout(expr->target(), TOS_REG);
1930 case NAMED_PROPERTY:
1931 EmitNamedPropertyLoad(property);
1932 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1934 case KEYED_PROPERTY:
1935 EmitKeyedPropertyLoad(property);
1936 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1941 Token::Value op = expr->binary_op();
1942 __ Push(rax); // Left operand goes on the stack.
1943 VisitForAccumulatorValue(expr->value());
1945 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1948 SetSourcePosition(expr->position() + 1);
1949 AccumulatorValueContext context(this);
1950 if (ShouldInlineSmiCase(op)) {
1951 EmitInlineSmiBinaryOp(expr->binary_operation(),
1957 EmitBinaryOp(expr->binary_operation(), op, mode);
1959 // Deoptimization point in case the binary operation may have side effects.
1960 PrepareForBailout(expr->binary_operation(), TOS_REG);
// Simple (non-compound) assignment: only the RHS value is needed.
1962 VisitForAccumulatorValue(expr->value());
1965 // Record source position before possible IC call.
1966 SetSourcePosition(expr->position());
1969 switch (assign_type) {
1971 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1973 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1974 context()->Plug(rax);
1976 case NAMED_PROPERTY:
1977 EmitNamedPropertyAssignment(expr);
1979 case KEYED_PROPERTY:
1980 EmitKeyedPropertyAssignment(expr);
1986 void FullCodeGenerator::VisitYield(Yield* expr) {
1987 Comment cmnt(masm_, "[ Yield");
1988 // Evaluate yielded value first; the initial iterator definition depends on
1989 // this. It stays on the stack while we update the iterator.
1990 VisitForStackValue(expr->expression());
1992 switch (expr->yield_kind()) {
1993 case Yield::SUSPEND:
1994 // Pop value from top-of-stack slot; box result into result register.
1995 EmitCreateIteratorResult(false);
1996 __ Push(result_register());
1998 case Yield::INITIAL: {
1999 Label suspend, continuation, post_runtime, resume;
2003 __ bind(&continuation);
2007 VisitForAccumulatorValue(expr->generator_object());
2008 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2009 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2010 Smi::FromInt(continuation.pos()));
2011 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2013 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2015 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
2017 __ j(equal, &post_runtime);
2018 __ Push(rax); // generator object
2019 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2020 __ movp(context_register(),
2021 Operand(rbp, StandardFrameConstants::kContextOffset));
2022 __ bind(&post_runtime);
2024 __ Pop(result_register());
2025 EmitReturnSequence();
2028 context()->Plug(result_register());
2032 case Yield::FINAL: {
2033 VisitForAccumulatorValue(expr->generator_object());
2034 __ Move(FieldOperand(result_register(),
2035 JSGeneratorObject::kContinuationOffset),
2036 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2037 // Pop value from top-of-stack slot, box result into result register.
2038 EmitCreateIteratorResult(true);
2039 EmitUnwindBeforeReturn();
2040 EmitReturnSequence();
2044 case Yield::DELEGATING: {
2045 VisitForStackValue(expr->generator_object());
2047 // Initial stack layout is as follows:
2048 // [sp + 1 * kPointerSize] iter
2049 // [sp + 0 * kPointerSize] g
2051 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2052 Label l_next, l_call, l_loop;
2053 // Initial send value is undefined.
2054 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2057 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2059 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2060 __ LoadRoot(rcx, Heap::kthrow_stringRootIndex); // "throw"
2062 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2063 __ Push(rax); // exception
2066 // try { received = %yield result }
2067 // Shuffle the received result above a try handler and yield it without
2070 __ Pop(rax); // result
2071 __ PushTryHandler(StackHandler::CATCH, expr->index());
2072 const int handler_size = StackHandlerConstants::kSize;
2073 __ Push(rax); // result
2075 __ bind(&l_continuation);
2077 __ bind(&l_suspend);
2078 const int generator_object_depth = kPointerSize + handler_size;
2079 __ movp(rax, Operand(rsp, generator_object_depth));
2081 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2082 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2083 Smi::FromInt(l_continuation.pos()));
2084 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2086 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2088 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2089 __ movp(context_register(),
2090 Operand(rbp, StandardFrameConstants::kContextOffset));
2091 __ Pop(rax); // result
2092 EmitReturnSequence();
2093 __ bind(&l_resume); // received in rax
2096 // receiver = iter; f = 'next'; arg = received;
2098 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next"
2100 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2101 __ Push(rax); // received
2103 // result = receiver[f](arg);
2105 __ movp(rdx, Operand(rsp, kPointerSize));
2106 __ movp(rax, Operand(rsp, 2 * kPointerSize));
2107 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2108 CallIC(ic, TypeFeedbackId::None());
2110 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2111 CallFunctionStub stub(1, CALL_AS_METHOD);
2114 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2115 __ Drop(1); // The function is still on the stack; drop it.
2117 // if (!result.done) goto l_try;
2119 __ Push(rax); // save result
2120 __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done"
2121 CallLoadIC(NOT_CONTEXTUAL); // result.done in rax
2122 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2124 __ testp(result_register(), result_register());
2128 __ Pop(rax); // result
2129 __ LoadRoot(rcx, Heap::kvalue_stringRootIndex); // "value"
2130 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2131 context()->DropAndPlug(2, rax); // drop iter and g
// Resumes a suspended generator activation: checks the generator's state,
// rebuilds the suspended JS frame (receiver, argument holes, rbp/context/
// function), then either jumps straight back into the generator code (fast
// path for NEXT with an empty operand stack) or calls the runtime to finish
// the resume. NOTE(review): several interior lines of this chunk appear
// elided; comments below describe only the instructions that are visible.
2138 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2140 JSGeneratorObject::ResumeMode resume_mode) {
2141 // The value stays in rax, and is ultimately read by the resumed generator, as
2142 // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2143 // is read to throw the value when the resumed generator is already closed.
2144 // rbx will hold the generator object until the activation has been resumed.
2145 VisitForStackValue(generator);
2146 VisitForAccumulatorValue(value);
2149 // Check generator state.
2150 Label wrong_state, closed_state, done;
// The static asserts below justify the two-way branch on the continuation
// smi: executing is encoded as a negative value, closed as exactly zero, so
// equal-to-closed => closed_state and less-than => wrong_state (running).
2151 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2152 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2153 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2155 __ j(equal, &closed_state);
2156 __ j(less, &wrong_state);
2158 // Load suspended function and context.
2159 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2160 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2163 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2165 // Push holes for arguments to generator function.
// rdx is loaded with the formal parameter count (via the shared function
// info) and counted down to zero; one hole is pushed per formal parameter.
2166 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2169 SharedFunctionInfo::kFormalParameterCountOffset));
2170 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2171 Label push_argument_holes, push_frame;
2172 __ bind(&push_argument_holes);
2173 __ subp(rdx, Immediate(1));
// carry set means rdx underflowed below zero => all holes pushed.
2174 __ j(carry, &push_frame);
2176 __ jmp(&push_argument_holes);
2178 // Enter a new JavaScript frame, and initialize its slots as they were when
2179 // the generator was suspended.
2181 __ bind(&push_frame);
// The call to the very next instruction pushes a return address, giving the
// rebuilt frame a place to return to when the generator suspends/finishes.
2182 __ call(&resume_frame);
2184 __ bind(&resume_frame);
2185 __ pushq(rbp); // Caller's frame pointer.
2187 __ Push(rsi); // Callee's context.
2188 __ Push(rdi); // Callee's JS Function.
2190 // Load the operand stack size.
2191 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2192 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2193 __ SmiToInteger32(rdx, rdx);
2195 // If we are sending a value and there is no operand stack, we can jump back
2197 if (resume_mode == JSGeneratorObject::NEXT) {
2199 __ cmpp(rdx, Immediate(0));
2200 __ j(not_zero, &slow_resume);
// Fast path: compute the resume address from the code entry plus the saved
// continuation offset, mark the generator as executing, and jump in directly.
2201 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2202 __ SmiToInteger64(rcx,
2203 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2205 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2206 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2208 __ bind(&slow_resume);
2211 // Otherwise, we push holes for the operand stack and call the runtime to fix
2212 // up the stack and the handlers.
2213 Label push_operand_holes, call_resume;
2214 __ bind(&push_operand_holes);
2215 __ subp(rdx, Immediate(1));
2216 __ j(carry, &call_resume);
2218 __ jmp(&push_operand_holes);
2219 __ bind(&call_resume);
// Runtime resume takes three arguments; the value and the resume mode are
// pushed here (the generator object push appears to be on an elided line).
2221 __ Push(result_register());
2222 __ Push(Smi::FromInt(resume_mode));
2223 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2224 // Not reached: the runtime call returns elsewhere.
2225 __ Abort(kGeneratorFailedToResume);
2227 // Reach here when generator is closed.
2228 __ bind(&closed_state);
2229 if (resume_mode == JSGeneratorObject::NEXT) {
2230 // Return completed iterator result when generator is closed.
2231 __ PushRoot(Heap::kUndefinedValueRootIndex);
2232 // Pop value from top-of-stack slot; box result into result register.
2233 EmitCreateIteratorResult(true);
2235 // Throw the provided value.
2237 __ CallRuntime(Runtime::kHiddenThrow, 1);
2241 // Throw error if we attempt to operate on a running generator.
2242 __ bind(&wrong_state);
2244 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2247 context()->Plug(result_register());
// Allocates and initializes an iterator result object ({value, done}) using
// the native context's generator_result_map. The value is taken from the
// stack (popped on an elided line, presumably into rcx) and 'done' is the
// compile-time constant argument. Falls back to a runtime allocation when
// new-space allocation fails.
2251 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2255 Handle<Map> map(isolate()->native_context()->generator_result_map());
2257 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
// Slow path: allocate through the runtime, then restore the context register
// which the call may have clobbered.
2260 __ bind(&gc_required);
2261 __ Push(Smi::FromInt(map->instance_size()));
2262 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2263 __ movp(context_register(),
2264 Operand(rbp, StandardFrameConstants::kContextOffset));
2266 __ bind(&allocated);
2269 __ Move(rdx, isolate()->factory()->ToBoolean(done));
// Guard: the stores below assume the fixed layout of the result object
// (map + properties + elements + value + done = 5 words).
2270 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2271 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2272 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2273 isolate()->factory()->empty_fixed_array());
2274 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2275 isolate()->factory()->empty_fixed_array());
2276 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2278 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2281 // Only the value field needs a write barrier, as the other values are in the
2283 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2284 rcx, rdx, kDontSaveFPRegs);
// Emits a named-property load through the load IC: the literal property name
// is placed in rcx and the IC is invoked (receiver location is the IC's
// calling convention — not visible here; presumably rax).
2288 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2289 SetSourcePosition(prop->position());
2290 Literal* key = prop->key()->AsLiteral();
2291 __ Move(rcx, key->value());
2292 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
// Emits a keyed-property load through the uninitialized KeyedLoadIC stub,
// recording the property's type-feedback id for later IC patching.
2296 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2297 SetSourcePosition(prop->position());
2298 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2299 CallIC(ic, prop->PropertyFeedbackId());
// Emits the inline smi fast path for a binary operation plus a patchable
// fall-back to the BinaryOpICStub. The JumpPatchSite records where the smi
// check lives so the IC can later patch it. NOTE(review): the switch header
// and several case labels appear elided in this chunk; only the visible
// per-operator smi helpers are documented.
2303 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2307 Expression* right) {
2308 // Do combined smi check of the operands. Left operand is on the
2309 // stack (popped into rdx). Right operand is in rax but moved into
2310 // rcx to make the shifts easier.
2311 Label done, stub_call, smi_case;
2315 JumpPatchSite patch_site(masm_);
2316 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
// Generic path: the BinaryOpIC stub handles non-smi operands; the patch info
// lets the IC rewrite the smi check above once type feedback is collected.
2318 __ bind(&stub_call);
2320 BinaryOpICStub stub(op, mode);
2321 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2322 patch_site.EmitPatchInfo();
2323 __ jmp(&done, Label::kNear);
// Smi fast paths. Helpers taking &stub_call bail out to the generic stub on
// overflow or other failure; the pure bitwise/shift-arithmetic ones cannot
// fail and take no bailout label.
2328 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2331 __ SmiShiftLeft(rax, rdx, rcx);
2334 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2337 __ SmiAdd(rax, rdx, rcx, &stub_call);
2340 __ SmiSub(rax, rdx, rcx, &stub_call);
2343 __ SmiMul(rax, rdx, rcx, &stub_call);
2346 __ SmiOr(rax, rdx, rcx);
2348 case Token::BIT_AND:
2349 __ SmiAnd(rax, rdx, rcx);
2351 case Token::BIT_XOR:
2352 __ SmiXor(rax, rdx, rcx);
2360 context()->Plug(rax);
// Emits a binary operation with no inline smi fast path: always calls the
// BinaryOpIC stub. The unbound JumpPatchSite signals to the IC machinery
// that no inlined smi code exists for this site.
2364 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2366 OverwriteMode mode) {
2368 BinaryOpICStub stub(op, mode);
2369 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2370 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2371 patch_site.EmitPatchInfo();
2372 context()->Plug(rax);
// Emits an assignment of the value in rax to an arbitrary left-hand side:
// a variable, a named property, or a keyed property. The value is preserved
// across the sub-expression evaluation by pushing/popping it around the
// receiver/key visits.
2376 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2377 ASSERT(expr->IsValidLeftHandSide());
2379 // Left-hand side can only be a property, a global or a (parameter or local)
2381 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2382 LhsKind assign_type = VARIABLE;
2383 Property* prop = expr->AsProperty();
2385 assign_type = (prop->key()->IsPropertyName())
2390 switch (assign_type) {
2392 Variable* var = expr->AsVariableProxy()->var();
// EffectContext: the assignment's own value is not needed here; it is
// re-plugged into the surrounding context at the end of this function.
2393 EffectContext context(this);
2394 EmitVariableAssignment(var, Token::ASSIGN);
2397 case NAMED_PROPERTY: {
2398 __ Push(rax); // Preserve value.
2399 VisitForAccumulatorValue(prop->obj());
2401 __ Pop(rax); // Restore value.
2402 __ Move(rcx, prop->key()->AsLiteral()->value());
2406 case KEYED_PROPERTY: {
2407 __ Push(rax); // Preserve value.
2408 VisitForStackValue(prop->obj());
2409 VisitForAccumulatorValue(prop->key());
2412 __ Pop(rax); // Restore value.
// Strict-mode selects the strict variant of the keyed store IC.
2413 Handle<Code> ic = strict_mode() == SLOPPY
2414 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2415 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2420 context()->Plug(rax);
// Stores the value in rax into a stack-local or context slot. Context slots
// live on the heap, so they additionally need a write barrier (stack slots
// do not).
2424 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2425 Variable* var, MemOperand location) {
2426 __ movp(location, rax);
2427 if (var->IsContextSlot()) {
2429 __ RecordWriteContextSlot(
2430 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
// Stores the value in rax into a dynamically-looked-up context slot via the
// runtime. The runtime call takes four arguments; the pushes visible here
// are value, context, and strict mode — the name push is presumably on an
// elided line between them (TODO confirm against the full file).
2435 void FullCodeGenerator::EmitCallStoreContextSlot(
2436 Handle<String> name, StrictMode strict_mode) {
2437 __ Push(rax); // Value.
2438 __ Push(rsi); // Context.
2440 __ Push(Smi::FromInt(strict_mode));
2441 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
// Stores rax into the variable `var`, selecting a strategy by the variable's
// allocation and mode: global store IC, legacy-const initialization (only
// writes if still the hole), let assignment (throws a reference error if
// uninitialized), or a plain var/let/const-init store. Non-initializing
// assignments to consts fall through and are silently ignored.
2445 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2447 if (var->IsUnallocated()) {
2448 // Global var, const, or let.
2449 __ Move(rcx, var->name());
2450 __ movp(rdx, GlobalObjectOperand());
2453 } else if (op == Token::INIT_CONST_LEGACY) {
2454 // Const initializers need a write barrier.
2455 ASSERT(!var->IsParameter()); // No const parameters.
2456 if (var->IsLookupSlot()) {
2459 __ Push(var->name());
2460 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2462 ASSERT(var->IsStackLocal() || var->IsContextSlot());
// Legacy const: only the first (hole-valued) write sticks; a non-hole
// current value means the const was already initialized, so skip the store.
2464 MemOperand location = VarOperand(var, rcx);
2465 __ movp(rdx, location);
2466 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2467 __ j(not_equal, &skip);
2468 EmitStoreToStackLocalOrContextSlot(var, location);
2472 } else if (var->mode() == LET && op != Token::INIT_LET) {
2473 // Non-initializing assignment to let variable needs a write barrier.
2474 if (var->IsLookupSlot()) {
2475 EmitCallStoreContextSlot(var->name(), strict_mode());
2477 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
// TDZ check: assigning to a let before initialization (value still the
// hole) throws a ReferenceError via the runtime.
2479 MemOperand location = VarOperand(var, rcx);
2480 __ movp(rdx, location);
2481 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2482 __ j(not_equal, &assign, Label::kNear);
2483 __ Push(var->name());
2484 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2486 EmitStoreToStackLocalOrContextSlot(var, location);
2489 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2490 // Assignment to var or initializing assignment to let/const
2492 if (var->IsLookupSlot()) {
2493 EmitCallStoreContextSlot(var->name(), strict_mode());
2495 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2496 MemOperand location = VarOperand(var, rcx);
// Debug-only sanity check that a let initializer writes over the hole.
2497 if (generate_debug_code_ && op == Token::INIT_LET) {
2498 // Check for an uninitialized let binding.
2499 __ movp(rdx, location);
2500 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2501 __ Check(equal, kLetBindingReInitialization);
2503 EmitStoreToStackLocalOrContextSlot(var, location);
2506 // Non-initializing assignments to consts are ignored.
// Emits an assignment to a named property through the store IC: name in rcx,
// value in rax (per the store IC convention), then records a bailout point
// with the result on top-of-stack and plugs rax into the context.
2510 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2511 // Assignment to a property, using a named store IC.
2512 Property* prop = expr->target()->AsProperty();
2513 ASSERT(prop != NULL);
2514 ASSERT(prop->key()->AsLiteral() != NULL);
2516 // Record source code position before IC call.
2517 SetSourcePosition(expr->position());
2518 __ Move(rcx, prop->key()->AsLiteral()->value());
2520 CallStoreIC(expr->AssignmentFeedbackId());
2522 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2523 context()->Plug(rax);
// Emits an assignment to a keyed property through the keyed store IC,
// choosing the sloppy or strict variant from the current strict mode, and
// records a bailout point with the result in the accumulator.
2527 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2528 // Assignment to a property, using a keyed store IC.
2532 // Record source code position before IC call.
2533 SetSourcePosition(expr->position());
2534 Handle<Code> ic = strict_mode() == SLOPPY
2535 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2536 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2537 CallIC(ic, expr->AssignmentFeedbackId());
2539 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2540 context()->Plug(rax);
// Generates code for a property load expression, dispatching on whether the
// key is a literal property name (named load: receiver in the accumulator)
// or a computed key (keyed load: receiver on the stack, key in the
// accumulator).
2544 void FullCodeGenerator::VisitProperty(Property* expr) {
2545 Comment cmnt(masm_, "[ Property");
2546 Expression* key = expr->key();
2548 if (key->IsPropertyName()) {
2549 VisitForAccumulatorValue(expr->obj());
2550 EmitNamedPropertyLoad(expr);
2551 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2552 context()->Plug(rax);
2554 VisitForStackValue(expr->obj());
2555 VisitForAccumulatorValue(expr->key());
2557 EmitKeyedPropertyLoad(expr);
2558 context()->Plug(rax);
// Emits a call to an inline-cache code object, attaching the AST's type
// feedback id to the call's relocation info so the IC can be associated
// with its source site.
2563 void FullCodeGenerator::CallIC(Handle<Code> code,
2564 TypeFeedbackId ast_id) {
2566 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2570 // Code common for calls using the IC.
// Emits a call where the callee is either a variable (loaded onto the stack
// with an undefined receiver) or a named property (receiver already on the
// stack; the load IC fetches the function, which is then shuffled under the
// receiver). Arguments are then pushed and CallFunctionStub is invoked.
2571 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2572 Expression* callee = expr->expression();
2573 ZoneList<Expression*>* args = expr->arguments();
2574 int arg_count = args->length();
2576 CallFunctionFlags flags;
2577 // Get the target function;
2578 if (callee->IsVariableProxy()) {
2579 { StackValueContext context(this);
2580 EmitVariableLoad(callee->AsVariableProxy());
2581 PrepareForBailout(callee, NO_REGISTERS);
2583 // Push undefined as receiver. This is patched in the method prologue if it
2584 // is a sloppy mode method.
2585 __ Push(isolate()->factory()->undefined_value());
2586 flags = NO_CALL_FUNCTION_FLAGS;
2588 // Load the function from the receiver.
2589 ASSERT(callee->IsProperty());
2590 __ movp(rax, Operand(rsp, 0));
2591 EmitNamedPropertyLoad(callee->AsProperty());
2592 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2593 // Push the target function under the receiver.
// Stack shuffle: duplicate the receiver on top, then overwrite the old
// receiver slot with the loaded function, yielding [.., function, receiver].
2594 __ Push(Operand(rsp, 0));
2595 __ movp(Operand(rsp, kPointerSize), rax);
2596 flags = CALL_AS_METHOD;
2599 // Load the arguments.
2600 { PreservePositionScope scope(masm()->positions_recorder());
2601 for (int i = 0; i < arg_count; i++) {
2602 VisitForStackValue(args->at(i));
2606 // Record source position for debugger.
2607 SetSourcePosition(expr->position());
2608 CallFunctionStub stub(arg_count, flags);
2609 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2612 RecordJSReturnSite(expr);
2614 // Restore context register.
2615 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Drop the receiver left on the stack; the call result stays in rax.
2617 context()->DropAndPlug(1, rax);
2621 // Common code for calls using the IC.
// Emits a call where the callee is a keyed property access: the key is
// evaluated into the accumulator, the function is loaded via the keyed load
// IC (receiver loaded into rdx from top of stack), shuffled under the
// receiver, and then called as a method through CallFunctionStub.
2622 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2625 VisitForAccumulatorValue(key);
2627 Expression* callee = expr->expression();
2628 ZoneList<Expression*>* args = expr->arguments();
2629 int arg_count = args->length();
2631 // Load the function from the receiver.
2632 ASSERT(callee->IsProperty());
2633 __ movp(rdx, Operand(rsp, 0));
2634 EmitKeyedPropertyLoad(callee->AsProperty());
2635 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2637 // Push the target function under the receiver.
// Same stack shuffle as EmitCallWithIC: [.., function, receiver].
2638 __ Push(Operand(rsp, 0));
2639 __ movp(Operand(rsp, kPointerSize), rax);
2641 // Load the arguments.
2642 { PreservePositionScope scope(masm()->positions_recorder());
2643 for (int i = 0; i < arg_count; i++) {
2644 VisitForStackValue(args->at(i));
2648 // Record source position for debugger.
2649 SetSourcePosition(expr->position());
2650 CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2651 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2654 RecordJSReturnSite(expr);
2655 // Restore context register.
2656 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2658 context()->DropAndPlug(1, rax);
// Emits a call through CallFunctionStub with call-target recording: stores
// an uninitialized feedback sentinel in the call's feedback-vector slot and
// passes the vector (rbx) and slot index (rdx) to the stub so unoptimized
// code can record the call target.
2662 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2663 // Code common for calls using the call stub.
2664 ZoneList<Expression*>* args = expr->arguments();
2665 int arg_count = args->length();
2666 { PreservePositionScope scope(masm()->positions_recorder());
2667 for (int i = 0; i < arg_count; i++) {
2668 VisitForStackValue(args->at(i));
2671 // Record source position for debugger.
2672 SetSourcePosition(expr->position());
2674 Handle<Object> uninitialized =
2675 TypeFeedbackInfo::UninitializedSentinel(isolate());
2676 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2677 __ Move(rbx, FeedbackVector());
2678 __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2680 // Record call targets in unoptimized code.
2681 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
// The callee sits below the receiver and arguments on the stack.
2682 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2684 RecordJSReturnSite(expr);
2685 // Restore context register.
2686 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2687 // Discard the function left on TOS.
2688 context()->DropAndPlug(1, rax);
// Pushes the five arguments for the ResolvePossiblyDirectEval runtime call:
// a copy of eval's first argument (or undefined), the enclosing function's
// receiver, the strict mode, and the scope's start position, then performs
// the call. (Four pushes are visible; the fifth argument — presumably the
// eval function itself — is pushed by the caller before this is invoked.)
2692 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2693 // Push copy of the first argument or undefined if it doesn't exist.
2694 if (arg_count > 0) {
2695 __ Push(Operand(rsp, arg_count * kPointerSize));
2697 __ PushRoot(Heap::kUndefinedValueRootIndex);
2700 // Push the receiver of the enclosing function and do runtime call.
2701 StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2702 __ Push(args.GetReceiverOperand());
2704 // Push the language mode.
2705 __ Push(Smi::FromInt(strict_mode()));
2707 // Push the start position of the scope the calls resides in.
2708 __ Push(Smi::FromInt(scope()->start_position()));
2710 // Do the runtime call.
2711 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
// Generates code for a call expression, dispatching on the call type:
// possibly-direct eval, global call, lookup-slot (dynamic) call, property
// call, or an arbitrary-expression call. All paths must run through
// RecordJSReturnSite, which the debug-only flag at the top verifies.
2715 void FullCodeGenerator::VisitCall(Call* expr) {
2717 // We want to verify that RecordJSReturnSite gets called on all paths
2718 // through this function. Avoid early returns.
2719 expr->return_is_recorded_ = false;
2722 Comment cmnt(masm_, "[ Call");
2723 Expression* callee = expr->expression();
2724 Call::CallType call_type = expr->GetCallType(isolate());
2726 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2727 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2728 // to resolve the function we need to call and the receiver of the call.
2729 // Then we call the resolved function using the given arguments.
2730 ZoneList<Expression*>* args = expr->arguments();
2731 int arg_count = args->length();
2732 { PreservePositionScope pos_scope(masm()->positions_recorder());
2733 VisitForStackValue(callee);
2734 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2736 // Push the arguments.
2737 for (int i = 0; i < arg_count; i++) {
2738 VisitForStackValue(args->at(i));
2741 // Push a copy of the function (found below the arguments) and resolve
2743 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2744 EmitResolvePossiblyDirectEval(arg_count);
2746 // The runtime call returns a pair of values in rax (function) and
2747 // rdx (receiver). Touch up the stack with the right values.
2748 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2749 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2751 // Record source position for debugger.
2752 SetSourcePosition(expr->position());
2753 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2754 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2756 RecordJSReturnSite(expr);
2757 // Restore context register.
2758 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2759 context()->DropAndPlug(1, rax);
2760 } else if (call_type == Call::GLOBAL_CALL) {
2761 EmitCallWithIC(expr);
2763 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2764 // Call to a lookup slot (dynamically introduced variable).
2765 VariableProxy* proxy = callee->AsVariableProxy();
2768 { PreservePositionScope scope(masm()->positions_recorder());
2769 // Generate code for loading from variables potentially shadowed by
2770 // eval-introduced variables.
2771 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2774 // Call the runtime to find the function to call (returned in rax) and
2775 // the object holding it (returned in rdx).
2776 __ Push(context_register());
2777 __ Push(proxy->name());
2778 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2779 __ Push(rax); // Function.
2780 __ Push(rdx); // Receiver.
2782 // If fast case code has been generated, emit code to push the function
2783 // and receiver and have the slow path jump around this code.
2784 if (done.is_linked()) {
2786 __ jmp(&call, Label::kNear);
2790 // The receiver is implicitly the global receiver. Indicate this by
2791 // passing the hole to the call function stub.
2792 __ PushRoot(Heap::kUndefinedValueRootIndex);
2796 // The receiver is either the global receiver or an object found by
2798 EmitCallWithStub(expr);
2799 } else if (call_type == Call::PROPERTY_CALL) {
2800 Property* property = callee->AsProperty();
2801 { PreservePositionScope scope(masm()->positions_recorder());
2802 VisitForStackValue(property->obj());
// Named keys use the named-call IC; computed keys use the keyed-call IC.
2804 if (property->key()->IsPropertyName()) {
2805 EmitCallWithIC(expr);
2807 EmitKeyedCallWithIC(expr, property->key());
2810 ASSERT(call_type == Call::OTHER_CALL);
2811 // Call to an arbitrary expression not handled specially above.
2812 { PreservePositionScope scope(masm()->positions_recorder());
2813 VisitForStackValue(callee);
2815 __ PushRoot(Heap::kUndefinedValueRootIndex);
2816 // Emit function call.
2817 EmitCallWithStub(expr);
2821 // RecordJSReturnSite should have been called.
2822 ASSERT(expr->return_is_recorded_);
// Generates code for a `new` expression: evaluates the constructor and the
// arguments left-to-right onto the stack, sets up call-target feedback
// (vector in rbx, slot in rdx, optionally an allocation site for
// pretenuring), and invokes the CallConstructStub.
2827 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2828 Comment cmnt(masm_, "[ CallNew");
2829 // According to ECMA-262, section 11.2.2, page 44, the function
2830 // expression in new calls must be evaluated before the
2833 // Push constructor on the stack. If it's not a function it's used as
2834 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2836 VisitForStackValue(expr->expression());
2838 // Push the arguments ("left-to-right") on the stack.
2839 ZoneList<Expression*>* args = expr->arguments();
2840 int arg_count = args->length();
2841 for (int i = 0; i < arg_count; i++) {
2842 VisitForStackValue(args->at(i));
2845 // Call the construct call builtin that handles allocation and
2846 // constructor invocation.
2847 SetSourcePosition(expr->position());
2849 // Load function and argument count into rdi and rax.
2850 __ Set(rax, arg_count);
2851 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2853 // Record call targets in unoptimized code, but not in the snapshot.
2854 Handle<Object> uninitialized =
2855 TypeFeedbackInfo::UninitializedSentinel(isolate());
2856 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
// With pretenuring enabled, an AllocationSite occupies the slot right after
// the call-new feedback slot (the ASSERT pins that layout).
2857 if (FLAG_pretenuring_call_new) {
2858 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2859 isolate()->factory()->NewAllocationSite());
2860 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2861 expr->CallNewFeedbackSlot() + 1);
2864 __ Move(rbx, FeedbackVector());
2865 __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2867 CallConstructStub stub(RECORD_CALL_TARGET);
2868 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2869 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2870 context()->Plug(rax);
// Inlined %_IsSmi: evaluates the single argument into rax and branches on
// its smi tag, materializing true/false through the test context.
2874 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2875 ZoneList<Expression*>* args = expr->arguments();
2876 ASSERT(args->length() == 1);
2878 VisitForAccumulatorValue(args->at(0));
2880 Label materialize_true, materialize_false;
2881 Label* if_true = NULL;
2882 Label* if_false = NULL;
2883 Label* fall_through = NULL;
2884 context()->PrepareTest(&materialize_true, &materialize_false,
2885 &if_true, &if_false, &fall_through);
2887 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2888 __ JumpIfSmi(rax, if_true);
2891 context()->Plug(if_true, if_false);
// Inlined %_IsNonNegativeSmi: evaluates the argument into rax and splits on
// the macro-assembler's combined non-negative-smi condition check.
2895 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2896 ZoneList<Expression*>* args = expr->arguments();
2897 ASSERT(args->length() == 1);
2899 VisitForAccumulatorValue(args->at(0));
2901 Label materialize_true, materialize_false;
2902 Label* if_true = NULL;
2903 Label* if_false = NULL;
2904 Label* fall_through = NULL;
2905 context()->PrepareTest(&materialize_true, &materialize_false,
2906 &if_true, &if_false, &fall_through);
2908 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2909 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2910 Split(non_negative_smi, if_true, if_false, fall_through);
2912 context()->Plug(if_true, if_false);
// Inlined %_IsObject: true for null and for non-undetectable heap objects
// whose instance type lies in the non-callable spec-object range; smis and
// undetectable objects are false.
2916 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2917 ZoneList<Expression*>* args = expr->arguments();
2918 ASSERT(args->length() == 1);
2920 VisitForAccumulatorValue(args->at(0));
2922 Label materialize_true, materialize_false;
2923 Label* if_true = NULL;
2924 Label* if_false = NULL;
2925 Label* fall_through = NULL;
2926 context()->PrepareTest(&materialize_true, &materialize_false,
2927 &if_true, &if_false, &fall_through);
2929 __ JumpIfSmi(rax, if_false);
2930 __ CompareRoot(rax, Heap::kNullValueRootIndex);
2931 __ j(equal, if_true);
2932 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2933 // Undetectable objects behave like undefined when tested with typeof.
2934 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2935 Immediate(1 << Map::kIsUndetectable));
2936 __ j(not_zero, if_false);
// Range check: FIRST <= instance type <= LAST of the non-callable
// spec-object types.
2937 __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2938 __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2939 __ j(below, if_false);
2940 __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2941 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2942 Split(below_equal, if_true, if_false, fall_through);
2944 context()->Plug(if_true, if_false);
// Inlined %_IsSpecObject: true when the argument is a heap object whose
// instance type is at or above FIRST_SPEC_OBJECT_TYPE.
2948 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2949 ZoneList<Expression*>* args = expr->arguments();
2950 ASSERT(args->length() == 1);
2952 VisitForAccumulatorValue(args->at(0));
2954 Label materialize_true, materialize_false;
2955 Label* if_true = NULL;
2956 Label* if_false = NULL;
2957 Label* fall_through = NULL;
2958 context()->PrepareTest(&materialize_true, &materialize_false,
2959 &if_true, &if_false, &fall_through);
2961 __ JumpIfSmi(rax, if_false);
2962 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2963 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2964 Split(above_equal, if_true, if_false, fall_through);
2966 context()->Plug(if_true, if_false);
// Inlined %_IsUndetectableObject: true when the argument is a heap object
// whose map has the kIsUndetectable bit set in its bit field.
2970 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2971 ZoneList<Expression*>* args = expr->arguments();
2972 ASSERT(args->length() == 1);
2974 VisitForAccumulatorValue(args->at(0));
2976 Label materialize_true, materialize_false;
2977 Label* if_true = NULL;
2978 Label* if_false = NULL;
2979 Label* fall_through = NULL;
2980 context()->PrepareTest(&materialize_true, &materialize_false,
2981 &if_true, &if_false, &fall_through);
2983 __ JumpIfSmi(rax, if_false);
2984 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2985 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2986 Immediate(1 << Map::kIsUndetectable));
2987 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2988 Split(not_zero, if_true, if_false, fall_through);
2990 context()->Plug(if_true, if_false);
// Inlined check that a String wrapper object is safe for the default
// valueOf: the object's descriptor array must contain no own "valueOf" key
// and its prototype must be the unmodified String prototype. A positive
// result is cached in the map's bit field 2 so the descriptor scan is
// skipped on subsequent checks of the same map.
2994 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2995 CallRuntime* expr) {
2996 ZoneList<Expression*>* args = expr->arguments();
2997 ASSERT(args->length() == 1);
2999 VisitForAccumulatorValue(args->at(0));
3001 Label materialize_true, materialize_false, skip_lookup;
3002 Label* if_true = NULL;
3003 Label* if_false = NULL;
3004 Label* fall_through = NULL;
3005 context()->PrepareTest(&materialize_true, &materialize_false,
3006 &if_true, &if_false, &fall_through);
3008 __ AssertNotSmi(rax);
3010 // Check whether this map has already been checked to be safe for default
3012 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3013 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3014 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3015 __ j(not_zero, &skip_lookup);
3017 // Check for fast case object. Generate false result for slow case object.
// Slow-mode objects keep properties in a hash table; bail out to false
// rather than scanning a dictionary.
3018 __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3019 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3020 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3021 __ j(equal, if_false);
3023 // Look for valueOf string in the descriptor array, and indicate false if
3024 // found. Since we omit an enumeration index check, if it is added via a
3025 // transition that shares its descriptor array, this is a false positive.
3026 Label entry, loop, done;
3028 // Skip loop if no descriptors are valid.
3029 __ NumberOfOwnDescriptors(rcx, rbx);
3030 __ cmpp(rcx, Immediate(0));
3033 __ LoadInstanceDescriptors(rbx, r8);
3034 // rbx: descriptor array.
3035 // rcx: valid entries in the descriptor array.
3036 // Calculate the end of the descriptor array.
3037 __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3038 SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
3041 r8, index.reg, index.scale, DescriptorArray::kFirstOffset));
3042 // Calculate location of the first key name.
3043 __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3044 // Loop through all the keys in the descriptor array. If one of these is the
3045 // internalized string "valueOf" the result is false.
// r8 walks the key slots, advancing by one whole descriptor per iteration.
3048 __ movp(rdx, FieldOperand(r8, 0));
3049 __ Cmp(rdx, isolate()->factory()->value_of_string());
3050 __ j(equal, if_false);
3051 __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3054 __ j(not_equal, &loop);
3058 // Set the bit in the map to indicate that there is no local valueOf field.
3059 __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3060 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3062 __ bind(&skip_lookup);
3064 // If a valueOf property is not found on the object check that its
3065 // prototype is the un-modified String prototype. If not result is false.
3066 __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3067 __ testp(rcx, Immediate(kSmiTagMask));
3068 __ j(zero, if_false);
// Compare the prototype's map against the native context's cached map for
// the original String.prototype.
3069 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3070 __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3071 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3073 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3074 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3075 Split(equal, if_true, if_false, fall_through);
3077 context()->Plug(if_true, if_false);
// Inlined %_IsFunction: true when the argument is a heap object of instance
// type JS_FUNCTION_TYPE.
3081 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3082 ZoneList<Expression*>* args = expr->arguments();
3083 ASSERT(args->length() == 1);
3085 VisitForAccumulatorValue(args->at(0));
3087 Label materialize_true, materialize_false;
3088 Label* if_true = NULL;
3089 Label* if_false = NULL;
3090 Label* fall_through = NULL;
3091 context()->PrepareTest(&materialize_true, &materialize_false,
3092 &if_true, &if_false, &fall_through);
3094 __ JumpIfSmi(rax, if_false);
3095 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3096 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3097 Split(equal, if_true, if_false, fall_through);
3099 context()->Plug(if_true, if_false);
// Inline runtime call %_IsMinusZero(num): true iff the accumulator holds
// a HeapNumber whose bit pattern is exactly -0.0 (sign/exponent word
// plus an all-zero mantissa word).
3103 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3104 ZoneList<Expression*>* args = expr->arguments();
3105 ASSERT(args->length() == 1);
3107 VisitForAccumulatorValue(args->at(0));
3109 Label materialize_true, materialize_false;
3110 Label* if_true = NULL;
3111 Label* if_false = NULL;
3112 Label* fall_through = NULL;
3113 context()->PrepareTest(&materialize_true, &materialize_false,
3114 &if_true, &if_false, &fall_through);
// A smi is never -0; CheckMap performs the smi check and map compare.
3116 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3117 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
// NOTE(review): the immediate operand for this exponent-word compare
// (original line 3119) appears elided here — verify against upstream.
3118 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3120 __ j(no_overflow, if_false);
3121 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3122 Immediate(0x00000000));
3123 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124 Split(equal, if_true, if_false, fall_through);
3126 context()->Plug(if_true, if_false);
// Inline runtime call %_IsArray(obj): tests whether the accumulator
// (rax) holds a JSArray (instance type JS_ARRAY_TYPE).
3130 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3131 ZoneList<Expression*>* args = expr->arguments();
3132 ASSERT(args->length() == 1);
3134 VisitForAccumulatorValue(args->at(0));
3136 Label materialize_true, materialize_false;
3137 Label* if_true = NULL;
3138 Label* if_false = NULL;
3139 Label* fall_through = NULL;
3140 context()->PrepareTest(&materialize_true, &materialize_false,
3141 &if_true, &if_false, &fall_through);
// Smis are never arrays.
3143 __ JumpIfSmi(rax, if_false);
3144 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3145 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3146 Split(equal, if_true, if_false, fall_through);
3148 context()->Plug(if_true, if_false);
// Inline runtime call %_IsRegExp(obj): tests whether the accumulator
// (rax) holds a JSRegExp (instance type JS_REGEXP_TYPE).
3152 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3153 ZoneList<Expression*>* args = expr->arguments();
3154 ASSERT(args->length() == 1);
3156 VisitForAccumulatorValue(args->at(0));
3158 Label materialize_true, materialize_false;
3159 Label* if_true = NULL;
3160 Label* if_false = NULL;
3161 Label* fall_through = NULL;
3162 context()->PrepareTest(&materialize_true, &materialize_false,
3163 &if_true, &if_false, &fall_through);
// Smis are never regexps.
3165 __ JumpIfSmi(rax, if_false);
3166 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3167 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3168 Split(equal, if_true, if_false, fall_through);
3170 context()->Plug(if_true, if_false);
// Inline runtime call %_IsConstructCall(): true iff the current function
// was invoked as a constructor. Walks one frame up (skipping an
// arguments-adaptor frame if present) and compares the frame marker
// against StackFrame::CONSTRUCT.
3175 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3176 ASSERT(expr->arguments()->length() == 0);
3178 Label materialize_true, materialize_false;
3179 Label* if_true = NULL;
3180 Label* if_false = NULL;
3181 Label* fall_through = NULL;
3182 context()->PrepareTest(&materialize_true, &materialize_false,
3183 &if_true, &if_false, &fall_through);
3185 // Get the frame pointer for the calling frame.
3186 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3188 // Skip the arguments adaptor frame if it exists.
3189 Label check_frame_marker;
3190 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3191 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3192 __ j(not_equal, &check_frame_marker);
3193 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3195 // Check the marker in the calling frame.
3196 __ bind(&check_frame_marker);
3197 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3198 Smi::FromInt(StackFrame::CONSTRUCT));
3199 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3200 Split(equal, if_true, if_false, fall_through);
3202 context()->Plug(if_true, if_false);
// Inline runtime call %_ObjectEquals(a, b): pointer-identity comparison
// of two values; first pushed to the stack, second left in rax.
3206 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3207 ZoneList<Expression*>* args = expr->arguments();
3208 ASSERT(args->length() == 2);
3210 // Load the two objects into registers and perform the comparison.
3211 VisitForStackValue(args->at(0));
3212 VisitForAccumulatorValue(args->at(1));
3214 Label materialize_true, materialize_false;
3215 Label* if_true = NULL;
3216 Label* if_false = NULL;
3217 Label* fall_through = NULL;
3218 context()->PrepareTest(&materialize_true, &materialize_false,
3219 &if_true, &if_false, &fall_through);
// NOTE(review): the pop of the first operand and the compare against rax
// (original lines 3221-3222) appear elided here — verify against upstream.
3223 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3224 Split(equal, if_true, if_false, fall_through);
3226 context()->Plug(if_true, if_false);
// Inline runtime call %_Arguments(key): reads one element of the current
// function's arguments object via ArgumentsAccessStub::READ_ELEMENT.
3230 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3231 ZoneList<Expression*>* args = expr->arguments();
3232 ASSERT(args->length() == 1);
3234 // ArgumentsAccessStub expects the key in rdx and the formal
3235 // parameter count in rax.
3236 VisitForAccumulatorValue(args->at(0));
// NOTE(review): the move of the key into rdx and the stub call
// (original lines 3237, 3240) appear elided here — verify upstream.
3238 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3239 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3241 context()->Plug(rax);
// Inline runtime call %_ArgumentsLength(): yields the actual argument
// count. Defaults to the formal parameter count; if the caller frame is
// an arguments adaptor, reads the real length from that frame instead.
3245 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3246 ASSERT(expr->arguments()->length() == 0);
3249 // Get the number of formal parameters.
3250 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3252 // Check if the calling frame is an arguments adaptor frame.
3253 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3254 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3255 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
// NOTE(review): the `Label exit;` declaration and its `bind` (original
// lines near 3248/3262) appear elided here — verify against upstream.
3256 __ j(not_equal, &exit, Label::kNear);
3258 // Arguments adaptor case: Read the arguments length from the
3260 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3264 context()->Plug(rax);
// Inline runtime call %_ClassOf(obj): computes the [[Class]]-style name
// of a value. Smis and non-spec-objects yield null, functions yield
// "Function", objects with a non-function constructor yield "Object",
// otherwise the constructor's instance class name is returned in rax.
3268 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3269 ZoneList<Expression*>* args = expr->arguments();
3270 ASSERT(args->length() == 1);
3271 Label done, null, function, non_function_constructor;
3273 VisitForAccumulatorValue(args->at(0));
3275 // If the object is a smi, we return null.
3276 __ JumpIfSmi(rax, &null);
3278 // Check that the object is a JS object but take special care of JS
3279 // functions to make sure they have 'Function' as their class.
3280 // Assume that there are only two callable types, and one of them is at
3281 // either end of the type range for JS object types. Saves extra comparisons.
3282 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3283 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3284 // Map is now in rax.
3286 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3287 FIRST_SPEC_OBJECT_TYPE + 1);
3288 __ j(equal, &function);
3290 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3291 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3292 LAST_SPEC_OBJECT_TYPE - 1);
3293 __ j(equal, &function);
3294 // Assume that there is no larger type.
3295 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3297 // Check if the constructor in the map is a JS function.
3298 __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3299 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3300 __ j(not_equal, &non_function_constructor);
3302 // rax now contains the constructor function. Grab the
3303 // instance class name from there.
3304 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3305 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3308 // Functions have class 'Function'.
3310 __ Move(rax, isolate()->factory()->function_class_string());
3313 // Objects with a non-function constructor have class 'Object'.
3314 __ bind(&non_function_constructor);
3315 __ Move(rax, isolate()->factory()->Object_string());
3318 // Non-JS objects have class null.
3320 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3325 context()->Plug(rax);
// Inline runtime call %_Log(type, format, args): conditionally emits a
// call to Runtime::kHiddenLog when logging of the given type is enabled
// at code-generation time; always leaves undefined in rax.
3329 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3330 // Conditionally generate a log call.
3332 // 0 (literal string): The type of logging (corresponds to the flags).
3333 // This is used to determine whether or not to generate the log call.
3334 // 1 (string): Format string. Access the string at argument index 2
3335 // with '%2s' (see Logger::LogRuntime for all the formats).
3336 // 2 (array): Arguments to the format string.
3337 ZoneList<Expression*>* args = expr->arguments();
3338 ASSERT_EQ(args->length(), 3);
3339 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3340 VisitForStackValue(args->at(1));
3341 VisitForStackValue(args->at(2));
3342 __ CallRuntime(Runtime::kHiddenLog, 2);
3344 // Finally, we're expected to leave a value on the top of the stack.
3345 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3346 context()->Plug(rax);
// Inline runtime call %_SubString(string, from, to): pushes the three
// arguments and delegates to the SubString stub; result ends up in rax.
3350 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3351 // Load the arguments on the stack and call the stub.
3353 ZoneList<Expression*>* args = expr->arguments();
3354 ASSERT(args->length() == 3);
3355 VisitForStackValue(args->at(0));
3356 VisitForStackValue(args->at(1));
3357 VisitForStackValue(args->at(2));
// NOTE(review): the stub construction and CallStub (original line 3358)
// appear elided here — verify against upstream.
3359 context()->Plug(rax);
// Inline runtime call %_RegExpExec(regexp, string, index, lastMatchInfo):
// pushes the four arguments and invokes the RegExpExec stub.
3363 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3364 // Load the arguments on the stack and call the stub.
3365 RegExpExecStub stub;
3366 ZoneList<Expression*>* args = expr->arguments();
3367 ASSERT(args->length() == 4);
3368 VisitForStackValue(args->at(0));
3369 VisitForStackValue(args->at(1));
3370 VisitForStackValue(args->at(2));
3371 VisitForStackValue(args->at(3));
// NOTE(review): the CallStub for `stub` (original line 3372) appears
// elided here — verify against upstream.
3373 context()->Plug(rax);
// Inline runtime call %_ValueOf(obj): for a JSValue wrapper returns the
// wrapped primitive; any other value (including smis) is returned as-is.
3377 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3378 ZoneList<Expression*>* args = expr->arguments();
3379 ASSERT(args->length() == 1);
3381 VisitForAccumulatorValue(args->at(0)); // Load the object.
// NOTE(review): the `Label done;` declaration and its `bind` (original
// lines near 3383/3390) appear elided here — verify against upstream.
3384 // If the object is a smi return the object.
3385 __ JumpIfSmi(rax, &done);
3386 // If the object is not a value type, return the object.
3387 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3388 __ j(not_equal, &done);
3389 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3392 context()->Plug(rax);
// Inline runtime call %_DateField(date, index): reads a field of a
// JSDate. Field 0 (the time value) is always read directly; other
// cached fields are read inline when the isolate's date-cache stamp
// matches, otherwise (and for uncached fields) a C function computes
// the value. Non-date receivers throw via the runtime.
// Registers: rax = object in / result out, rcx = scratch.
3396 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3397 ZoneList<Expression*>* args = expr->arguments();
3398 ASSERT(args->length() == 2);
3399 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3400 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3402 VisitForAccumulatorValue(args->at(0)); // Load the object.
3404 Label runtime, done, not_date_object;
3405 Register object = rax;
3406 Register result = rax;
3407 Register scratch = rcx;
// Fixed mojibake: "&not_date_object" had been corrupted to "¬_date_object".
3409 __ JumpIfSmi(object, &not_date_object);
3410 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3411 __ j(not_equal, &not_date_object);
3413 if (index->value() == 0) {
3414 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3417 if (index->value() < JSDate::kFirstUncachedField) {
// Cached field: valid only while the date-cache stamp is unchanged.
3418 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3419 Operand stamp_operand = __ ExternalOperand(stamp);
3420 __ movp(scratch, stamp_operand);
3421 __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3422 __ j(not_equal, &runtime, Label::kNear);
3423 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3424 kPointerSize * index->value()));
// Slow path: compute the field in C++ (clobbers rsi, restored below).
3428 __ PrepareCallCFunction(2);
3429 __ movp(arg_reg_1, object);
3430 __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3431 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3432 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3436 __ bind(&not_date_object);
3437 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3439 context()->Plug(rax);
// Inline runtime call %_OneByteSeqStringSetChar(string, index, value):
// stores a single byte into a sequential one-byte string. index and
// value arrive as smis and are untagged before the store.
3443 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3444 ZoneList<Expression*>* args = expr->arguments();
3445 ASSERT_EQ(3, args->length());
3447 Register string = rax;
3448 Register index = rbx;
3449 Register value = rcx;
3451 VisitForStackValue(args->at(1)); // index
3452 VisitForStackValue(args->at(2)); // value
3453 VisitForAccumulatorValue(args->at(0)); // string
// Debug-only sanity checks that index and value really are smis.
3457 if (FLAG_debug_code) {
3458 __ Check(__ CheckSmi(value), kNonSmiValue);
3459 __ Check(__ CheckSmi(index), kNonSmiValue);
3462 __ SmiToInteger32(value, value);
3463 __ SmiToInteger32(index, index);
3465 if (FLAG_debug_code) {
3466 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3467 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
// One-byte store: element size is 1 (times_1 scaling).
3470 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3472 context()->Plug(string);
// Inline runtime call %_TwoByteSeqStringSetChar(string, index, value):
// stores a single 16-bit code unit into a sequential two-byte string.
// Mirrors the one-byte variant but uses times_2 scaling and movw.
3476 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3477 ZoneList<Expression*>* args = expr->arguments();
3478 ASSERT_EQ(3, args->length());
3480 Register string = rax;
3481 Register index = rbx;
3482 Register value = rcx;
3484 VisitForStackValue(args->at(1)); // index
3485 VisitForStackValue(args->at(2)); // value
3486 VisitForAccumulatorValue(args->at(0)); // string
// Debug-only sanity checks that index and value really are smis.
3490 if (FLAG_debug_code) {
3491 __ Check(__ CheckSmi(value), kNonSmiValue);
3492 __ Check(__ CheckSmi(index), kNonSmiValue);
3495 __ SmiToInteger32(value, value);
3496 __ SmiToInteger32(index, index);
3498 if (FLAG_debug_code) {
3499 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3500 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3503 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3505 context()->Plug(rax);
// Inline runtime call %_MathPow(base, exponent): pushes both operands
// and delegates to MathPowStub in ON_STACK mode.
3509 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3510 // Load the arguments on the stack and call the runtime function.
3511 ZoneList<Expression*>* args = expr->arguments();
3512 ASSERT(args->length() == 2);
3513 VisitForStackValue(args->at(0));
3514 VisitForStackValue(args->at(1));
3515 MathPowStub stub(MathPowStub::ON_STACK);
// NOTE(review): the CallStub for `stub` (original line 3516) appears
// elided here — verify against upstream.
3517 context()->Plug(rax);
// Inline runtime call %_SetValueOf(obj, value): if obj is a JSValue
// wrapper, stores value into it (with write barrier) and returns value;
// otherwise just returns value.
3521 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3522 ZoneList<Expression*>* args = expr->arguments();
3523 ASSERT(args->length() == 2);
3525 VisitForStackValue(args->at(0)); // Load the object.
3526 VisitForAccumulatorValue(args->at(1)); // Load the value.
3527 __ Pop(rbx); // rax = value. rbx = object.
3530 // If the object is a smi, return the value.
3531 __ JumpIfSmi(rbx, &done);
3533 // If the object is not a value type, return the value.
3534 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3535 __ j(not_equal, &done);
3538 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3539 // Update the write barrier. Save the value as it will be
3540 // overwritten by the write barrier code and is needed afterward.
3542 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3545 context()->Plug(rax);
// Inline runtime call %_NumberToString(num): converts the accumulator
// value to a string via NumberToStringStub.
3549 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3550 ZoneList<Expression*>* args = expr->arguments();
3551 ASSERT_EQ(args->length(), 1);
3553 // Load the argument into rax and call the stub.
3554 VisitForAccumulatorValue(args->at(0));
3556 NumberToStringStub stub;
// NOTE(review): the CallStub for `stub` (original line 3557) appears
// elided here — verify against upstream.
3558 context()->Plug(rax);
// Inline runtime call %_StringCharFromCode(code): builds a one-character
// string from a char code (rax in, rbx out) using the fast path of
// StringCharFromCodeGenerator with an inline slow-path fallback.
3562 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 ASSERT(args->length() == 1);
3566 VisitForAccumulatorValue(args->at(0));
3569 StringCharFromCodeGenerator generator(rax, rbx);
3570 generator.GenerateFast(masm_);
3573 NopRuntimeCallHelper call_helper;
3574 generator.GenerateSlow(masm_, call_helper);
3577 context()->Plug(rbx);
// Inline runtime call %_StringCharCodeAt(string, index): loads the char
// code at the given index. Out-of-range indices yield NaN; a
// non-smi index is replaced by undefined to trigger conversion in the
// generator's slow path. Result lands in rdx.
3581 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3582 ZoneList<Expression*>* args = expr->arguments();
3583 ASSERT(args->length() == 2);
3585 VisitForStackValue(args->at(0));
3586 VisitForAccumulatorValue(args->at(1));
3588 Register object = rbx;
3589 Register index = rax;
3590 Register result = rdx;
3594 Label need_conversion;
3595 Label index_out_of_range;
3597 StringCharCodeAtGenerator generator(object,
3602 &index_out_of_range,
3603 STRING_INDEX_IS_NUMBER);
3604 generator.GenerateFast(masm_);
3607 __ bind(&index_out_of_range);
3608 // When the index is out of range, the spec requires us to return
3610 __ LoadRoot(result, Heap::kNanValueRootIndex);
3613 __ bind(&need_conversion);
3614 // Move the undefined value into the result register, which will
3615 // trigger conversion.
3616 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3619 NopRuntimeCallHelper call_helper;
3620 generator.GenerateSlow(masm_, call_helper);
3623 context()->Plug(result);
// Inline runtime call %_StringCharAt(string, index): returns the
// one-character substring at index, or the empty string when the index
// is out of range. A non-smi index is replaced with smi zero to trigger
// conversion in the generator's slow path. Result lands in rax.
3627 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3628 ZoneList<Expression*>* args = expr->arguments();
3629 ASSERT(args->length() == 2);
3631 VisitForStackValue(args->at(0));
3632 VisitForAccumulatorValue(args->at(1));
3634 Register object = rbx;
3635 Register index = rax;
3636 Register scratch = rdx;
3637 Register result = rax;
3641 Label need_conversion;
3642 Label index_out_of_range;
3644 StringCharAtGenerator generator(object,
3650 &index_out_of_range,
3651 STRING_INDEX_IS_NUMBER);
3652 generator.GenerateFast(masm_);
3655 __ bind(&index_out_of_range);
3656 // When the index is out of range, the spec requires us to return
3657 // the empty string.
3658 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3661 __ bind(&need_conversion);
3662 // Move smi zero into the result register, which will trigger
3664 __ Move(result, Smi::FromInt(0));
3667 NopRuntimeCallHelper call_helper;
3668 generator.GenerateSlow(masm_, call_helper);
3671 context()->Plug(result);
// Inline runtime call %_StringAdd(left, right): concatenates two strings
// via StringAddStub with full receiver checks and new-space allocation.
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments();
3677 ASSERT_EQ(2, args->length());
3678 VisitForStackValue(args->at(0));
3679 VisitForAccumulatorValue(args->at(1));
// NOTE(review): the pop of the left operand and the CallStub (original
// lines 3681/3683) appear elided here — verify against upstream.
3682 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3684 context()->Plug(rax);
// Inline runtime call %_StringCompare(a, b): pushes both strings and
// delegates to StringCompareStub.
3688 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 ASSERT_EQ(2, args->length());
3692 VisitForStackValue(args->at(0));
3693 VisitForStackValue(args->at(1));
3695 StringCompareStub stub;
// NOTE(review): the CallStub for `stub` (original line 3696) appears
// elided here — verify against upstream.
3697 context()->Plug(rax);
// Inline runtime call %_MathLog(x): no fast path on x64 — pushes the
// argument and calls the Runtime::kMath_log runtime function.
3701 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3702 // Load the argument on the stack and call the runtime function.
3703 ZoneList<Expression*>* args = expr->arguments();
3704 ASSERT(args->length() == 1);
3705 VisitForStackValue(args->at(0));
3706 __ CallRuntime(Runtime::kMath_log, 1);
3707 context()->Plug(rax);
// Inline runtime call %_MathSqrt(x): no fast path on x64 — pushes the
// argument and calls the Runtime::kMath_sqrt runtime function.
3711 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3712 // Load the argument on the stack and call the runtime function.
3713 ZoneList<Expression*>* args = expr->arguments();
3714 ASSERT(args->length() == 1);
3715 VisitForStackValue(args->at(0));
3716 __ CallRuntime(Runtime::kMath_sqrt, 1);
3717 context()->Plug(rax);
// Inline runtime call %_CallFunction(receiver, arg1..argN, function):
// invokes `function` directly when it is a genuine JSFunction; smis,
// proxies and other non-functions fall back to Runtime::kCall.
3721 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3722 ZoneList<Expression*>* args = expr->arguments();
3723 ASSERT(args->length() >= 2);
3725 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3726 for (int i = 0; i < arg_count + 1; i++) {
3727 VisitForStackValue(args->at(i));
3729 VisitForAccumulatorValue(args->last()); // Function.
3731 Label runtime, done;
3732 // Check for non-function argument (including proxy).
3733 __ JumpIfSmi(rax, &runtime);
3734 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3735 __ j(not_equal, &runtime);
3737 // InvokeFunction requires the function in rdi. Move it in there.
3738 __ movp(rdi, result_register());
3739 ParameterCount count(arg_count);
3740 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
// Restore the context register, which InvokeFunction may clobber.
3741 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Slow path: let the runtime handle proxies and other callables.
3746 __ CallRuntime(Runtime::kCall, args->length());
3749 context()->Plug(rax);
// Inline runtime call %_RegExpConstructResult(length, index, input):
// builds a RegExp match result array via RegExpConstructResultStub.
3753 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3754 RegExpConstructResultStub stub;
3755 ZoneList<Expression*>* args = expr->arguments();
3756 ASSERT(args->length() == 3);
3757 VisitForStackValue(args->at(0));
3758 VisitForStackValue(args->at(1));
3759 VisitForAccumulatorValue(args->at(2));
// NOTE(review): the pops of the stacked operands and the CallStub
// (original lines 3760-3762) appear elided here — verify upstream.
3763 context()->Plug(rax);
// Inline runtime call %_GetFromCache(cacheId, key): looks up `key` in a
// JSFunctionResultCache identified by a compile-time literal cache id.
// Checks only the cache's current finger entry inline; on a miss the
// Runtime::kHiddenGetFromCache runtime function performs the full
// lookup. An out-of-range cache id aborts (debug) and yields undefined.
3767 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3768 ZoneList<Expression*>* args = expr->arguments();
3769 ASSERT_EQ(2, args->length());
3771 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3772 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3774 Handle<FixedArray> jsfunction_result_caches(
3775 isolate()->native_context()->jsfunction_result_caches());
3776 if (jsfunction_result_caches->length() <= cache_id) {
3777 __ Abort(kAttemptToUseUndefinedCache);
3778 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3779 context()->Plug(rax);
3783 VisitForAccumulatorValue(args->at(1));
3786 Register cache = rbx;
// Navigate native_context -> result-cache list -> this cache's array.
3788 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3790 FieldOperand(cache, GlobalObject::kNativeContextOffset));
3792 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3794 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3796 Label done, not_found;
3797 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3798 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3799 // tmp now holds finger offset as a smi.
3801 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
// Compare the key at the finger; the cached value sits one slot after it.
// Fixed mojibake: "&not_found" had been corrupted to "¬_found".
3802 __ cmpp(key, FieldOperand(cache,
3805 FixedArray::kHeaderSize));
3806 __ j(not_equal, &not_found, Label::kNear);
3807 __ movp(rax, FieldOperand(cache,
3810 FixedArray::kHeaderSize + kPointerSize));
3811 __ jmp(&done, Label::kNear);
3813 __ bind(&not_found);
3814 // Call runtime to perform the lookup.
3817 __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3820 context()->Plug(rax);
// Inline runtime call %_HasCachedArrayIndex(string): tests whether the
// string's hash field contains a cached array index (the
// kContainsCachedArrayIndexMask bits are clear when it does).
3824 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments();
3826 ASSERT(args->length() == 1);
3828 VisitForAccumulatorValue(args->at(0));
3830 Label materialize_true, materialize_false;
3831 Label* if_true = NULL;
3832 Label* if_false = NULL;
3833 Label* fall_through = NULL;
3834 context()->PrepareTest(&materialize_true, &materialize_false,
3835 &if_true, &if_false, &fall_through);
3837 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3838 Immediate(String::kContainsCachedArrayIndexMask));
3839 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3840 __ j(zero, if_true);
3843 context()->Plug(if_true, if_false);
// Inline runtime call %_GetCachedArrayIndex(string): extracts the array
// index cached in the string's hash field into rax as a smi.
3847 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3848 ZoneList<Expression*>* args = expr->arguments();
3849 ASSERT(args->length() == 1);
3850 VisitForAccumulatorValue(args->at(0));
3852 __ AssertString(rax);
3854 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3855 ASSERT(String::kHashShift >= kSmiTagSize);
3856 __ IndexFromHash(rax, rax);
3858 context()->Plug(rax);
// Inline runtime call %_FastAsciiArrayJoin(array, separator): fast path
// for Array.prototype.join on arrays of sequential ASCII strings with a
// sequential ASCII separator. Validates the array and every element,
// sums the lengths, allocates one result string, then copies elements
// (with one of three loops: empty / one-char / long separator). Any
// validation failure bails out by producing undefined, which signals
// the caller to fall back to the generic join.
3862 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3863 Label bailout, return_result, done, one_char_separator, long_separator,
3864 non_trivial_array, not_size_one_array, loop,
3865 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3866 ZoneList<Expression*>* args = expr->arguments();
3867 ASSERT(args->length() == 2);
3868 // We will leave the separator on the stack until the end of the function.
3869 VisitForStackValue(args->at(1));
3870 // Load this to rax (= array)
3871 VisitForAccumulatorValue(args->at(0));
3872 // All aliases of the same register have disjoint lifetimes.
3873 Register array = rax;
3874 Register elements = no_reg; // Will be rax.
3876 Register index = rdx;
3878 Register string_length = rcx;
3880 Register string = rsi;
3882 Register scratch = rbx;
3884 Register array_length = rdi;
3885 Register result_pos = no_reg; // Will be rdi.
3887 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3888 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3889 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3890 // Separator operand is already pushed. Make room for the two
3891 // other stack fields, and clear the direction flag in anticipation
3892 // of calling CopyBytes.
3893 __ subp(rsp, Immediate(2 * kPointerSize));
3895 // Check that the array is a JSArray
3896 __ JumpIfSmi(array, &bailout);
3897 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3898 __ j(not_equal, &bailout);
3900 // Check that the array has fast elements.
3901 __ CheckFastElements(scratch, &bailout);
3903 // Array has fast elements, so its length must be a smi.
3904 // If the array has length zero, return the empty string.
3905 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3906 __ SmiCompare(array_length, Smi::FromInt(0));
3907 __ j(not_zero, &non_trivial_array);
3908 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3909 __ jmp(&return_result);
3911 // Save the array length on the stack.
3912 __ bind(&non_trivial_array);
3913 __ SmiToInteger32(array_length, array_length);
3914 __ movl(array_length_operand, array_length);
3916 // Save the FixedArray containing array's elements.
3917 // End of array's live range.
3919 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3923 // Check that all array elements are sequential ASCII strings, and
3924 // accumulate the sum of their lengths, as a smi-encoded value.
3926 __ Set(string_length, 0)
3927 // Loop condition: while (index < array_length).
3928 // Live loop registers: index(int32), array_length(int32), string(String*),
3929 // scratch, string_length(int32), elements(FixedArray*).
3930 if (generate_debug_code_) {
3931 __ cmpp(index, array_length);
3932 __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3935 __ movp(string, FieldOperand(elements,
3938 FixedArray::kHeaderSize));
3939 __ JumpIfSmi(string, &bailout);
3940 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3941 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3942 __ andb(scratch, Immediate(
3943 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3944 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3945 __ j(not_equal, &bailout);
3946 __ AddSmiField(string_length,
3947 FieldOperand(string, SeqOneByteString::kLengthOffset));
3948 __ j(overflow, &bailout);
3950 __ cmpl(index, array_length);
3954 // string_length: Sum of string lengths.
3955 // elements: FixedArray of strings.
3956 // index: Array length.
3957 // array_length: Array length.
3959 // If array_length is 1, return elements[0], a string.
3960 __ cmpl(array_length, Immediate(1));
// Fixed mojibake: "&not_size_one_array" had been corrupted to
// "¬_size_one_array" in the jump and bind below.
3961 __ j(not_equal, &not_size_one_array);
3962 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3963 __ jmp(&return_result);
3965 __ bind(&not_size_one_array);
3967 // End of array_length live range.
3968 result_pos = array_length;
3969 array_length = no_reg;
3972 // string_length: Sum of string lengths.
3973 // elements: FixedArray of strings.
3974 // index: Array length.
3976 // Check that the separator is a sequential ASCII string.
3977 __ movp(string, separator_operand);
3978 __ JumpIfSmi(string, &bailout);
3979 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3980 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3981 __ andb(scratch, Immediate(
3982 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3983 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3984 __ j(not_equal, &bailout);
3987 // string_length: Sum of string lengths.
3988 // elements: FixedArray of strings.
3989 // index: Array length.
3990 // string: Separator string.
3992 // Add (separator length times (array_length - 1)) to string_length.
3993 __ SmiToInteger32(scratch,
3994 FieldOperand(string, SeqOneByteString::kLengthOffset));
3996 __ imull(scratch, index);
3997 __ j(overflow, &bailout);
3998 __ addl(string_length, scratch);
3999 __ j(overflow, &bailout);
4001 // Live registers and stack values:
4002 // string_length: Total length of result string.
4003 // elements: FixedArray of strings.
4004 __ AllocateAsciiString(result_pos, string_length, scratch,
4005 index, string, &bailout);
4006 __ movp(result_operand, result_pos);
4007 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4009 __ movp(string, separator_operand);
4010 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4012 __ j(equal, &one_char_separator);
4013 __ j(greater, &long_separator);
4016 // Empty separator case:
4018 __ movl(scratch, array_length_operand);
4019 __ jmp(&loop_1_condition);
4020 // Loop condition: while (index < array_length).
4022 // Each iteration of the loop concatenates one string to the result.
4023 // Live values in registers:
4024 // index: which element of the elements array we are adding to the result.
4025 // result_pos: the position to which we are currently copying characters.
4026 // elements: the FixedArray of strings we are joining.
4027 // scratch: array length.
4029 // Get string = array[index].
4030 __ movp(string, FieldOperand(elements, index,
4032 FixedArray::kHeaderSize));
4033 __ SmiToInteger32(string_length,
4034 FieldOperand(string, String::kLengthOffset));
4036 FieldOperand(string, SeqOneByteString::kHeaderSize));
4037 __ CopyBytes(result_pos, string, string_length);
4039 __ bind(&loop_1_condition);
4040 __ cmpl(index, scratch);
4041 __ j(less, &loop_1); // Loop while (index < array_length).
4044 // Generic bailout code used from several places.
4046 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4047 __ jmp(&return_result);
4050 // One-character separator case
4051 __ bind(&one_char_separator);
4052 // Get the separator ASCII character value.
4053 // Register "string" holds the separator.
4054 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4056 // Jump into the loop after the code that copies the separator, so the first
4057 // element is not preceded by a separator
4058 __ jmp(&loop_2_entry);
4059 // Loop condition: while (index < length).
4061 // Each iteration of the loop concatenates one string to the result.
4062 // Live values in registers:
4063 // elements: The FixedArray of strings we are joining.
4064 // index: which element of the elements array we are adding to the result.
4065 // result_pos: the position to which we are currently copying characters.
4066 // scratch: Separator character.
4068 // Copy the separator character to the result.
4069 __ movb(Operand(result_pos, 0), scratch);
4070 __ incp(result_pos);
4072 __ bind(&loop_2_entry);
4073 // Get string = array[index].
4074 __ movp(string, FieldOperand(elements, index,
4076 FixedArray::kHeaderSize));
4077 __ SmiToInteger32(string_length,
4078 FieldOperand(string, String::kLengthOffset));
4080 FieldOperand(string, SeqOneByteString::kHeaderSize));
4081 __ CopyBytes(result_pos, string, string_length);
4083 __ cmpl(index, array_length_operand);
4084 __ j(less, &loop_2); // End while (index < length).
4088 // Long separator case (separator is more than one character).
4089 __ bind(&long_separator);
4091 // Make elements point to end of elements array, and index
4092 // count from -array_length to zero, so we don't need to maintain
4094 __ movl(index, array_length_operand);
4095 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4096 FixedArray::kHeaderSize));
4099 // Replace separator string with pointer to its first character, and
4100 // make scratch be its length.
4101 __ movp(string, separator_operand);
4102 __ SmiToInteger32(scratch,
4103 FieldOperand(string, String::kLengthOffset));
4105 FieldOperand(string, SeqOneByteString::kHeaderSize));
4106 __ movp(separator_operand, string);
4108 // Jump into the loop after the code that copies the separator, so the first
4109 // element is not preceded by a separator
4110 __ jmp(&loop_3_entry);
4111 // Loop condition: while (index < length).
4113 // Each iteration of the loop concatenates one string to the result.
4114 // Live values in registers:
4115 // index: which element of the elements array we are adding to the result.
4116 // result_pos: the position to which we are currently copying characters.
4117 // scratch: Separator length.
4118 // separator_operand (rsp[0x10]): Address of first char of separator.
4120 // Copy the separator to the result.
4121 __ movp(string, separator_operand);
4122 __ movl(string_length, scratch);
4123 __ CopyBytes(result_pos, string, string_length, 2);
4125 __ bind(&loop_3_entry);
4126 // Get string = array[index].
4127 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4128 __ SmiToInteger32(string_length,
4129 FieldOperand(string, String::kLengthOffset));
4131 FieldOperand(string, SeqOneByteString::kHeaderSize));
4132 __ CopyBytes(result_pos, string, string_length);
4134 __ j(not_equal, &loop_3); // Loop while (index < 0).
4137 __ movp(rax, result_operand);
4139 __ bind(&return_result);
4140 // Drop temp values from the stack, and restore context register.
4141 __ addp(rsp, Immediate(3 * kPointerSize));
4142 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4143 context()->Plug(rax);
// Emits code for a runtime call expression. Three cases:
//  1. Inline intrinsics (Runtime::INLINE) are expanded in place.
//  2. JS runtime functions (is_jsruntime) are looked up on the builtins
//     object via a load IC and invoked through CallFunctionStub.
//  3. Everything else becomes a direct call into the C++ runtime.
// NOTE(review): this listing is elided (embedded line numbers skip), so some
// statements and closing braces of this function are not visible here.
4147 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4148 if (expr->function() != NULL &&
4149 expr->function()->intrinsic_type == Runtime::INLINE) {
4150 Comment cmnt(masm_, "[ InlineRuntimeCall");
4151 EmitInlineRuntimeCall(expr);
4155 Comment cmnt(masm_, "[ CallRuntime");
4156 ZoneList<Expression*>* args = expr->arguments();
4157 int arg_count = args->length();
4159 if (expr->is_jsruntime()) {
4160 // Push the builtins object as receiver.
4161 __ movp(rax, GlobalObjectOperand());
4162 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4164 // Load the function from the receiver.
4165 __ movp(rax, Operand(rsp, 0));
4166 __ Move(rcx, expr->name());
4167 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4169 // Push the target function under the receiver.
4170 __ Push(Operand(rsp, 0));
4171 __ movp(Operand(rsp, kPointerSize), rax);
4173 // Push the arguments ("left-to-right").
4174 for (int i = 0; i < arg_count; i++) {
4175 VisitForStackValue(args->at(i));
4178 // Record source position of the IC call.
4179 SetSourcePosition(expr->position());
4180 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
// Load the function to call from below the pushed arguments.
4181 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4184 // Restore context register.
4185 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Drop the receiver that is still on the stack and plug the result.
4186 context()->DropAndPlug(1, rax);
4189 // Push the arguments ("left-to-right").
4190 for (int i = 0; i < arg_count; i++) {
4191 VisitForStackValue(args->at(i));
4194 // Call the C runtime.
4195 __ CallRuntime(expr->function(), arg_count);
4196 context()->Plug(rax);
// Emits code for the unary operators delete, void, !, and typeof.
// Each case plugs its result into the current expression context.
// NOTE(review): elided listing — some case-terminating statements and
// braces are missing from this view.
4201 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4202 switch (expr->op()) {
4203 case Token::DELETE: {
4204 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4205 Property* property = expr->expression()->AsProperty();
4206 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4208 if (property != NULL) {
// delete obj[key]: evaluate receiver and key, then call the DELETE
// builtin with the current strictness as the third argument.
4209 VisitForStackValue(property->obj());
4210 VisitForStackValue(property->key());
4211 __ Push(Smi::FromInt(strict_mode()));
4212 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4213 context()->Plug(rax);
4214 } else if (proxy != NULL) {
4215 Variable* var = proxy->var();
4216 // Delete of an unqualified identifier is disallowed in strict mode
4217 // but "delete this" is allowed.
4218 ASSERT(strict_mode() == SLOPPY || var->is_this());
4219 if (var->IsUnallocated()) {
// Global variable: delete from the global object. Deleting a global
// is never a strict-mode error here, hence SLOPPY is pushed.
4220 __ Push(GlobalObjectOperand());
4221 __ Push(var->name());
4222 __ Push(Smi::FromInt(SLOPPY));
4223 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4224 context()->Plug(rax);
4225 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4226 // Result of deleting non-global variables is false. 'this' is
4227 // not really a variable, though we implement it as one. The
4228 // subexpression does not have side effects.
4229 context()->Plug(var->is_this());
4231 // Non-global variable. Call the runtime to try to delete from the
4232 // context where the variable was introduced.
4233 __ Push(context_register());
4234 __ Push(var->name());
4235 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4236 context()->Plug(rax);
4239 // Result of deleting non-property, non-variable reference is true.
4240 // The subexpression may have side effects.
4241 VisitForEffect(expr->expression());
4242 context()->Plug(true);
// void <expr>: evaluate for side effects only, result is undefined.
4248 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4249 VisitForEffect(expr->expression());
4250 context()->Plug(Heap::kUndefinedValueRootIndex);
4255 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4256 if (context()->IsEffect()) {
4257 // Unary NOT has no side effects so it's only necessary to visit the
4258 // subexpression. Match the optimizing compiler by not branching.
4259 VisitForEffect(expr->expression());
4260 } else if (context()->IsTest()) {
4261 const TestContext* test = TestContext::cast(context());
4262 // The labels are swapped for the recursive call.
4263 VisitForControl(expr->expression(),
4264 test->false_label(),
4266 test->fall_through());
4267 context()->Plug(test->true_label(), test->false_label());
4269 // We handle value contexts explicitly rather than simply visiting
4270 // for control and plugging the control flow into the context,
4271 // because we need to prepare a pair of extra administrative AST ids
4272 // for the optimizing compiler.
4273 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4274 Label materialize_true, materialize_false, done;
4275 VisitForControl(expr->expression(),
// Materialize true/false at the respective bailout points so the
// optimizing compiler can deopt into either branch.
4279 __ bind(&materialize_true);
4280 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4281 if (context()->IsAccumulatorValue()) {
4282 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4284 __ PushRoot(Heap::kTrueValueRootIndex);
4286 __ jmp(&done, Label::kNear);
4287 __ bind(&materialize_false);
4288 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4289 if (context()->IsAccumulatorValue()) {
4290 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4292 __ PushRoot(Heap::kFalseValueRootIndex);
4299 case Token::TYPEOF: {
4300 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
// Evaluate the operand as a stack value (typeof never throws a
// reference error for unresolvable references — see
// VisitForTypeofValue), then call the runtime to classify it.
4301 { StackValueContext context(this);
4302 VisitForTypeofValue(expr->expression());
4304 __ CallRuntime(Runtime::kTypeof, 1);
4305 context()->Plug(rax);
// Emits code for ++/-- (prefix and postfix) on a variable, named property,
// or keyed property. Inlines the smi fast path when profitable and falls
// back to a BinaryOpICStub for the generic +1/-1, then stores the result
// back through the appropriate assignment path.
// NOTE(review): elided listing — several statements, case labels, and
// braces of this function are missing from this view.
4315 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4316 ASSERT(expr->expression()->IsValidLeftHandSide());
4318 Comment cmnt(masm_, "[ CountOperation");
4319 SetSourcePosition(expr->position());
4321 // Expression can only be a property, a global or a (parameter or local)
4323 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4324 LhsKind assign_type = VARIABLE;
4325 Property* prop = expr->expression()->AsProperty();
4326 // In case of a property we use the uninitialized expression context
4327 // of the key to detect a named property.
4330 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4333 // Evaluate expression and get value.
4334 if (assign_type == VARIABLE) {
4335 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4336 AccumulatorValueContext context(this);
4337 EmitVariableLoad(expr->expression()->AsVariableProxy());
4339 // Reserve space for result of postfix operation.
4340 if (expr->is_postfix() && !context()->IsEffect()) {
4341 __ Push(Smi::FromInt(0));
4343 if (assign_type == NAMED_PROPERTY) {
4344 VisitForAccumulatorValue(prop->obj());
4345 __ Push(rax); // Copy of receiver, needed for later store.
4346 EmitNamedPropertyLoad(prop);
4348 VisitForStackValue(prop->obj());
4349 VisitForAccumulatorValue(prop->key());
4350 __ movp(rdx, Operand(rsp, 0)); // Leave receiver on stack
4351 __ Push(rax); // Copy of key, needed for later store.
4352 EmitKeyedPropertyLoad(prop);
4356 // We need a second deoptimization point after loading the value
4357 // in case evaluating the property load may have a side effect.
4358 if (assign_type == VARIABLE) {
4359 PrepareForBailout(expr->expression(), TOS_REG);
4361 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4364 // Inline smi case if we are in a loop.
4365 Label done, stub_call;
4366 JumpPatchSite patch_site(masm_);
4367 if (ShouldInlineSmiCase(expr->op())) {
4369 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4371 // Save result for postfix expressions.
4372 if (expr->is_postfix()) {
4373 if (!context()->IsEffect()) {
4374 // Save the result on the stack. If we have a named or keyed property
4375 // we store the result under the receiver that is currently on top
4377 switch (assign_type) {
4381 case NAMED_PROPERTY:
4382 __ movp(Operand(rsp, kPointerSize), rax);
4384 case KEYED_PROPERTY:
// Keyed case has both receiver and key above the reserved slot.
4385 __ movp(Operand(rsp, 2 * kPointerSize), rax);
// Smi fast path: add/subtract 1 with overflow bailout to the stub call.
4391 SmiOperationExecutionMode mode;
4392 mode.Add(PRESERVE_SOURCE_REGISTER);
4393 mode.Add(BAILOUT_ON_NO_OVERFLOW);
4394 if (expr->op() == Token::INC) {
4395 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4397 __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4399 __ jmp(&stub_call, Label::kNear);
// Slow path: convert the operand to a number first.
4403 ToNumberStub convert_stub;
4404 __ CallStub(&convert_stub);
4406 // Save result for postfix expressions.
4407 if (expr->is_postfix()) {
4408 if (!context()->IsEffect()) {
4409 // Save the result on the stack. If we have a named or keyed property
4410 // we store the result under the receiver that is currently on top
4412 switch (assign_type) {
4416 case NAMED_PROPERTY:
4417 __ movp(Operand(rsp, kPointerSize), rax);
4419 case KEYED_PROPERTY:
4420 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4426 // Record position before stub call.
4427 SetSourcePosition(expr->position());
4429 // Call stub for +1/-1.
4430 __ bind(&stub_call);
4432 __ Move(rax, Smi::FromInt(1));
4433 BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
4434 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
// Record patch info so the smi check above can be patched by the IC.
4435 patch_site.EmitPatchInfo();
4438 // Store the value returned in rax.
4439 switch (assign_type) {
4441 if (expr->is_postfix()) {
4442 // Perform the assignment as if via '='.
4443 { EffectContext context(this);
4444 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4446 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4449 // For all contexts except kEffect: We have the result on
4450 // top of the stack.
4451 if (!context()->IsEffect()) {
4452 context()->PlugTOS();
4455 // Perform the assignment as if via '='.
4456 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4458 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4459 context()->Plug(rax);
4462 case NAMED_PROPERTY: {
4463 __ Move(rcx, prop->key()->AsLiteral()->value());
4465 CallStoreIC(expr->CountStoreFeedbackId());
4466 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4467 if (expr->is_postfix()) {
4468 if (!context()->IsEffect()) {
4469 context()->PlugTOS();
4472 context()->Plug(rax);
4476 case KEYED_PROPERTY: {
// Select the keyed store IC matching the current strictness.
4479 Handle<Code> ic = strict_mode() == SLOPPY
4480 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4481 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4482 CallIC(ic, expr->CountStoreFeedbackId());
4483 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4484 if (expr->is_postfix()) {
4485 if (!context()->IsEffect()) {
4486 context()->PlugTOS();
4489 context()->Plug(rax);
// Loads the value of an expression for use as a typeof operand. Unlike a
// normal load, an unresolvable global or lookup-slot reference must not
// throw a reference error (typeof x is "undefined" for undeclared x), so
// globals use a non-contextual IC and lookup slots call the
// NoReferenceError runtime variant.
// NOTE(review): elided listing — some statements/braces are not visible.
4497 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4498 VariableProxy* proxy = expr->AsVariableProxy();
4499 ASSERT(!context()->IsEffect());
4500 ASSERT(!context()->IsTest());
4502 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4503 Comment cmnt(masm_, "[ Global variable");
4504 __ Move(rcx, proxy->name());
4505 __ movp(rax, GlobalObjectOperand());
4506 // Use a regular load, not a contextual load, to avoid a reference
4508 CallLoadIC(NOT_CONTEXTUAL);
4509 PrepareForBailout(expr, TOS_REG);
4510 context()->Plug(rax);
4511 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4512 Comment cmnt(masm_, "[ Lookup slot");
4515 // Generate code for loading from variables potentially shadowed
4516 // by eval-introduced variables.
4517 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4521 __ Push(proxy->name());
4522 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4523 PrepareForBailout(expr, TOS_REG);
4526 context()->Plug(rax);
4528 // This expression cannot throw a reference error at the top level.
4529 VisitInDuplicateContext(expr);
// Emits an inlined comparison of `typeof sub_expr` against a string
// literal `check` (e.g. typeof x == "number"), splitting control flow to
// if_true/if_false instead of materializing the typeof string. The value
// under test is left in rax by VisitForTypeofValue.
// NOTE(review): float32x4_string/int32x4_string branches appear to support
// a SIMD extension of typeof — not standard ECMAScript; confirm against
// the build's flags. Listing is elided (some braces/lines missing).
4534 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4535 Expression* sub_expr,
4536 Handle<String> check) {
4537 Label materialize_true, materialize_false;
4538 Label* if_true = NULL;
4539 Label* if_false = NULL;
4540 Label* fall_through = NULL;
4541 context()->PrepareTest(&materialize_true, &materialize_false,
4542 &if_true, &if_false, &fall_through);
4544 { AccumulatorValueContext context(this);
4545 VisitForTypeofValue(sub_expr);
4547 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4549 if (check->Equals(isolate()->heap()->number_string())) {
// "number": smis and heap numbers.
4550 __ JumpIfSmi(rax, if_true);
4551 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4552 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4553 Split(equal, if_true, if_false, fall_through);
4554 } else if (check->Equals(isolate()->heap()->float32x4_string())) {
4555 __ JumpIfSmi(rax, if_false);
4556 __ CmpObjectType(rax, FLOAT32x4_TYPE, rdx);
4557 Split(equal, if_true, if_false, fall_through);
4558 } else if (check->Equals(isolate()->heap()->int32x4_string())) {
4559 __ JumpIfSmi(rax, if_false);
4560 __ CmpObjectType(rax, INT32x4_TYPE, rdx);
4561 Split(equal, if_true, if_false, fall_through);
4562 } else if (check->Equals(isolate()->heap()->string_string())) {
4563 __ JumpIfSmi(rax, if_false);
4564 // Check for undetectable objects => false.
4565 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4566 __ j(above_equal, if_false);
4567 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4568 Immediate(1 << Map::kIsUndetectable));
4569 Split(zero, if_true, if_false, fall_through);
4570 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4571 __ JumpIfSmi(rax, if_false);
4572 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4573 Split(equal, if_true, if_false, fall_through);
4574 } else if (check->Equals(isolate()->heap()->boolean_string())) {
// "boolean": only the two oddball values true and false.
4575 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4576 __ j(equal, if_true);
4577 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4578 Split(equal, if_true, if_false, fall_through);
4579 } else if (FLAG_harmony_typeof &&
// Harmony semantics: typeof null == "null" (behind a flag).
4580 check->Equals(isolate()->heap()->null_string())) {
4581 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4582 Split(equal, if_true, if_false, fall_through);
4583 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4584 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4585 __ j(equal, if_true);
4586 __ JumpIfSmi(rax, if_false);
4587 // Check for undetectable objects => true.
4588 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4589 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4590 Immediate(1 << Map::kIsUndetectable));
4591 Split(not_zero, if_true, if_false, fall_through);
4592 } else if (check->Equals(isolate()->heap()->function_string())) {
4593 __ JumpIfSmi(rax, if_false);
// "function": JSFunction or function proxy — the only two callable
// spec object types (asserted below).
4594 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4595 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4596 __ j(equal, if_true);
4597 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4598 Split(equal, if_true, if_false, fall_through);
4599 } else if (check->Equals(isolate()->heap()->object_string())) {
4600 __ JumpIfSmi(rax, if_false);
4601 if (!FLAG_harmony_typeof) {
// Classic semantics: typeof null == "object".
4602 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4603 __ j(equal, if_true);
4605 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4606 __ j(below, if_false);
4607 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4608 __ j(above, if_false);
4609 // Check for undetectable objects => false.
4610 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4611 Immediate(1 << Map::kIsUndetectable));
4612 Split(zero, if_true, if_false, fall_through);
// Unknown typeof string: statically false.
4614 if (if_false != fall_through) __ jmp(if_false);
4616 context()->Plug(if_true, if_false);
// Emits code for a comparison expression. Literal comparisons (typeof,
// null/undefined) are handled by TryLiteralCompare; `in` and `instanceof`
// call builtins/stubs; all other operators go through an inlined smi fast
// path (when profitable) backed by a compare IC.
// NOTE(review): elided listing — switch/case labels and several lines are
// missing from this view.
4620 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4621 Comment cmnt(masm_, "[ CompareOperation");
4622 SetSourcePosition(expr->position());
4624 // First we try a fast inlined version of the compare when one of
4625 // the operands is a literal.
4626 if (TryLiteralCompare(expr)) return;
4628 // Always perform the comparison for its control flow. Pack the result
4629 // into the expression's context after the comparison is performed.
4630 Label materialize_true, materialize_false;
4631 Label* if_true = NULL;
4632 Label* if_false = NULL;
4633 Label* fall_through = NULL;
4634 context()->PrepareTest(&materialize_true, &materialize_false,
4635 &if_true, &if_false, &fall_through);
4637 Token::Value op = expr->op();
4638 VisitForStackValue(expr->left());
// Token::IN: the IN builtin returns the true oddball on success.
4641 VisitForStackValue(expr->right());
4642 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4643 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4644 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4645 Split(equal, if_true, if_false, fall_through);
4648 case Token::INSTANCEOF: {
4649 VisitForStackValue(expr->right());
4650 InstanceofStub stub(InstanceofStub::kNoFlags);
4652 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4654 // The stub returns 0 for true.
4655 Split(zero, if_true, if_false, fall_through);
// Default: generic comparison with inlined smi fast case.
4660 VisitForAccumulatorValue(expr->right());
4661 Condition cc = CompareIC::ComputeCondition(op);
4664 bool inline_smi_code = ShouldInlineSmiCase(op);
4665 JumpPatchSite patch_site(masm_);
4666 if (inline_smi_code) {
// Fast case: both operands are smis — compare directly, otherwise
// fall through to the compare IC below.
4670 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4672 Split(cc, if_true, if_false, NULL);
4673 __ bind(&slow_case);
4676 // Record position and call the compare IC.
4677 SetSourcePosition(expr->position());
4678 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4679 CallIC(ic, expr->CompareOperationFeedbackId());
4680 patch_site.EmitPatchInfo();
4682 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4684 Split(cc, if_true, if_false, fall_through);
4688 // Convert the result of the comparison into one expected for this
4689 // expression's context.
4690 context()->Plug(if_true, if_false);
// Emits an inlined comparison of sub_expr against null or undefined
// (`nil`). Strict equality compares directly against the corresponding
// root value; loose equality (x == null, which also matches undefined and
// undetectable objects) goes through the CompareNilIC stub.
// NOTE(review): elided listing — some lines/braces are missing.
4694 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4695 Expression* sub_expr,
4697 Label materialize_true, materialize_false;
4698 Label* if_true = NULL;
4699 Label* if_false = NULL;
4700 Label* fall_through = NULL;
4701 context()->PrepareTest(&materialize_true, &materialize_false,
4702 &if_true, &if_false, &fall_through);
4704 VisitForAccumulatorValue(sub_expr);
4705 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4706 if (expr->op() == Token::EQ_STRICT) {
4707 Heap::RootListIndex nil_value = nil == kNullValue ?
4708 Heap::kNullValueRootIndex :
4709 Heap::kUndefinedValueRootIndex;
4710 __ CompareRoot(rax, nil_value);
4711 Split(equal, if_true, if_false, fall_through);
4713 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4714 CallIC(ic, expr->CompareOperationFeedbackId());
// The stub's result is tested for non-zero (true).
4716 Split(not_zero, if_true, if_false, fall_through);
4718 context()->Plug(if_true, if_false);
// Loads the current JSFunction (the closure) from the frame and plugs it
// into the expression context.
4722 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4723 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4724 context()->Plug(rax);
// Returns the register used for expression results.
// NOTE(review): body elided from this listing — presumably returns rax
// (the accumulator used throughout this file); confirm in full source.
4728 Register FullCodeGenerator::result_register() {
// Returns the register holding the current context.
// NOTE(review): body elided from this listing — presumably returns rsi
// (used as the context register throughout this file); confirm in source.
4733 Register FullCodeGenerator::context_register() {
// Stores `value` into the stack frame at the given byte offset from rbp.
// The offset must be pointer-aligned.
4738 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4739 ASSERT(IsAligned(frame_offset, kPointerSize));
4740 __ movp(Operand(rbp, frame_offset), value);
// Loads the context slot `context_index` of the current context (rsi)
// into `dst`.
4744 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4745 __ movp(dst, ContextOperand(rsi, context_index));
// Pushes the closure argument used when allocating a new context: a smi
// sentinel for global/module scopes (the runtime substitutes the canonical
// empty function), the enclosing closure for eval scopes, and the current
// frame's function for ordinary function scopes.
4749 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4750 Scope* declaration_scope = scope()->DeclarationScope();
4751 if (declaration_scope->is_global_scope() ||
4752 declaration_scope->is_module_scope()) {
4753 // Contexts nested in the native context have a canonical empty function
4754 // as their closure, not the anonymous closure containing the global
4755 // code. Pass a smi sentinel and let the runtime look up the empty
4757 __ Push(Smi::FromInt(0));
4758 } else if (declaration_scope->is_eval_scope()) {
4759 // Contexts created by a call to eval have the same closure as the
4760 // context calling eval, not the anonymous closure containing the eval
4761 // code. Fetch it from the context.
4762 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4764 ASSERT(declaration_scope->is_function_scope());
4765 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4770 // ----------------------------------------------------------------------------
4771 // Non-local control flow support.
// Saves state before entering a finally block: cooks the return address
// (stores it as a smi-encoded delta from the code object so it survives
// GC code moves), then saves the result register and the isolate's pending
// message state so the finally body can clobber them freely.
// NOTE(review): elided listing — the pushes pairing with these loads are
// not all visible here; ExitFinallyBlock below restores in reverse order.
4774 void FullCodeGenerator::EnterFinallyBlock() {
4775 ASSERT(!result_register().is(rdx));
4776 ASSERT(!result_register().is(rcx));
4777 // Cook return address on top of stack (smi encoded Code* delta)
4778 __ PopReturnAddressTo(rdx);
4779 __ Move(rcx, masm_->CodeObject());
4781 __ Integer32ToSmi(rdx, rdx);
4784 // Store result register while executing finally block.
4785 __ Push(result_register());
4787 // Store pending message while executing finally block.
4788 ExternalReference pending_message_obj =
4789 ExternalReference::address_of_pending_message_obj(isolate());
4790 __ Load(rdx, pending_message_obj);
4793 ExternalReference has_pending_message =
4794 ExternalReference::address_of_has_pending_message(isolate());
4795 __ Load(rdx, has_pending_message);
// The flag is stored on the stack as a smi.
4796 __ Integer32ToSmi(rdx, rdx);
4799 ExternalReference pending_message_script =
4800 ExternalReference::address_of_pending_message_script(isolate());
4801 __ Load(rdx, pending_message_script);
// Restores the state saved by EnterFinallyBlock, in reverse order:
// pending message state, then the result register, then uncooks the
// return address (smi delta -> absolute address relative to the current
// code object).
// NOTE(review): elided listing — the pops pairing with these stores and
// the final push/ret are not all visible here.
4806 void FullCodeGenerator::ExitFinallyBlock() {
4807 ASSERT(!result_register().is(rdx));
4808 ASSERT(!result_register().is(rcx));
4809 // Restore pending message from stack.
4811 ExternalReference pending_message_script =
4812 ExternalReference::address_of_pending_message_script(isolate());
4813 __ Store(pending_message_script, rdx);
// The has-pending-message flag was saved as a smi; decode before storing.
4816 __ SmiToInteger32(rdx, rdx);
4817 ExternalReference has_pending_message =
4818 ExternalReference::address_of_has_pending_message(isolate());
4819 __ Store(has_pending_message, rdx);
4822 ExternalReference pending_message_obj =
4823 ExternalReference::address_of_pending_message_obj(isolate());
4824 __ Store(pending_message_obj, rdx);
4826 // Restore result register from stack.
4827 __ Pop(result_register());
4829 // Uncook return address.
4831 __ SmiToInteger32(rdx, rdx);
4832 __ Move(rcx, masm_->CodeObject());
4840 #define __ ACCESS_MASM(masm())
// Emits the code run when control leaves a try-finally (break/continue/
// return through the finally): unwinds to the handler block, restores the
// context from it, and calls the finally code before continuing.
// NOTE(review): elided listing — the return statement and remaining
// unwind steps are not visible here.
4842 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4844 int* context_length) {
4845 // The macros used here must preserve the result register.
4847 // Because the handler block contains the context of the finally
4848 // code, we can restore it directly from there for the finally code
4849 // rather than iteratively unwinding contexts via their previous
4851 __ Drop(*stack_depth); // Down to the handler block.
4852 if (*context_length > 0) {
4853 // Restore the context to its dedicated register and the stack.
4854 __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4855 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4858 __ call(finally_entry_);
// The finally code restored everything; nothing left to unwind.
4861 *context_length = 0;
// Opcode bytes used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// toggle the back-edge check between an interrupt check (jns over the
// call) and an unconditional on-stack-replacement call (the jns replaced
// by a two-byte nop).
4869 static const byte kJnsInstruction = 0x79;
4870 static const byte kJnsOffset = 0x1d;
4871 static const byte kNopByteOne = 0x66;
4872 static const byte kNopByteTwo = 0x90;
4874 static const byte kCallInstruction = 0xe8;
// Patches the back-edge sequence at `pc` in unoptimized code. For the
// interrupt state the conditional jump (jns) over the call is restored;
// for OSR states it is overwritten with a two-byte nop so the call is
// always taken. The call target is then redirected to `replacement_code`
// and the incremental marker is notified of the code patch.
// NOTE(review): elided listing — some case labels/breaks are missing.
4878 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4880 BackEdgeState target_state,
4881 Code* replacement_code) {
// pc points just past the call's 32-bit displacement; walk back to the
// displacement, then to the jns opcode and its offset byte.
4882 Address call_target_address = pc - kIntSize;
4883 Address jns_instr_address = call_target_address - 3;
4884 Address jns_offset_address = call_target_address - 2;
4886 switch (target_state) {
4888 // sub <profiling_counter>, <delta> ;; Not changed
4890 // call <interrupt stub>
4892 *jns_instr_address = kJnsInstruction;
4893 *jns_offset_address = kJnsOffset;
4895 case ON_STACK_REPLACEMENT:
4896 case OSR_AFTER_STACK_CHECK:
4897 // sub <profiling_counter>, <delta> ;; Not changed
4900 // call <on-stack replacement>
4902 *jns_instr_address = kNopByteOne;
4903 *jns_offset_address = kNopByteTwo;
4907 Assembler::set_target_address_at(call_target_address,
4909 replacement_code->entry());
4910 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4911 unoptimized_code, call_target_address, replacement_code);
// Inspects the patched back-edge sequence at `pc` and reports its current
// state: INTERRUPT if the jns is intact (asserting the call targets the
// InterruptCheck builtin), otherwise distinguishes ON_STACK_REPLACEMENT
// from OSR_AFTER_STACK_CHECK by the call target.
// NOTE(review): elided listing — the INTERRUPT return and closing braces
// are not visible here.
4915 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4917 Code* unoptimized_code,
4919 Address call_target_address = pc - kIntSize;
4920 Address jns_instr_address = call_target_address - 3;
// Sanity check: the byte before the displacement must be a call opcode.
4921 ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4923 if (*jns_instr_address == kJnsInstruction) {
4924 ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4925 ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4926 Assembler::target_address_at(call_target_address,
// Otherwise the jns must have been replaced by the two-byte nop.
4931 ASSERT_EQ(kNopByteOne, *jns_instr_address);
4932 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4934 if (Assembler::target_address_at(call_target_address,
4935 unoptimized_code) ==
4936 isolate->builtins()->OnStackReplacement()->entry()) {
4937 return ON_STACK_REPLACEMENT;
4940 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4941 Assembler::target_address_at(call_target_address,
4943 return OSR_AFTER_STACK_CHECK;
4947 } } // namespace v8::internal
4949 #endif // V8_TARGET_ARCH_X64