1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
// Shorthand used throughout this file: route assembler emission through masm_.
23 #define __ ACCESS_MASM(masm_)
// Records the position of an inlined smi check so the IC system can later
// patch the jump in place: jc <-> jz, jnc <-> jnz (see EmitJump below).
// NOTE(review): this listing elides source lines, so some member bodies,
// closing braces, and DEBUG-only fields are not visible here.
26 class JumpPatchSite BASE_EMBEDDED {
28 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
// Debug bookkeeping: EmitPatchInfo must be called iff a site was bound.
30 info_emitted_ = false;
35 DCHECK(patch_site_.is_bound() == info_emitted_);
// Jump to |target| when |reg| is not a smi. Uses testb on kSmiTagMask and
// emits jnc; the jump is always taken until the IC patches it.
38 void EmitJumpIfNotSmi(Register reg,
40 Label::Distance near_jump = Label::kFar) {
41 __ testb(reg, Immediate(kSmiTagMask));
42 EmitJump(not_carry, target, near_jump); // Always taken before patched.
// Jump to |target| when |reg| is a smi. Emits jc; never taken until patched.
45 void EmitJumpIfSmi(Register reg,
47 Label::Distance near_jump = Label::kFar) {
48 __ testb(reg, Immediate(kSmiTagMask));
49 EmitJump(carry, target, near_jump); // Never taken before patched.
// Emit the patch-site marker the IC patcher looks for: a testl on rax whose
// immediate encodes the byte distance back to the patch site, or a nop when
// no inlined smi code was emitted.
52 void EmitPatchInfo() {
53 if (patch_site_.is_bound()) {
54 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
// The delta must fit in the low byte of the testl immediate.
55 DCHECK(is_uint8(delta_to_patch_site));
56 __ testl(rax, Immediate(delta_to_patch_site));
61 __ nop(); // Signals no inlined code.
66 // jc will be patched with jz, jnc will become jnz.
67 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
// A patch site may be bound at most once, before EmitPatchInfo.
68 DCHECK(!patch_site_.is_bound() && !info_emitted_);
69 DCHECK(cc == carry || cc == not_carry);
70 __ bind(&patch_site_);
71 __ j(cc, target, near_jump);
74 MacroAssembler* masm_;
82 // Generate code for a JS function. On entry to the function the receiver
83 // and arguments have been pushed on the stack left to right, with the
84 // return address on top of them. The actual argument count matches the
85 // formal parameter count expected by the function.
87 // The live registers are:
88 // o rdi: the JS function object being called (i.e. ourselves)
90 // o rbp: our caller's frame pointer
91 // o rsp: stack pointer (pointing to return address)
93 // The function builds a JS frame. Please see JavaScriptFrameConstants in
94 // frames-x64.h for its layout.
// Entry point for full-codegen compilation of one function: emits the
// prologue, receiver fixup, local/context/arguments-object setup, then the
// declarations, stack check, body, and the trailing return sequence.
// NOTE(review): this listing elides source lines (labels, braces, and some
// statements are missing); comments below describe only what is visible.
95 void FullCodeGenerator::Generate() {
96 CompilationInfo* info = info_;
98 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
// The profiling counter (a heap Cell) is decremented on back edges and
// returns; reaching zero triggers the interrupt/optimization machinery.
100 profiling_counter_ = isolate()->factory()->NewCell(
101 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
102 SetFunctionPosition(function());
103 Comment cmnt(masm_, "[ function compiled by full code generator");
105 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
// --stop-at support: break into the debugger at a named function's entry.
108 if (strlen(FLAG_stop_at) > 0 &&
109 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
114 // Sloppy mode functions and builtins need to replace the receiver with the
115 // global proxy when called as functions (without an explicit receiver
117 if (info->strict_mode() == SLOPPY && !info->is_native()) {
119 // +1 for return address.
120 StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
121 __ movp(rcx, args.GetReceiverOperand());
// Only an undefined receiver is replaced with the global proxy.
123 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
124 __ j(not_equal, &ok, Label::kNear);
126 __ movp(rcx, GlobalObjectOperand());
127 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));
129 __ movp(args.GetReceiverOperand(), rcx);
134 // Open a frame scope to indicate that there is a frame on the stack. The
135 // MANUAL indicates that the scope shouldn't actually generate code to set up
136 // the frame (that is done below).
137 FrameScope frame_scope(masm_, StackFrame::MANUAL);
139 info->set_prologue_offset(masm_->pc_offset());
140 __ Prologue(info->IsCodePreAgingActive());
141 info->AddNoFrameRange(0, masm_->pc_offset());
// Reserve stack slots for locals, initialized to undefined.
143 { Comment cmnt(masm_, "[ Allocate locals");
144 int locals_count = info->scope()->num_stack_slots();
145 // Generators allocate locals, if any, in context slots.
146 DCHECK(!info->function()->is_generator() || locals_count == 0);
147 if (locals_count == 1) {
148 __ PushRoot(Heap::kUndefinedValueRootIndex);
149 } else if (locals_count > 1) {
// Many locals: check against the real stack limit before pushing them all.
150 if (locals_count >= 128) {
153 __ subp(rcx, Immediate(locals_count * kPointerSize));
154 __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
155 __ j(above_equal, &ok, Label::kNear);
156 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
159 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
// Push undefined in unrolled batches of kMaxPushes; rcx counts iterations.
160 const int kMaxPushes = 32;
161 if (locals_count >= kMaxPushes) {
162 int loop_iterations = locals_count / kMaxPushes;
163 __ movp(rcx, Immediate(loop_iterations));
165 __ bind(&loop_header);
167 for (int i = 0; i < kMaxPushes; i++) {
170 // Continue loop if not done.
172 __ j(not_zero, &loop_header, Label::kNear);
174 int remaining = locals_count % kMaxPushes;
175 // Emit the remaining pushes.
176 for (int i = 0; i < remaining; i++) {
// Tracks whether the closure is still live in rdi (clobbered by the
// runtime call for context allocation below).
182 bool function_in_register = true;
184 // Possibly allocate a local context.
185 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186 if (heap_slots > 0) {
187 Comment cmnt(masm_, "[ Allocate context");
188 bool need_write_barrier = true;
189 // Argument to NewContext is the function, which is still in rdi.
190 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
192 __ Push(info->scope()->GetScopeInfo());
193 __ CallRuntime(Runtime::kNewGlobalContext, 2);
194 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
195 FastNewContextStub stub(isolate(), heap_slots);
197 // Result of FastNewContextStub is always in new space.
198 need_write_barrier = false;
201 __ CallRuntime(Runtime::kNewFunctionContext, 1);
203 function_in_register = false;
204 // Context is returned in rax. It replaces the context passed to us.
205 // It's saved in the stack and kept live in rsi.
207 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
209 // Copy any necessary parameters into the context.
210 int num_parameters = info->scope()->num_parameters();
211 for (int i = 0; i < num_parameters; i++) {
212 Variable* var = scope()->parameter(i);
213 if (var->IsContextSlot()) {
214 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
215 (num_parameters - 1 - i) * kPointerSize;
216 // Load parameter from stack.
217 __ movp(rax, Operand(rbp, parameter_offset));
218 // Store it in the context.
219 int context_offset = Context::SlotOffset(var->index());
220 __ movp(Operand(rsi, context_offset), rax);
221 // Update the write barrier. This clobbers rax and rbx.
222 if (need_write_barrier) {
223 __ RecordWriteContextSlot(
224 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
225 } else if (FLAG_debug_code) {
// Debug-only: a new-space context never needs a write barrier; abort if
// the assumption is violated.
227 __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
228 __ Abort(kExpectedNewSpaceObject);
235 // Possibly allocate an arguments object.
236 Variable* arguments = scope()->arguments();
237 if (arguments != NULL) {
238 // Arguments object must be allocated after the context object, in
239 // case the "arguments" or ".arguments" variables are in the context.
240 Comment cmnt(masm_, "[ Allocate arguments object");
241 if (function_in_register) {
244 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
246 // The receiver is just before the parameters on the caller's stack.
247 int num_parameters = info->scope()->num_parameters();
248 int offset = num_parameters * kPointerSize;
250 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
252 __ Push(Smi::FromInt(num_parameters));
253 // Arguments to ArgumentsAccessStub:
254 // function, receiver address, parameter count.
255 // The stub will rewrite receiver and parameter count if the previous
256 // stack frame was an arguments adapter frame.
257 ArgumentsAccessStub::Type type;
258 if (strict_mode() == STRICT) {
259 type = ArgumentsAccessStub::NEW_STRICT;
260 } else if (function()->has_duplicate_parameters()) {
261 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
263 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
265 ArgumentsAccessStub stub(isolate(), type);
// Store the stub's result (in rax) into the 'arguments' variable;
// rbx/rdx are scratch for SetVar's write barrier.
268 SetVar(arguments, rax, rbx, rdx);
272 __ CallRuntime(Runtime::kTraceEnter, 0);
275 // Visit the declarations and body unless there is an illegal
277 if (scope()->HasIllegalRedeclaration()) {
278 Comment cmnt(masm_, "[ Declarations");
279 scope()->VisitIllegalRedeclaration(this);
282 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
283 { Comment cmnt(masm_, "[ Declarations");
284 // For named function expressions, declare the function name as a
286 if (scope()->is_function_scope() && scope()->function() != NULL) {
287 VariableDeclaration* function = scope()->function();
288 DCHECK(function->proxy()->var()->mode() == CONST ||
289 function->proxy()->var()->mode() == CONST_LEGACY);
290 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
291 VisitVariableDeclaration(function);
293 VisitDeclarations(scope()->declarations());
// Interrupt check at function entry (distinct from the per-back-edge checks).
296 { Comment cmnt(masm_, "[ Stack check");
297 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
299 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
300 __ j(above_equal, &ok, Label::kNear);
301 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
305 { Comment cmnt(masm_, "[ Body");
306 DCHECK(loop_depth() == 0);
307 VisitStatements(function()->body());
308 DCHECK(loop_depth() == 0);
312 // Always emit a 'return undefined' in case control fell off the end of
314 { Comment cmnt(masm_, "[ return <undefined>;");
315 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
316 EmitReturnSequence();
// Resets the accumulator register (rax) to a safe value between statements.
// NOTE(review): the body is elided from this listing.
321 void FullCodeGenerator::ClearAccumulator() {
// Subtracts |delta| from the profiling counter cell (sets CPU flags, which
// callers test with a jump immediately afterwards). Clobbers rbx.
326 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
327 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
// Adding the negated delta performs the decrement on the smi in the cell.
328 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
329 Smi::FromInt(-delta));
// Restores the profiling counter cell to the full interrupt budget.
// Clobbers rbx and kScratchRegister.
333 void FullCodeGenerator::EmitProfilingCounterReset() {
334 int reset_value = FLAG_interrupt_budget;
335 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
336 __ Move(kScratchRegister, Smi::FromInt(reset_value));
337 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
// Byte offset of the jns in the back-edge sequence; depends on pointer size
// (x64 vs x32 encodings). Used when patching back edges for OSR.
341 static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
// Emitted at every loop back edge: decrements the profiling counter by a
// weight proportional to the back-edge distance and calls the interrupt
// check builtin when the counter goes negative. The emitted sequence must
// have a fixed, patchable size (see PredictableCodeSizeScope below).
344 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
345 Label* back_edge_target) {
346 Comment cmnt(masm_, "[ Back edge bookkeeping");
349 DCHECK(back_edge_target->is_bound());
// Larger loop bodies decrement the counter faster, capped at the max weight.
350 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
351 int weight = Min(kMaxBackEdgeWeight,
352 Max(1, distance / kCodeSizeMultiplier));
353 EmitProfilingCounterDecrement(weight);
355 __ j(positive, &ok, Label::kNear);
// The interrupt-call sequence must be exactly kJnsOffset bytes so the
// back-edge patcher can find and rewrite it; no debug code may be mixed in.
357 PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
358 DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
359 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
361 // Record a mapping of this PC offset to the OSR id. This is used to find
362 // the AST id from the unoptimized code in order to use it as a key into
363 // the deoptimization input data found in the optimized code.
364 RecordBackEdge(stmt->OsrEntryId());
366 EmitProfilingCounterReset();
370 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
371 // Record a mapping of the OSR id to this PC. This is used if the OSR
372 // entry becomes the target of a bailout. We don't expect it to be, but
373 // we want it to work if it is.
374 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
// Emits (or jumps to) the function's single return sequence: profiling
// counter bookkeeping, then a debugger-patchable frame teardown + ret.
378 void FullCodeGenerator::EmitReturnSequence() {
379 Comment cmnt(masm_, "[ Return sequence");
// All returns share one sequence; later returns jump to the first.
380 if (return_label_.is_bound()) {
381 __ jmp(&return_label_);
383 __ bind(&return_label_);
386 __ CallRuntime(Runtime::kTraceExit, 1);
388 // Pretend that the exit is a backwards jump to the entry.
390 if (info_->ShouldSelfOptimize()) {
391 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
393 int distance = masm_->pc_offset();
394 weight = Min(kMaxBackEdgeWeight,
395 Max(1, distance / kCodeSizeMultiplier));
397 EmitProfilingCounterDecrement(weight);
399 __ j(positive, &ok, Label::kNear);
401 __ call(isolate()->builtins()->InterruptCheck(),
402 RelocInfo::CODE_TARGET);
404 EmitProfilingCounterReset();
407 // Add a label for checking the size of the code used for returning.
408 Label check_exit_codesize;
409 masm_->bind(&check_exit_codesize);
411 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
413 // Do not use the leave instruction here because it is too short to
414 // patch with the code required by the debugger.
417 int no_frame_start = masm_->pc_offset();
// Ret pops the receiver and all parameters in addition to the return address.
419 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
420 __ Ret(arguments_bytes, rcx);
422 // Add padding that will be overwritten by a debugger breakpoint. We
423 // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
424 // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
// NOTE(review): as listed, precedence makes this parse as
// ((kJSReturnSequenceLength - kPointerSize) == kInt64Size) ? 7 : 6 rather
// than length - (7 or 6); verify the original parenthesizes the ternary.
425 const int kPadding = Assembler::kJSReturnSequenceLength -
426 kPointerSize == kInt64Size ? 7 : 6;
427 for (int i = 0; i < kPadding; ++i) {
430 // Check that the size of the code used for returning is large enough
431 // for the debugger's requirements.
432 DCHECK(Assembler::kJSReturnSequenceLength <=
433 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
435 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
// Plug(Variable*) overloads: materialize a variable's value according to
// the active expression context (effect / accumulator / stack / test).
// Effect context: the value is unused, so nothing is emitted.
440 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
441 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
// Accumulator context: load the variable into rax (the result register).
445 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
446 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
447 codegen()->GetVar(result_register(), var);
// Stack context: push the variable's memory operand (push elided in listing).
451 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
452 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
453 MemOperand operand = codegen()->VarOperand(var, result_register());
// Test context: load the value and branch on its truthiness via DoTest.
458 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
459 codegen()->GetVar(result_register(), var);
460 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
461 codegen()->DoTest(this);
// Plug(Heap::RootListIndex) overloads: materialize a root-list constant
// (undefined, null, true, false, ...) per the active expression context.
// Effect context: value unused; emit nothing.
465 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
// Accumulator context: load the root into rax.
469 void FullCodeGenerator::AccumulatorValueContext::Plug(
470 Heap::RootListIndex index) const {
471 __ LoadRoot(result_register(), index);
// Stack context: push the root (PushRoot call elided in this listing).
475 void FullCodeGenerator::StackValueContext::Plug(
476 Heap::RootListIndex index) const {
// Test context: the truthiness of these roots is known statically, so jump
// straight to the true/false label; other roots fall back to DoTest.
481 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
482 codegen()->PrepareForBailoutBeforeSplit(condition(),
// undefined, null and false are known-falsy.
486 if (index == Heap::kUndefinedValueRootIndex ||
487 index == Heap::kNullValueRootIndex ||
488 index == Heap::kFalseValueRootIndex) {
489 if (false_label_ != fall_through_) __ jmp(false_label_);
490 } else if (index == Heap::kTrueValueRootIndex) {
491 if (true_label_ != fall_through_) __ jmp(true_label_);
493 __ LoadRoot(result_register(), index);
494 codegen()->DoTest(this);
// Plug(Handle<Object>) overloads: materialize a compile-time literal per
// the active expression context.
// Effect context: value unused; emit nothing.
499 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
// Accumulator context: move the literal into rax. Smis use SafeMove, which
// avoids embedding raw smi bits directly in the instruction stream.
503 void FullCodeGenerator::AccumulatorValueContext::Plug(
504 Handle<Object> lit) const {
506 __ SafeMove(result_register(), Smi::cast(*lit));
508 __ Move(result_register(), lit);
// Stack context: push the literal (smi path shown; heap-object path elided).
513 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
515 __ SafePush(Smi::cast(*lit));
// Test context: the literal's truthiness is known at compile time, so
// branch directly for undefined/null/booleans/strings/smis; anything else
// is loaded into rax and handed to DoTest.
522 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
523 codegen()->PrepareForBailoutBeforeSplit(condition(),
527 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
528 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
529 if (false_label_ != fall_through_) __ jmp(false_label_);
530 } else if (lit->IsTrue() || lit->IsJSObject()) {
531 if (true_label_ != fall_through_) __ jmp(true_label_);
532 } else if (lit->IsString()) {
// Only the empty string is falsy.
533 if (String::cast(*lit)->length() == 0) {
534 if (false_label_ != fall_through_) __ jmp(false_label_);
536 if (true_label_ != fall_through_) __ jmp(true_label_);
538 } else if (lit->IsSmi()) {
// Only smi zero is falsy.
539 if (Smi::cast(*lit)->value() == 0) {
540 if (false_label_ != fall_through_) __ jmp(false_label_);
542 if (true_label_ != fall_through_) __ jmp(true_label_);
545 // For simplicity we always test the accumulator register.
546 __ Move(result_register(), lit);
547 codegen()->DoTest(this);
// DropAndPlug overloads: discard |count| stack values and plug |reg| as the
// result, per the active expression context.
// Effect context: just drop (Drop call elided in this listing).
552 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
553 Register reg) const {
// Accumulator context: drop, then move |reg| into rax.
559 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
561 Register reg) const {
564 __ Move(result_register(), reg);
// Stack context: drop all but one slot and overwrite the top with |reg|.
568 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
569 Register reg) const {
571 if (count > 1) __ Drop(count - 1);
572 __ movp(Operand(rsp, 0), reg);
// Test context: drop, move |reg| to rax, and branch on its truthiness.
576 void FullCodeGenerator::TestContext::DropAndPlug(int count,
577 Register reg) const {
579 // For simplicity we always test the accumulator register.
581 __ Move(result_register(), reg);
582 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
583 codegen()->DoTest(this);
// Plug(Label*, Label*) overloads: bind the materialize_true/false labels
// emitted by a preceding comparison, producing a value where needed.
// Effect context: no value needed; both labels must be the same point.
587 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
588 Label* materialize_false) const {
589 DCHECK(materialize_true == materialize_false);
590 __ bind(materialize_true);
// Accumulator context: materialize the boolean into rax.
594 void FullCodeGenerator::AccumulatorValueContext::Plug(
595 Label* materialize_true,
596 Label* materialize_false) const {
598 __ bind(materialize_true);
599 __ Move(result_register(), isolate()->factory()->true_value());
600 __ jmp(&done, Label::kNear);
601 __ bind(materialize_false);
602 __ Move(result_register(), isolate()->factory()->false_value());
// Stack context: materialize the boolean onto the stack.
607 void FullCodeGenerator::StackValueContext::Plug(
608 Label* materialize_true,
609 Label* materialize_false) const {
611 __ bind(materialize_true);
612 __ Push(isolate()->factory()->true_value());
613 __ jmp(&done, Label::kNear);
614 __ bind(materialize_false);
615 __ Push(isolate()->factory()->false_value());
// Test context: the branch already targets the test's labels; nothing to do.
620 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
621 Label* materialize_false) const {
622 DCHECK(materialize_true == true_label_);
623 DCHECK(materialize_false == false_label_);
// Plug(bool) overloads: materialize a compile-time boolean per the active
// expression context.
// Effect context: value unused; emit nothing.
627 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
// Accumulator context: load the corresponding root value into rax.
631 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
632 Heap::RootListIndex value_root_index =
633 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
634 __ LoadRoot(result_register(), value_root_index);
// Stack context: push the corresponding root value.
638 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
639 Heap::RootListIndex value_root_index =
640 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
641 __ PushRoot(value_root_index);
// Test context: branch directly; the outcome is known at compile time.
645 void FullCodeGenerator::TestContext::Plug(bool flag) const {
646 codegen()->PrepareForBailoutBeforeSplit(condition(),
651 if (true_label_ != fall_through_) __ jmp(true_label_);
653 if (false_label_ != fall_through_) __ jmp(false_label_);
// Emits a ToBoolean test of the accumulator: the ToBoolean IC returns
// nonzero in rax for truthy values, and Split branches on that.
658 void FullCodeGenerator::DoTest(Expression* condition,
661 Label* fall_through) {
662 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
663 CallIC(ic, condition->test_id());
664 __ testp(result_register(), result_register());
665 // The stub returns nonzero for true.
666 Split(not_zero, if_true, if_false, fall_through);
// Emits the minimal branch(es) for condition |cc|: whichever of if_true /
// if_false equals fall_through gets no jump (control falls through to it).
670 void FullCodeGenerator::Split(Condition cc,
673 Label* fall_through) {
674 if (if_false == fall_through) {
676 } else if (if_true == fall_through) {
// Branch to if_false on the negated condition; true case falls through.
677 __ j(NegateCondition(cc), if_false);
// Returns the rbp-relative operand for a stack-allocated variable:
// parameters live above the frame (caller side), locals below it.
685 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
686 DCHECK(var->IsStackAllocated());
687 // Offset is negative because higher indexes are at lower addresses.
688 int offset = -var->index() * kPointerSize;
689 // Adjust by a (parameter or local) base offset.
690 if (var->IsParameter()) {
// Skip the saved frame pointer and return address to reach the last
// parameter, then index backwards through the parameter list.
691 offset += kFPOnStackSize + kPCOnStackSize +
692 (info_->scope()->num_parameters() - 1) * kPointerSize;
694 offset += JavaScriptFrameConstants::kLocal0Offset;
696 return Operand(rbp, offset);
// Returns an operand addressing |var|, walking the context chain into
// |scratch| for context slots; stack slots need no scratch register.
700 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702 if (var->IsContextSlot()) {
703 int context_chain_length = scope()->ContextChainLength(var->scope());
704 __ LoadContext(scratch, context_chain_length);
705 return ContextOperand(scratch, var->index());
707 return StackOperand(var);
// Loads |var| into |dest|; |dest| doubles as the scratch register for the
// context-chain walk in VarOperand.
712 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
714 MemOperand location = VarOperand(var, dest);
715 __ movp(dest, location);
// Stores |src| into |var|, emitting a write barrier for context slots.
// scratch0 addresses the slot; scratch1 is consumed by the barrier.
719 void FullCodeGenerator::SetVar(Variable* var,
// The scratches must be distinct from each other and from the value being
// stored, since the write barrier clobbers them.
723 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
724 DCHECK(!scratch0.is(src));
725 DCHECK(!scratch0.is(scratch1));
726 DCHECK(!scratch1.is(src));
727 MemOperand location = VarOperand(var, scratch0);
728 __ movp(location, src);
730 // Emit the write barrier code if the location is in the heap.
731 if (var->IsContextSlot()) {
732 int offset = Context::SlotOffset(var->index());
733 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
// Records a bailout point just before a test is split into branches. When
// |should_normalize| is set, the recorded state is normalized to a branch
// on rax == true so the deoptimizer can resume consistently.
738 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
739 bool should_normalize,
742 // Only prepare for bailouts before splits if we're in a test
743 // context. Otherwise, we let the Visit function deal with the
744 // preparation to avoid preparing with the same AST id twice.
745 if (!context()->IsTest() || !info_->IsOptimizable()) return;
// Skip over the normalization code during normal execution; it exists only
// as a deopt resume point.
748 if (should_normalize) __ jmp(&skip, Label::kNear);
749 PrepareForBailout(expr, TOS_REG);
750 if (should_normalize) {
751 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
752 Split(equal, if_true, if_false, NULL);
// Debug-only sanity check that a declaration is being emitted in its own
// context (rsi), not inside a with- or catch-context.
758 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
759 // The variable in the declaration always resides in the current context.
760 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
761 if (generate_debug_code_) {
762 // Check that we're not inside a with or catch context.
763 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
764 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
765 __ Check(not_equal, kDeclarationInWithContext);
766 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
767 __ Check(not_equal, kDeclarationInCatchContext);
// Emits code (or records globals_ entries) for a variable declaration,
// dispatching on where the variable was allocated. let/const declarations
// are hole-initialized so use-before-init can be detected at runtime.
772 void FullCodeGenerator::VisitVariableDeclaration(
773 VariableDeclaration* declaration) {
774 // If it was not possible to allocate the variable at compile time, we
775 // need to "declare" it at runtime to make sure it actually exists in the
777 VariableProxy* proxy = declaration->proxy();
778 VariableMode mode = declaration->mode();
779 Variable* variable = proxy->var();
780 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
781 switch (variable->location()) {
// Globals: record name/initial value pairs; DeclareGlobals processes the
// accumulated list in one runtime call later.
782 case Variable::UNALLOCATED:
783 globals_->Add(variable->name(), zone());
784 globals_->Add(variable->binding_needs_init()
785 ? isolate()->factory()->the_hole_value()
786 : isolate()->factory()->undefined_value(),
// Stack-allocated: store the hole into the variable's stack slot.
790 case Variable::PARAMETER:
791 case Variable::LOCAL:
793 Comment cmnt(masm_, "[ VariableDeclaration");
794 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
795 __ movp(StackOperand(variable), kScratchRegister);
// Context-allocated: store the hole into the context slot.
799 case Variable::CONTEXT:
801 Comment cmnt(masm_, "[ VariableDeclaration");
802 EmitDebugCheckDeclarationContext(variable);
803 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
804 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
805 // No write barrier since the hole value is in old space.
806 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
// Dynamically looked-up (e.g. inside eval): declare via the runtime.
810 case Variable::LOOKUP: {
811 Comment cmnt(masm_, "[ VariableDeclaration");
813 __ Push(variable->name());
814 // Declaration nodes are always introduced in one of four modes.
815 DCHECK(IsDeclaredVariableMode(mode));
816 PropertyAttributes attr =
817 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
818 __ Push(Smi::FromInt(attr));
819 // Push initial value, if any.
820 // Note: For variables we must not push an initial value (such as
821 // 'undefined') because we may have a (legal) redeclaration and we
822 // must not destroy the current value.
824 __ PushRoot(Heap::kTheHoleValueRootIndex);
826 __ Push(Smi::FromInt(0)); // Indicates no initial value.
828 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emits code for a function declaration: compiles the function literal and
// binds the resulting closure to the declared variable, dispatching on the
// variable's allocation (mirrors VisitVariableDeclaration).
835 void FullCodeGenerator::VisitFunctionDeclaration(
836 FunctionDeclaration* declaration) {
837 VariableProxy* proxy = declaration->proxy();
838 Variable* variable = proxy->var();
839 switch (variable->location()) {
// Globals: compile eagerly and record the SharedFunctionInfo for the
// batched DeclareGlobals runtime call.
840 case Variable::UNALLOCATED: {
841 globals_->Add(variable->name(), zone());
842 Handle<SharedFunctionInfo> function =
843 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
844 // Check for stack-overflow exception.
845 if (function.is_null()) return SetStackOverflow();
846 globals_->Add(function, zone());
// Stack-allocated: evaluate the closure into rax and store it in the slot.
850 case Variable::PARAMETER:
851 case Variable::LOCAL: {
852 Comment cmnt(masm_, "[ FunctionDeclaration");
853 VisitForAccumulatorValue(declaration->fun());
854 __ movp(StackOperand(variable), result_register());
// Context-allocated: store into the context slot, with a write barrier
// since the closure is a freshly-allocated heap object.
858 case Variable::CONTEXT: {
859 Comment cmnt(masm_, "[ FunctionDeclaration");
860 EmitDebugCheckDeclarationContext(variable);
861 VisitForAccumulatorValue(declaration->fun());
862 __ movp(ContextOperand(rsi, variable->index()), result_register());
863 int offset = Context::SlotOffset(variable->index());
864 // We know that we have written a function, which is not a smi.
865 __ RecordWriteContextSlot(rsi,
872 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
// Dynamically looked-up: declare via the runtime with the closure on the
// stack as the initial value.
876 case Variable::LOOKUP: {
877 Comment cmnt(masm_, "[ FunctionDeclaration");
879 __ Push(variable->name());
880 __ Push(Smi::FromInt(NONE));
881 VisitForStackValue(declaration->fun());
882 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emits code for a (harmony modules) module declaration: loads the module's
// instance object out of the global context and stores it into this scope's
// context slot, then traverses the module body.
889 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
890 Variable* variable = declaration->proxy()->var();
// Module bindings are always context-allocated with a frozen interface.
891 DCHECK(variable->location() == Variable::CONTEXT);
892 DCHECK(variable->interface()->IsFrozen());
894 Comment cmnt(masm_, "[ ModuleDeclaration");
895 EmitDebugCheckDeclarationContext(variable);
897 // Load instance object.
898 __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
899 __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
900 __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
903 __ movp(ContextOperand(rsi, variable->index()), rax);
904 // We know that we have written a module, which is not a smi.
905 __ RecordWriteContextSlot(rsi,
906 Context::SlotOffset(variable->index()),
912 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
914 // Traverse into body.
915 Visit(declaration->module());
// Emits code for an import declaration. Only the context-allocated case
// does any visible work here (a debug check); other allocation kinds are
// listed for exhaustiveness (bodies elided in this listing).
919 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
920 VariableProxy* proxy = declaration->proxy();
921 Variable* variable = proxy->var();
922 switch (variable->location()) {
923 case Variable::UNALLOCATED:
927 case Variable::CONTEXT: {
928 Comment cmnt(masm_, "[ ImportDeclaration");
929 EmitDebugCheckDeclarationContext(variable);
934 case Variable::PARAMETER:
935 case Variable::LOCAL:
936 case Variable::LOOKUP:
// Export declarations require no code generation here (body elided in this
// listing; only the signature is visible).
942 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
// Flushes the accumulated global declarations (|pairs| of name/value built
// up by the Visit*Declaration methods) in one runtime call.
947 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
948 // Call the runtime to declare the globals.
949 __ Push(rsi); // The context is the first argument.
951 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
952 __ CallRuntime(Runtime::kDeclareGlobals, 3);
953 // Return value is ignored.
// Declares all modules described by |descriptions| in one runtime call.
957 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
958 // Call the runtime to declare the modules.
959 __ Push(descriptions);
960 __ CallRuntime(Runtime::kDeclareModules, 1);
961 // Return value is ignored.
// Emits a switch statement as a chain of '===' comparisons against the tag
// value (kept on the stack), followed by all case bodies. Each comparison
// tries an inlined smi fast path first, then falls back to the CompareIC.
965 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
966 Comment cmnt(masm_, "[ SwitchStatement");
967 Breakable nested_statement(this, stmt);
968 SetStatementPosition(stmt);
970 // Keep the switch value on the stack until a case matches.
971 VisitForStackValue(stmt->tag());
972 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
974 ZoneList<CaseClause*>* clauses = stmt->cases();
975 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
977 Label next_test; // Recycled for each test.
978 // Compile all the tests with branches to their bodies.
979 for (int i = 0; i < clauses->length(); i++) {
980 CaseClause* clause = clauses->at(i);
981 clause->body_target()->Unuse();
983 // The default is not a test, but remember it as final fall through.
984 if (clause->is_default()) {
985 default_clause = clause;
989 Comment cmnt(masm_, "[ Case comparison");
993 // Compile the label expression.
994 VisitForAccumulatorValue(clause->label());
996 // Perform the comparison as if via '==='.
997 __ movp(rdx, Operand(rsp, 0)); // Switch value.
998 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
999 JumpPatchSite patch_site(masm_);
// Fast path: if both operands are smis, compare directly without the IC.
1000 if (inline_smi_code) {
1004 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1007 __ j(not_equal, &next_test);
1008 __ Drop(1); // Switch value is no longer needed.
1009 __ jmp(clause->body_target());
1010 __ bind(&slow_case);
1013 // Record position before stub call for type feedback.
1014 SetSourcePosition(clause->position());
// Slow path: strict-equality CompareIC, with patch info so the IC can
// later patch the inlined smi check above.
1016 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1017 CallIC(ic, clause->CompareId());
1018 patch_site.EmitPatchInfo();
1021 __ jmp(&skip, Label::kNear);
1022 PrepareForBailout(clause, TOS_REG);
1023 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1024 __ j(not_equal, &next_test);
1026 __ jmp(clause->body_target());
// IC returns zero flags-wise on equality; otherwise try the next clause.
1030 __ j(not_equal, &next_test);
1031 __ Drop(1); // Switch value is no longer needed.
1032 __ jmp(clause->body_target());
1035 // Discard the test value and jump to the default if present, otherwise to
1036 // the end of the statement.
1037 __ bind(&next_test);
1038 __ Drop(1); // Switch value is no longer needed.
1039 if (default_clause == NULL) {
1040 __ jmp(nested_statement.break_label());
1042 __ jmp(default_clause->body_target());
1045 // Compile all the case bodies.
1046 for (int i = 0; i < clauses->length(); i++) {
1047 Comment cmnt(masm_, "[ Case body");
1048 CaseClause* clause = clauses->at(i);
1049 __ bind(clause->body_target());
1050 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1051 VisitStatements(clause->statements());
1054 __ bind(nested_statement.break_label());
1055 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
// Emits full-codegen for a JS for-in statement (ECMA-262 5.1, 12.6.4).
// Strategy: skip the loop for null/undefined enumerables, convert the
// enumerable to a JS object, then try the enum-cache fast path; otherwise
// call the runtime, which returns either a map (fast modification check)
// or a fixed array of names (slow per-key filtering via FILTER_KEY).
// NOTE(review): this listing elides several original lines (label
// declarations such as call_runtime/use_cache/fixed_array/non_proxy/
// update_each, some __ bind sites, and the closing brace); the visible
// lines below are unchanged.
1059 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1060 Comment cmnt(masm_, "[ ForInStatement");
1061 int slot = stmt->ForInFeedbackSlot();
1062 SetStatementPosition(stmt);
1065 ForIn loop_statement(this, stmt);
1066 increment_loop_depth();
1068 // Get the object to enumerate over. If the object is null or undefined, skip
1069 // over the loop. See ECMA-262 version 5, section 12.6.4.
1070 VisitForAccumulatorValue(stmt->enumerable());
1071 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
// null_value stays live in rdi through the enum-cache check below.
1073 Register null_value = rdi;
1074 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1075 __ cmpp(rax, null_value);
1078 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1080 // Convert the object to a JS object.
1081 Label convert, done_convert;
1082 __ JumpIfSmi(rax, &convert);
1083 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1084 __ j(above_equal, &done_convert);
1087 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1088 __ bind(&done_convert);
1091 // Check for proxies.
1093 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1094 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1095 __ j(below_equal, &call_runtime);
1097 // Check cache validity in generated code. This is a fast case for
1098 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1099 // guarantee cache validity, call the runtime system to check cache
1100 // validity or get the property names in a fixed array.
1101 __ CheckEnumCache(null_value, &call_runtime);
1103 // The enum cache is valid. Load the map of the object being
1104 // iterated over and use the cache for the iteration.
1106 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1107 __ jmp(&use_cache, Label::kNear);
1109 // Get the set of properties to enumerate.
1110 __ bind(&call_runtime);
1111 __ Push(rax); // Duplicate the enumerable object on the stack.
1112 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1114 // If we got a map from the runtime call, we can do a fast
1115 // modification check. Otherwise, we got a fixed array, and we have
1116 // to do a slow check.
1118 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1119 Heap::kMetaMapRootIndex);
1120 __ j(not_equal, &fixed_array);
1122 // We got a map in register rax. Get the enumeration cache from it.
1123 __ bind(&use_cache);
1125 Label no_descriptors;
1127 __ EnumLength(rdx, rax);
1128 __ Cmp(rdx, Smi::FromInt(0));
1129 __ j(equal, &no_descriptors);
1131 __ LoadInstanceDescriptors(rax, rcx);
1132 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1133 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1135 // Set up the four remaining stack slots.
1136 __ Push(rax); // Map.
1137 __ Push(rcx); // Enumeration cache.
1138 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1139 __ Push(Smi::FromInt(0)); // Initial index.
1142 __ bind(&no_descriptors);
// Nothing to enumerate: drop the single slot pushed so far and leave.
1143 __ addp(rsp, Immediate(kPointerSize));
1146 // We got a fixed array in register rax. Iterate through that.
1148 __ bind(&fixed_array);
1150 // No need for a write barrier, we are storing a Smi in the feedback vector.
1151 __ Move(rbx, FeedbackVector());
1152 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1153 TypeFeedbackVector::MegamorphicSentinel(isolate()));
1154 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1155 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1156 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1157 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1158 __ j(above, &non_proxy);
1159 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1160 __ bind(&non_proxy);
1161 __ Push(rbx); // Smi
1162 __ Push(rax); // Array
1163 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1164 __ Push(rax); // Fixed array length (as smi).
1165 __ Push(Smi::FromInt(0)); // Initial index.
1167 // Generate code for doing the condition check.
// Loop-head stack layout (top down) appears to be: index, length,
// array/cache, map-or-smi, enumerable — 5 slots, matching the
// `addp(rsp, 5 * kPointerSize)` teardown below. TODO confirm against
// the elided lines.
1168 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1170 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1171 __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1172 __ j(above_equal, loop_statement.break_label());
1174 // Get the current entry of the array into register rbx.
1175 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1176 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1177 __ movp(rbx, FieldOperand(rbx,
1180 FixedArray::kHeaderSize));
1182 // Get the expected map from the stack or a smi in the
1183 // permanent slow case into register rdx.
1184 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1186 // Check if the expected map still matches that of the enumerable.
1187 // If not, we may have to filter the key.
1189 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1190 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1191 __ j(equal, &update_each, Label::kNear);
1193 // For proxies, no filtering is done.
1194 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1195 __ Cmp(rdx, Smi::FromInt(0));
1196 __ j(equal, &update_each, Label::kNear);
1198 // Convert the entry to a string or null if it isn't a property
1199 // anymore. If the property has been removed while iterating, we
1201 __ Push(rcx); // Enumerable.
1202 __ Push(rbx); // Current entry.
1203 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1204 __ Cmp(rax, Smi::FromInt(0));
// FILTER_KEY returned 0: key was deleted mid-iteration, skip it.
1205 __ j(equal, loop_statement.continue_label());
1208 // Update the 'each' property or variable from the possibly filtered
1209 // entry in register rbx.
1210 __ bind(&update_each);
1211 __ movp(result_register(), rbx);
1212 // Perform the assignment as if via '='.
1213 { EffectContext context(this);
1214 EmitAssignment(stmt->each());
1217 // Generate code for the body of the loop.
1218 Visit(stmt->body());
1220 // Generate code for going to the next element by incrementing the
1221 // index (smi) stored on top of the stack.
1222 __ bind(loop_statement.continue_label());
1223 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1225 EmitBackEdgeBookkeeping(stmt, &loop);
1228 // Remove the pointers stored on the stack.
1229 __ bind(loop_statement.break_label());
1230 __ addp(rsp, Immediate(5 * kPointerSize));
1232 // Exit and decrement the loop depth.
1233 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1235 decrement_loop_depth();
// Emits full-codegen for a JS for-of statement using the ES6 iteration
// protocol: obtain the iterator once, then per iteration call next(),
// test .done, assign .value to the loop variable, and run the body.
// The heavy lifting is delegated to the AST's desugared sub-expressions
// (assign_iterator / next_result / result_done / assign_each).
// NOTE(review): a few original lines (e.g. the fall-through label setup
// and the closing brace) are elided in this listing.
1239 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1240 Comment cmnt(masm_, "[ ForOfStatement");
1241 SetStatementPosition(stmt);
1243 Iteration loop_statement(this, stmt);
1244 increment_loop_depth();
1246 // var iterator = iterable[Symbol.iterator]();
1247 VisitForEffect(stmt->assign_iterator());
// Loop head: continue jumps back here to fetch the next result.
1250 __ bind(loop_statement.continue_label());
1252 // result = iterator.next()
1253 VisitForEffect(stmt->next_result());
1255 // if (result.done) break;
1256 Label result_not_done;
1257 VisitForControl(stmt->result_done(),
1258 loop_statement.break_label(),
1261 __ bind(&result_not_done);
1263 // each = result.value
1264 VisitForEffect(stmt->assign_each());
1266 // Generate code for the body of the loop.
1267 Visit(stmt->body());
1269 // Check stack before looping.
1270 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1271 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1272 __ jmp(loop_statement.continue_label());
1274 // Exit and decrement the loop depth.
1275 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1276 __ bind(loop_statement.break_label());
1277 decrement_loop_depth();
// Materializes a closure for |info|, leaving the result in rax.
// Fast path: FastNewClosureStub allocates in new space for simple nested
// functions (no literals to clone); otherwise falls back to
// Runtime::kNewClosure. NOTE(review): this listing elides the rest of the
// parameter list (a pretenure flag, by the true_value/false_value push
// below — TODO confirm), the stub-call/else lines, and the closing brace.
1281 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1283 // Use the fast case closure allocation code that allocates in new
1284 // space for nested functions that don't need literals cloning. If
1285 // we're running with the --always-opt or the --prepare-always-opt
1286 // flag, we need to use the runtime function so that the new function
1287 // we are creating here gets a chance to have its code optimized and
1288 // doesn't just get a copy of the existing unoptimized code.
1289 if (!FLAG_always_opt &&
1290 !FLAG_prepare_always_opt &&
1292 scope()->is_function_scope() &&
1293 info->num_literals() == 0) {
1294 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
// Slow path: third runtime argument selects pretenuring behavior.
1301 ? isolate()->factory()->true_value()
1302 : isolate()->factory()->false_value());
1303 __ CallRuntime(Runtime::kNewClosure, 3);
1305 context()->Plug(rax);
// Visits a variable reference by delegating to EmitVariableLoad, which
// handles all variable locations (global, local, context, lookup).
1309 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1310 Comment cmnt(masm_, "[ VariableProxy");
1311 EmitVariableLoad(expr);
// Loads the [home object] of the current function (used by 'super'
// references) via a named load of the home_object_symbol on the function
// itself; throws NonMethodError if the result is undefined (i.e. 'super'
// used outside a method). Result ends up in rax.
// NOTE(review): the 'done' label declaration/bind and closing brace are
// elided in this listing.
1316 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1317 Comment cnmt(masm_, "[ SuperReference ");
1319 __ movp(LoadDescriptor::ReceiverRegister(),
1320 Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1322 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1323 __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1325 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1327 __ Cmp(rax, isolate()->factory()->undefined_value());
1329 __ j(not_equal, &done);
1330 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
// Fast path for loading a DYNAMIC_GLOBAL variable: walks the context
// chain verifying no context has an extension object (which sloppy eval
// could have introduced); jumps to |slow| on any extension, otherwise
// performs a contextual global load. Clobbers rdx (temp) and
// kScratchRegister; deliberately preserves rsi (the context register).
// NOTE(review): the loop headers, 'fast' label handling and closing
// brace are elided in this listing.
1335 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1336 TypeofState typeof_state,
1338 Register context = rsi;
1339 Register temp = rdx;
// First phase: statically-known scopes up to the nearest eval scope.
1343 if (s->num_heap_slots() > 0) {
1344 if (s->calls_sloppy_eval()) {
1345 // Check that extension is NULL.
1346 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1348 __ j(not_equal, slow);
1350 // Load next context in chain.
1351 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1352 // Walk the rest of the chain without clobbering rsi.
1355 // If no outer scope calls eval, we do not need to check more
1356 // context extensions. If we have reached an eval scope, we check
1357 // all extensions from this point.
1358 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1359 s = s->outer_scope();
// Second phase: from an eval scope onward the chain length is unknown
// statically, so emit a runtime loop up to the native context.
1362 if (s != NULL && s->is_eval_scope()) {
1363 // Loop up the context chain. There is no frame effect so it is
1364 // safe to use raw labels here.
1366 if (!context.is(temp)) {
1367 __ movp(temp, context);
1369 // Load map for comparison into register, outside loop.
1370 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1372 // Terminate at native context.
1373 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1374 __ j(equal, &fast, Label::kNear);
1375 // Check that extension is NULL.
1376 __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1377 __ j(not_equal, slow);
1378 // Load next context in chain.
1379 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1384 // All extension objects were empty and it is safe to use a global
1386 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1387 __ Move(LoadDescriptor::NameRegister(), proxy->var()->name());
1388 if (FLAG_vector_ics) {
1389 __ Move(VectorLoadICDescriptor::SlotRegister(),
1390 Smi::FromInt(proxy->VariableFeedbackSlot()));
// Inside typeof an unresolvable global must not throw, so the IC mode
// differs from a plain contextual load.
1393 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
// Returns a MemOperand addressing the context slot of |var|, after
// emitting checks that every intervening context (and the final one)
// has a NULL extension object; jumps to |slow| otherwise. Uses rbx as
// temp while walking so rsi stays intact.
// NOTE(review): the closing brace and one line of the extension-compare
// operand are elided in this listing.
1400 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1402 DCHECK(var->IsContextSlot());
1403 Register context = rsi;
1404 Register temp = rbx;
1406 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1407 if (s->num_heap_slots() > 0) {
1408 if (s->calls_sloppy_eval()) {
1409 // Check that extension is NULL.
1410 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1412 __ j(not_equal, slow);
1414 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1415 // Walk the rest of the chain without clobbering rsi.
1419 // Check that last extension is NULL.
1420 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1421 __ j(not_equal, slow);
1423 // This function is used only for loads, not stores, so it's safe to
1424 // return an rsi-based operand (the write barrier cannot be allowed to
1425 // destroy the rsi register).
1426 return ContextOperand(context, var->index());
// Emits the fast path for loading a dynamically-scoped variable that
// might be shadowed by eval-introduced bindings. DYNAMIC_GLOBAL
// delegates to EmitLoadGlobalCheckExtensions; DYNAMIC_LOCAL loads the
// known local's context slot (with extension checks) and adds the
// hole check required for let/const bindings. Falls through to |slow|
// semantics via the labels; result, when produced, is in rax.
// NOTE(review): the jump-to-done lines and closing braces are elided
// in this listing.
1430 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1431 TypeofState typeof_state,
1434 // Generate fast-case code for variables that might be shadowed by
1435 // eval-introduced variables. Eval is used a lot without
1436 // introducing variables. In those cases, we do not want to
1437 // perform a runtime call for all variables in the scope
1438 // containing the eval.
1439 Variable* var = proxy->var();
1440 if (var->mode() == DYNAMIC_GLOBAL) {
1441 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1443 } else if (var->mode() == DYNAMIC_LOCAL) {
1444 Variable* local = var->local_if_not_shadowed();
1445 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1446 if (local->mode() == LET || local->mode() == CONST ||
1447 local->mode() == CONST_LEGACY) {
// The hole marks an uninitialized binding (temporal dead zone).
1448 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1449 __ j(not_equal, done);
1450 if (local->mode() == CONST_LEGACY) {
1451 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1452 } else { // LET || CONST
1453 __ Push(var->name());
1454 __ CallRuntime(Runtime::kThrowReferenceError, 1);
// Loads the value of the variable referenced by |proxy| into the current
// expression context, dispatching on the variable's storage location:
// UNALLOCATED (global, via load IC), PARAMETER/LOCAL/CONTEXT (direct
// slot access plus hole checks for let/const), and LOOKUP (dynamic
// lookup with a runtime fallback).
// NOTE(review): several break statements, bind sites and closing braces
// are elided in this listing.
1462 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1463 // Record position before possible IC call.
1464 SetSourcePosition(proxy->position());
1465 Variable* var = proxy->var();
1467 // Three cases: global variables, lookup variables, and all other types of
1469 switch (var->location()) {
1470 case Variable::UNALLOCATED: {
1471 Comment cmnt(masm_, "[ Global variable");
1472 __ Move(LoadDescriptor::NameRegister(), var->name());
1473 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1474 if (FLAG_vector_ics) {
1475 __ Move(VectorLoadICDescriptor::SlotRegister(),
1476 Smi::FromInt(proxy->VariableFeedbackSlot()));
1478 CallLoadIC(CONTEXTUAL);
1479 context()->Plug(rax);
1483 case Variable::PARAMETER:
1484 case Variable::LOCAL:
1485 case Variable::CONTEXT: {
1486 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1488 if (var->binding_needs_init()) {
1489 // var->scope() may be NULL when the proxy is located in eval code and
1490 // refers to a potential outside binding. Currently those bindings are
1491 // always looked up dynamically, i.e. in that case
1492 // var->location() == LOOKUP.
1494 DCHECK(var->scope() != NULL);
1496 // Check if the binding really needs an initialization check. The check
1497 // can be skipped in the following situation: we have a LET or CONST
1498 // binding in harmony mode, both the Variable and the VariableProxy have
1499 // the same declaration scope (i.e. they are both in global code, in the
1500 // same function or in the same eval code) and the VariableProxy is in
1501 // the source physically located after the initializer of the variable.
1503 // We cannot skip any initialization checks for CONST in non-harmony
1504 // mode because const variables may be declared but never initialized:
1505 // if (false) { const x; }; var y = x;
1507 // The condition on the declaration scopes is a conservative check for
1508 // nested functions that access a binding and are called before the
1509 // binding is initialized:
1510 // function() { f(); let x = 1; function f() { x = 2; } }
1512 bool skip_init_check;
1513 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1514 skip_init_check = false;
1516 // Check that we always have valid source position.
1517 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1518 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1519 skip_init_check = var->mode() != CONST_LEGACY &&
1520 var->initializer_position() < proxy->position();
1523 if (!skip_init_check) {
1524 // Let and const need a read barrier.
1527 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1528 __ j(not_equal, &done, Label::kNear);
1529 if (var->mode() == LET || var->mode() == CONST) {
1530 // Throw a reference error when using an uninitialized let/const
1531 // binding in harmony mode.
1532 __ Push(var->name());
1533 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1535 // Uninitalized const bindings outside of harmony mode are unholed.
1536 DCHECK(var->mode() == CONST_LEGACY);
1537 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1540 context()->Plug(rax);
// No init check needed: plug the slot directly into the context.
1544 context()->Plug(var);
1548 case Variable::LOOKUP: {
1549 Comment cmnt(masm_, "[ Lookup slot");
1551 // Generate code for loading from variables potentially shadowed
1552 // by eval-introduced variables.
1553 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
// Slow path: full runtime lookup by name in the current context.
1555 __ Push(rsi); // Context.
1556 __ Push(var->name());
1557 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1559 context()->Plug(rax);
// Emits code for a regexp literal: fetch the cached literal from the
// function's literals array, materializing it via the runtime on first
// use, then shallow-copy it into a freshly allocated JSRegExp (with a
// runtime allocation fallback). Result in rax.
// NOTE(review): the 'materialized' label declaration, a couple of jumps
// and the closing brace are elided in this listing.
1566 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1567 Comment cmnt(masm_, "[ RegExpLiteral");
1569 // Registers will be used as follows:
1570 // rdi = JS function.
1571 // rcx = literals array.
1572 // rbx = regexp literal.
1573 // rax = regexp literal clone.
1574 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1575 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1576 int literal_offset =
1577 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1578 __ movp(rbx, FieldOperand(rcx, literal_offset));
// Undefined slot means the literal has not been materialized yet.
1579 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1580 __ j(not_equal, &materialized, Label::kNear);
1582 // Create regexp literal using runtime function
1583 // Result will be in rax.
1585 __ Push(Smi::FromInt(expr->literal_index()));
1586 __ Push(expr->pattern());
1587 __ Push(expr->flags());
1588 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1591 __ bind(&materialized);
1592 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1593 Label allocated, runtime_allocate;
1594 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1597 __ bind(&runtime_allocate);
1599 __ Push(Smi::FromInt(size));
1600 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1603 __ bind(&allocated);
1604 // Copy the content into the newly allocated memory.
1605 // (Unroll copy loop once for better throughput).
1606 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1607 __ movp(rdx, FieldOperand(rbx, i));
1608 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1609 __ movp(FieldOperand(rax, i), rdx);
1610 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
// Copy the trailing word when the object size is an odd word count.
1612 if ((size % (2 * kPointerSize)) != 0) {
1613 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1614 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1616 context()->Plug(rax);
// Pushes an accessor (getter or setter) expression as a stack value,
// substituting null when the accessor is absent (NULL), so accessor
// pairs always occupy two stack slots for the runtime call.
1619 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1620 if (expression == NULL) {
1621 __ PushRoot(Heap::kNullValueRootIndex);
1623 VisitForStackValue(expression);
// Emits code for an object literal: clone the boilerplate (fast stub or
// Runtime::kCreateObjectLiteral), then emit stores for each non-constant
// property — store IC for internalized-string keys, Runtime::kSetProperty
// for computed keys, kSetPrototype for __proto__, and batched
// kDefineAccessorPropertyUnchecked calls for getter/setter pairs.
// NOTE(review): several break statements, an else branch of the
// clone-path if, stub invocation lines and closing braces are elided in
// this listing.
1628 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1629 Comment cmnt(masm_, "[ ObjectLiteral");
1631 expr->BuildConstantProperties(isolate());
1632 Handle<FixedArray> constant_properties = expr->constant_properties();
1633 int flags = expr->fast_elements()
1634 ? ObjectLiteral::kFastElements
1635 : ObjectLiteral::kNoFlags;
1636 flags |= expr->has_function()
1637 ? ObjectLiteral::kHasFunction
1638 : ObjectLiteral::kNoFlags;
1639 int properties_count = constant_properties->length() / 2;
// Runtime path when the stub cannot handle the literal shape.
1640 if (expr->may_store_doubles() || expr->depth() > 1 ||
1641 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1642 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1643 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1644 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1645 __ Push(Smi::FromInt(expr->literal_index()));
1646 __ Push(constant_properties);
1647 __ Push(Smi::FromInt(flags));
1648 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1650 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1651 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1652 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1653 __ Move(rcx, constant_properties);
1654 __ Move(rdx, Smi::FromInt(flags));
1655 FastCloneShallowObjectStub stub(isolate(), properties_count);
1659 // If result_saved is true the result is on top of the stack. If
1660 // result_saved is false the result is in rax.
1661 bool result_saved = false;
1663 // Mark all computed expressions that are bound to a key that
1664 // is shadowed by a later occurrence of the same key. For the
1665 // marked expressions, no store code is emitted.
1666 expr->CalculateEmitStore(zone());
1668 AccessorTable accessor_table(zone());
1669 for (int i = 0; i < expr->properties()->length(); i++) {
1670 ObjectLiteral::Property* property = expr->properties()->at(i);
1671 if (property->IsCompileTimeValue()) continue;
1673 Literal* key = property->key();
1674 Expression* value = property->value();
1675 if (!result_saved) {
1676 __ Push(rax); // Save result on the stack
1677 result_saved = true;
1679 switch (property->kind()) {
1680 case ObjectLiteral::Property::CONSTANT:
1682 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1683 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1685 case ObjectLiteral::Property::COMPUTED:
1686 if (key->value()->IsInternalizedString()) {
1687 if (property->emit_store()) {
1688 VisitForAccumulatorValue(value);
1689 DCHECK(StoreDescriptor::ValueRegister().is(rax));
1690 __ Move(StoreDescriptor::NameRegister(), key->value());
1691 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1692 CallStoreIC(key->LiteralFeedbackId());
1693 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1695 VisitForEffect(value);
1699 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1700 VisitForStackValue(key);
1701 VisitForStackValue(value);
1702 if (property->emit_store()) {
1703 __ Push(Smi::FromInt(SLOPPY)); // Language mode argument (SLOPPY).
1704 __ CallRuntime(Runtime::kSetProperty, 4);
1709 case ObjectLiteral::Property::PROTOTYPE:
1710 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1711 VisitForStackValue(value);
1712 if (property->emit_store()) {
1713 __ CallRuntime(Runtime::kSetPrototype, 2);
1718 case ObjectLiteral::Property::GETTER:
1719 accessor_table.lookup(key)->second->getter = value;
1721 case ObjectLiteral::Property::SETTER:
1722 accessor_table.lookup(key)->second->setter = value;
1727 // Emit code to define accessors, using only a single call to the runtime for
1728 // each pair of corresponding getters and setters.
1729 for (AccessorTable::Iterator it = accessor_table.begin();
1730 it != accessor_table.end();
1732 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1733 VisitForStackValue(it->first);
1734 EmitAccessor(it->second->getter);
1735 EmitAccessor(it->second->setter);
1736 __ Push(Smi::FromInt(NONE));
1737 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1740 if (expr->has_function()) {
1741 DCHECK(result_saved);
1742 __ Push(Operand(rsp, 0));
1743 __ CallRuntime(Runtime::kToFastProperties, 1);
1747 context()->PlugTOS();
1749 context()->Plug(rax);
// Emits code for an array literal: clone the boilerplate (fast stub or
// Runtime::kCreateArrayLiteral for deep/large literals), then evaluate
// and store each non-compile-time element — direct elements store plus
// write barrier for fast object elements, StoreArrayLiteralElementStub
// otherwise.
// NOTE(review): the else branch markers, stub invocation lines and
// closing braces are elided in this listing.
1754 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1755 Comment cmnt(masm_, "[ ArrayLiteral");
1757 expr->BuildConstantElements(isolate());
1758 int flags = expr->depth() == 1
1759 ? ArrayLiteral::kShallowElements
1760 : ArrayLiteral::kNoFlags;
1762 ZoneList<Expression*>* subexprs = expr->values();
1763 int length = subexprs->length();
1764 Handle<FixedArray> constant_elements = expr->constant_elements();
1765 DCHECK_EQ(2, constant_elements->length());
1766 ElementsKind constant_elements_kind =
1767 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1768 bool has_constant_fast_elements =
1769 IsFastObjectElementsKind(constant_elements_kind);
1770 Handle<FixedArrayBase> constant_elements_values(
1771 FixedArrayBase::cast(constant_elements->get(1)));
1773 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1774 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1775 // If the only customer of allocation sites is transitioning, then
1776 // we can turn it off if we don't have anywhere else to transition to.
1777 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1780 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1781 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1782 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1783 __ Push(Smi::FromInt(expr->literal_index()));
1784 __ Push(constant_elements);
1785 __ Push(Smi::FromInt(flags));
1786 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1788 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1789 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1790 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1791 __ Move(rcx, constant_elements);
1792 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1796 bool result_saved = false; // Is the result saved to the stack?
1798 // Emit code to evaluate all the non-constant subexpressions and to store
1799 // them into the newly cloned array.
1800 for (int i = 0; i < length; i++) {
1801 Expression* subexpr = subexprs->at(i);
1802 // If the subexpression is a literal or a simple materialized literal it
1803 // is already set in the cloned array.
1804 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1806 if (!result_saved) {
// Keep both the literal and its index on the stack for the stub path.
1807 __ Push(rax); // array literal
1808 __ Push(Smi::FromInt(expr->literal_index()));
1809 result_saved = true;
1811 VisitForAccumulatorValue(subexpr);
1813 if (IsFastObjectElementsKind(constant_elements_kind)) {
1814 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1815 // cannot transition and don't need to call the runtime stub.
1816 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1817 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1818 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1819 // Store the subexpression value in the array's elements.
1820 __ movp(FieldOperand(rbx, offset), result_register());
1821 // Update the write barrier for the array store.
1822 __ RecordWriteField(rbx, offset, result_register(), rcx,
1824 EMIT_REMEMBERED_SET,
1827 // Store the subexpression value in the array's elements.
1828 __ Move(rcx, Smi::FromInt(i));
1829 StoreArrayLiteralElementStub stub(isolate());
1833 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1837 __ addp(rsp, Immediate(kPointerSize)); // literal index
1838 context()->PlugTOS();
1840 context()->Plug(rax);
// Emits code for an assignment expression. Classifies the target as
// VARIABLE, NAMED_PROPERTY or KEYED_PROPERTY, evaluates the LHS
// sub-expressions (keeping extra copies for compound assignments),
// evaluates the RHS (with an inline-smi or generic binary op for
// compound forms), then dispatches to the matching store emitter.
// NOTE(review): case labels for VARIABLE, several break statements and
// closing braces are elided in this listing.
1845 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1846 DCHECK(expr->target()->IsValidReferenceExpression());
1848 Comment cmnt(masm_, "[ Assignment");
1850 // Left-hand side can only be a property, a global or a (parameter or local)
1852 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1853 LhsKind assign_type = VARIABLE;
1854 Property* property = expr->target()->AsProperty();
1855 if (property != NULL) {
1856 assign_type = (property->key()->IsPropertyName())
1861 // Evaluate LHS expression.
1862 switch (assign_type) {
1864 // Nothing to do here.
1866 case NAMED_PROPERTY:
1867 if (expr->is_compound()) {
1868 // We need the receiver both on the stack and in the register.
1869 VisitForStackValue(property->obj());
1870 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1872 VisitForStackValue(property->obj());
1875 case KEYED_PROPERTY: {
1876 if (expr->is_compound()) {
1877 VisitForStackValue(property->obj());
1878 VisitForStackValue(property->key());
1879 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1880 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1882 VisitForStackValue(property->obj());
1883 VisitForStackValue(property->key());
1889 // For compound assignments we need another deoptimization point after the
1890 // variable/property load.
1891 if (expr->is_compound()) {
1892 { AccumulatorValueContext context(this);
1893 switch (assign_type) {
1895 EmitVariableLoad(expr->target()->AsVariableProxy());
1896 PrepareForBailout(expr->target(), TOS_REG);
1898 case NAMED_PROPERTY:
1899 EmitNamedPropertyLoad(property);
1900 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1902 case KEYED_PROPERTY:
1903 EmitKeyedPropertyLoad(property);
1904 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1909 Token::Value op = expr->binary_op();
1910 __ Push(rax); // Left operand goes on the stack.
1911 VisitForAccumulatorValue(expr->value());
1913 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1916 SetSourcePosition(expr->position() + 1);
1917 AccumulatorValueContext context(this);
1918 if (ShouldInlineSmiCase(op)) {
1919 EmitInlineSmiBinaryOp(expr->binary_operation(),
1925 EmitBinaryOp(expr->binary_operation(), op, mode);
1927 // Deoptimization point in case the binary operation may have side effects.
1928 PrepareForBailout(expr->binary_operation(), TOS_REG);
// Simple (non-compound) assignment: just evaluate the RHS.
1930 VisitForAccumulatorValue(expr->value());
1933 // Record source position before possible IC call.
1934 SetSourcePosition(expr->position());
1937 switch (assign_type) {
1939 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1941 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1942 context()->Plug(rax);
1944 case NAMED_PROPERTY:
1945 EmitNamedPropertyAssignment(expr);
1947 case KEYED_PROPERTY:
1948 EmitKeyedPropertyAssignment(expr);
// Emits code for a yield expression inside a generator. Handles four
// kinds: kSuspend/kInitial (save continuation offset + context in the
// generator object, suspend via the runtime unless the frame needs no
// runtime help, and return the boxed iterator result), kFinal (mark the
// generator closed and return {value, done:true}), and kDelegating
// (yield* — an inline loop that calls the delegate iterator's next/throw
// under a try handler until result.done is true).
// NOTE(review): numerous lines (break statements, bind sites, jumps,
// stub invocations and closing braces) are elided in this listing.
1954 void FullCodeGenerator::VisitYield(Yield* expr) {
1955 Comment cmnt(masm_, "[ Yield");
1956 // Evaluate yielded value first; the initial iterator definition depends on
1957 // this. It stays on the stack while we update the iterator.
1958 VisitForStackValue(expr->expression());
1960 switch (expr->yield_kind()) {
1961 case Yield::kSuspend:
1962 // Pop value from top-of-stack slot; box result into result register.
1963 EmitCreateIteratorResult(false);
1964 __ Push(result_register());
1966 case Yield::kInitial: {
1967 Label suspend, continuation, post_runtime, resume;
1971 __ bind(&continuation);
1975 VisitForAccumulatorValue(expr->generator_object());
1976 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
// Record where to resume: the continuation label's code offset,
// stored as a smi in the generator object.
1977 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1978 Smi::FromInt(continuation.pos()));
1979 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1981 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
1983 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1985 __ j(equal, &post_runtime);
1986 __ Push(rax); // generator object
1987 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1988 __ movp(context_register(),
1989 Operand(rbp, StandardFrameConstants::kContextOffset));
1990 __ bind(&post_runtime);
1992 __ Pop(result_register());
1993 EmitReturnSequence();
1996 context()->Plug(result_register());
2000 case Yield::kFinal: {
2001 VisitForAccumulatorValue(expr->generator_object());
2002 __ Move(FieldOperand(result_register(),
2003 JSGeneratorObject::kContinuationOffset),
2004 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2005 // Pop value from top-of-stack slot, box result into result register.
2006 EmitCreateIteratorResult(true);
2007 EmitUnwindBeforeReturn();
2008 EmitReturnSequence();
2012 case Yield::kDelegating: {
2013 VisitForStackValue(expr->generator_object());
2015 // Initial stack layout is as follows:
2016 // [sp + 1 * kPointerSize] iter
2017 // [sp + 0 * kPointerSize] g
2019 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2020 Label l_next, l_call, l_loop;
2021 Register load_receiver = LoadDescriptor::ReceiverRegister();
2022 Register load_name = LoadDescriptor::NameRegister();
2024 // Initial send value is undefined.
2025 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2028 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
// Register this catch site in the handler table so exceptions thrown
// into the generator are forwarded to the delegate's 'throw' method.
2030 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2031 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2033 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2034 __ Push(rax); // exception
2037 // try { received = %yield result }
2038 // Shuffle the received result above a try handler and yield it without
2041 __ Pop(rax); // result
2042 __ PushTryHandler(StackHandler::CATCH, expr->index());
2043 const int handler_size = StackHandlerConstants::kSize;
2044 __ Push(rax); // result
2046 __ bind(&l_continuation);
2048 __ bind(&l_suspend);
2049 const int generator_object_depth = kPointerSize + handler_size;
2050 __ movp(rax, Operand(rsp, generator_object_depth));
2052 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2053 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2054 Smi::FromInt(l_continuation.pos()));
2055 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2057 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2059 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2060 __ movp(context_register(),
2061 Operand(rbp, StandardFrameConstants::kContextOffset));
2062 __ Pop(rax); // result
2063 EmitReturnSequence();
2064 __ bind(&l_resume); // received in rax
2067 // receiver = iter; f = 'next'; arg = received;
2070 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2071 __ Push(load_name); // "next"
2072 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2073 __ Push(rax); // received
2075 // result = receiver[f](arg);
2077 __ movp(load_receiver, Operand(rsp, kPointerSize));
2078 if (FLAG_vector_ics) {
2079 __ Move(VectorLoadICDescriptor::SlotRegister(),
2080 Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
2082 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2083 CallIC(ic, TypeFeedbackId::None());
2085 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2086 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2089 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2090 __ Drop(1); // The function is still on the stack; drop it.
2092 // if (!result.done) goto l_try;
2094 __ Move(load_receiver, rax);
2095 __ Push(load_receiver); // save result
2096 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2097 if (FLAG_vector_ics) {
2098 __ Move(VectorLoadICDescriptor::SlotRegister(),
2099 Smi::FromInt(expr->DoneFeedbackSlot()));
2101 CallLoadIC(NOT_CONTEXTUAL); // rax=result.done
2102 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2104 __ testp(result_register(), result_register());
2108 __ Pop(load_receiver); // result
2109 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2110 if (FLAG_vector_ics) {
2111 __ Move(VectorLoadICDescriptor::SlotRegister(),
2112 Smi::FromInt(expr->ValueFeedbackSlot()));
2114 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2115 context()->DropAndPlug(2, rax); // drop iter and g
// Resumes a suspended generator object. The generator is evaluated onto the
// stack and the sent/thrown value into the accumulator; the code then
// dispatches on the generator's continuation field, rebuilds the suspended
// activation's frame, and either jumps straight back into the generator code
// (fast path) or calls the runtime to restore the operand stack and handlers.
2122 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2124     JSGeneratorObject::ResumeMode resume_mode) {
2125   // The value stays in rax, and is ultimately read by the resumed generator, as
2126   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2127   // is read to throw the value when the resumed generator is already closed.
2128   // rbx will hold the generator object until the activation has been resumed.
2129   VisitForStackValue(generator);
2130   VisitForAccumulatorValue(value);
// Dispatch on the continuation value: == kGeneratorClosed (0) means closed,
// a negative value (kGeneratorExecuting) means the generator is running —
// the STATIC_ASSERTs below pin down the encoding this relies on.
2133   // Check generator state.
2134   Label wrong_state, closed_state, done;
2135   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2136   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2137   __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2139   __ j(equal, &closed_state);
2140   __ j(less, &wrong_state);
2142   // Load suspended function and context.
2143   __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2144   __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2147   __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2149   // Push holes for arguments to generator function.
2150   __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2151   __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2152       SharedFunctionInfo::kFormalParameterCountOffset);
2153   __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
// rdx counts the formal parameters down to -1; the borrow (carry) from the
// subtraction below terminates the loop, so exactly rdx holes are pushed.
2154   Label push_argument_holes, push_frame;
2155   __ bind(&push_argument_holes);
2156   __ subp(rdx, Immediate(1));
2157   __ j(carry, &push_frame);
2159   __ jmp(&push_argument_holes);
2161   // Enter a new JavaScript frame, and initialize its slots as they were when
2162   // the generator was suspended.
// The call pushes a return address (the label immediately after it), which
// becomes the new frame's return address slot.
2164   __ bind(&push_frame);
2165   __ call(&resume_frame);
2167   __ bind(&resume_frame);
2168   __ pushq(rbp);  // Caller's frame pointer.
2170   __ Push(rsi);  // Callee's context.
2171   __ Push(rdi);  // Callee's JS Function.
2173   // Load the operand stack size.
2174   __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2175   __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2176   __ SmiToInteger32(rdx, rdx);
2178   // If we are sending a value and there is no operand stack, we can jump back
// Fast path: resume by computing the continuation offset into the code
// object and marking the generator as executing before jumping in.
2180   if (resume_mode == JSGeneratorObject::NEXT) {
2182     __ cmpp(rdx, Immediate(0));
2183     __ j(not_zero, &slow_resume);
2184     __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2185     __ SmiToInteger64(rcx,
2186         FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2188     __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2189             Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2191     __ bind(&slow_resume);
2194   // Otherwise, we push holes for the operand stack and call the runtime to fix
2195   // up the stack and the handlers.
2196   Label push_operand_holes, call_resume;
2197   __ bind(&push_operand_holes);
2198   __ subp(rdx, Immediate(1));
2199   __ j(carry, &call_resume);
2201   __ jmp(&push_operand_holes);
2202   __ bind(&call_resume);
2204   __ Push(result_register());
2205   __ Push(Smi::FromInt(resume_mode));
2206   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2207   // Not reached: the runtime call returns elsewhere.
2208   __ Abort(kGeneratorFailedToResume);
2210   // Reach here when generator is closed.
2211   __ bind(&closed_state);
2212   if (resume_mode == JSGeneratorObject::NEXT) {
2213     // Return completed iterator result when generator is closed.
2214     __ PushRoot(Heap::kUndefinedValueRootIndex);
2215     // Pop value from top-of-stack slot; box result into result register.
2216     EmitCreateIteratorResult(true);
2218     // Throw the provided value.
2220     __ CallRuntime(Runtime::kThrow, 1);
2224   // Throw error if we attempt to operate on a running generator.
2225   __ bind(&wrong_state);
2227   __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2230   context()->Plug(result_register());
// Allocates and initializes a JS iterator-result object {value, done} in rax.
// The value is taken from the top-of-stack slot; |done| is a compile-time
// constant baked in as a boolean heap object. Falls back to a runtime
// allocation when inline allocation fails.
2234 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2238   Handle<Map> map(isolate()->native_context()->iterator_result_map());
2240   __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
// Slow path: allocate in new space via the runtime, then restore the context
// register which the call may have clobbered.
2243   __ bind(&gc_required);
2244   __ Push(Smi::FromInt(map->instance_size()));
2245   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2246   __ movp(context_register(),
2247           Operand(rbp, StandardFrameConstants::kContextOffset));
2249   __ bind(&allocated);
2252   __ Move(rdx, isolate()->factory()->ToBoolean(done));
// The DCHECK pins the expected object layout: map + properties + elements +
// value + done = 5 words, matching the stores below.
2253   DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2254   __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2255   __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2256           isolate()->factory()->empty_fixed_array());
2257   __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2258           isolate()->factory()->empty_fixed_array());
2259   __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2261   __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2264   // Only the value field needs a write barrier, as the other values are in the
2266   __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2267                       rcx, rdx, kDontSaveFPRegs);
// Emits a named-property load (obj.key) through the LoadIC. The receiver is
// expected in LoadDescriptor::ReceiverRegister(); the literal key is moved
// into the name register. With vector ICs the feedback slot rides in the
// slot register, otherwise the feedback id is attached to the IC call.
2271 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2272   SetSourcePosition(prop->position());
2273   Literal* key = prop->key()->AsLiteral();
2274   __ Move(LoadDescriptor::NameRegister(), key->value());
2275   if (FLAG_vector_ics) {
2276     __ Move(VectorLoadICDescriptor::SlotRegister(),
2277             Smi::FromInt(prop->PropertyFeedbackSlot()));
2278     CallLoadIC(NOT_CONTEXTUAL);
2280     CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
// Emits a named super-property load (super.key). There is no IC for super
// access here: the home object, receiver and key are pushed and
// Runtime::kLoadFromSuper performs the lookup.
2285 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2286   SetSourcePosition(prop->position());
2287   Literal* key = prop->key()->AsLiteral();
2288   DCHECK(!key->value()->IsSmi());
2289   DCHECK(prop->IsSuperAccess());
2291   SuperReference* super_ref = prop->obj()->AsSuperReference();
2292   EmitLoadHomeObject(super_ref);
2294   VisitForStackValue(super_ref->this_var());
2295   __ Push(key->value());
2296   __ CallRuntime(Runtime::kLoadFromSuper, 3);
// Emits a keyed-property load (obj[key]) through the KeyedLoadIC. Receiver
// and key registers are set up by the caller; with vector ICs the feedback
// slot is passed in the slot register, otherwise the feedback id is attached
// to the IC call site.
2300 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2301   SetSourcePosition(prop->position());
2302   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2303   if (FLAG_vector_ics) {
2304     __ Move(VectorLoadICDescriptor::SlotRegister(),
2305             Smi::FromInt(prop->PropertyFeedbackSlot()));
2308     CallIC(ic, prop->PropertyFeedbackId());
// Emits a binary operation with an inlined smi fast path. A patchable smi
// check (JumpPatchSite) selects between the inline smi case and the generic
// BinaryOpIC stub; the smi helpers below bail out to the stub call on
// overflow or non-smi results.
2313 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2317     Expression* right) {
2318   // Do combined smi check of the operands. Left operand is on the
2319   // stack (popped into rdx). Right operand is in rax but moved into
2320   // rcx to make the shifts easier.
2321   Label done, stub_call, smi_case;
2325   JumpPatchSite patch_site(masm_);
2326   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
// Generic path: call the BinaryOpIC and record the patch-site info so the IC
// can later patch the smi check above.
2328   __ bind(&stub_call);
2330   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2331   CallIC(code, expr->BinaryOperationFeedbackId());
2332   patch_site.EmitPatchInfo();
2333   __ jmp(&done, Label::kNear);
// Inline smi case: dispatch on the token; operations that can fail (shift
// left, logical shift, add/sub/mul) take &stub_call as their bailout label.
2338       __ SmiShiftArithmeticRight(rax, rdx, rcx);
2341       __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2344       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2347       __ SmiAdd(rax, rdx, rcx, &stub_call);
2350       __ SmiSub(rax, rdx, rcx, &stub_call);
2353       __ SmiMul(rax, rdx, rcx, &stub_call);
2356       __ SmiOr(rax, rdx, rcx);
2358     case Token::BIT_AND:
2359       __ SmiAnd(rax, rdx, rcx);
2361     case Token::BIT_XOR:
2362       __ SmiXor(rax, rdx, rcx);
2370   context()->Plug(rax);
// Emits a binary operation with no inlined smi fast path: the BinaryOpIC
// stub handles all cases. The deliberately-unbound JumpPatchSite signals to
// the IC machinery that there is no inline code to patch.
2374 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2376     OverwriteMode mode) {
2378   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2379   JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
2380   CallIC(code, expr->BinaryOperationFeedbackId());
2381   patch_site.EmitPatchInfo();
2382   context()->Plug(rax);
// Stores the accumulator (rax) into the reference denoted by |expr|. The
// target is classified as a variable, a named property, or a keyed property,
// and the appropriate store (variable assignment, StoreIC, or KeyedStoreIC)
// is emitted. The value is preserved across evaluation of the receiver/key
// by pushing it and popping it back into the value register.
2386 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2387   DCHECK(expr->IsValidReferenceExpression());
2389   // Left-hand side can only be a property, a global or a (parameter or local)
2391   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2392   LhsKind assign_type = VARIABLE;
2393   Property* prop = expr->AsProperty();
2395     assign_type = (prop->key()->IsPropertyName())
2400   switch (assign_type) {
2402       Variable* var = expr->AsVariableProxy()->var();
2403       EffectContext context(this);
2404       EmitVariableAssignment(var, Token::ASSIGN);
2407     case NAMED_PROPERTY: {
2408       __ Push(rax);  // Preserve value.
2409       VisitForAccumulatorValue(prop->obj());
2410       __ Move(StoreDescriptor::ReceiverRegister(), rax);
2411       __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
2412       __ Move(StoreDescriptor::NameRegister(),
2413               prop->key()->AsLiteral()->value());
2417     case KEYED_PROPERTY: {
2418       __ Push(rax);  // Preserve value.
2419       VisitForStackValue(prop->obj());
2420       VisitForAccumulatorValue(prop->key());
2421       __ Move(StoreDescriptor::NameRegister(), rax);
2422       __ Pop(StoreDescriptor::ReceiverRegister());
2423       __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
2425           CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2430   context()->Plug(rax);
// Stores rax into a stack-local or context slot. Context slots live on the
// heap, so a write barrier is emitted for them; stack locals need none.
2434 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2435     Variable* var, MemOperand location) {
2436   __ movp(location, rax);
2437   if (var->IsContextSlot()) {
2439     __ RecordWriteContextSlot(
2440         rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
// Stores the accumulator into variable |var| according to its allocation
// (global/unallocated, lookup slot, stack local, context slot) and the
// assignment kind |op| (plain assignment vs. const/let initialization).
// Legacy-const initializers and non-initializing let assignments get
// hole-checks; non-initializing const assignments are silently dropped.
2445 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2447   if (var->IsUnallocated()) {
2448     // Global var, const, or let.
2449     __ Move(StoreDescriptor::NameRegister(), var->name());
2450     __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2453   } else if (op == Token::INIT_CONST_LEGACY) {
2454     // Const initializers need a write barrier.
2455     DCHECK(!var->IsParameter());  // No const parameters.
2456     if (var->IsLookupSlot()) {
2459       __ Push(var->name());
2460       __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2462       DCHECK(var->IsStackLocal() || var->IsContextSlot());
// Only initialize the slot if it still holds the hole; a legacy const may
// only be written once.
2464       MemOperand location = VarOperand(var, rcx);
2465       __ movp(rdx, location);
2466       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2467       __ j(not_equal, &skip);
2468       EmitStoreToStackLocalOrContextSlot(var, location);
2472   } else if (var->mode() == LET && op != Token::INIT_LET) {
2473     // Non-initializing assignment to let variable needs a write barrier.
2474     DCHECK(!var->IsLookupSlot());
2475     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
// TDZ check: assigning to a let before its initialization (slot still holds
// the hole) throws a ReferenceError.
2477     MemOperand location = VarOperand(var, rcx);
2478     __ movp(rdx, location);
2479     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2480     __ j(not_equal, &assign, Label::kNear);
2481     __ Push(var->name());
2482     __ CallRuntime(Runtime::kThrowReferenceError, 1);
2484     EmitStoreToStackLocalOrContextSlot(var, location);
2486   } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2487     if (var->IsLookupSlot()) {
2488       // Assignment to var.
2489       __ Push(rax);  // Value.
2490       __ Push(rsi);  // Context.
2491       __ Push(var->name());
2492       __ Push(Smi::FromInt(strict_mode()));
2493       __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2495       // Assignment to var or initializing assignment to let/const in harmony
2497       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2498       MemOperand location = VarOperand(var, rcx);
2499       if (generate_debug_code_ && op == Token::INIT_LET) {
2500         // Check for an uninitialized let binding.
2501         __ movp(rdx, location);
2502         __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2503         __ Check(equal, kLetBindingReInitialization);
2505       EmitStoreToStackLocalOrContextSlot(var, location);
2508   // Non-initializing assignments to consts are ignored.
// Completes an assignment to a named property via the StoreIC. The value is
// already in the value register (rax); the receiver is popped off the stack
// and the literal key goes in the name register.
2512 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2513   // Assignment to a property, using a named store IC.
2514   Property* prop = expr->target()->AsProperty();
2515   DCHECK(prop != NULL);
2516   DCHECK(prop->key()->IsLiteral());
2518   // Record source code position before IC call.
2519   SetSourcePosition(expr->position());
2520   __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2521   __ Pop(StoreDescriptor::ReceiverRegister());
2522   CallStoreIC(expr->AssignmentFeedbackId());
2524   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2525   context()->Plug(rax);
// Completes an assignment to a keyed property via the KeyedStoreIC. Key and
// receiver are popped from the stack (pushed earlier by the caller); the
// value is already in the value register (asserted to be rax).
2529 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2530   // Assignment to a property, using a keyed store IC.
2532   __ Pop(StoreDescriptor::NameRegister());  // Key.
2533   __ Pop(StoreDescriptor::ReceiverRegister());
2534   DCHECK(StoreDescriptor::ValueRegister().is(rax));
2535   // Record source code position before IC call.
2536   SetSourcePosition(expr->position());
2537   Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2538   CallIC(ic, expr->AssignmentFeedbackId());
2540   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2541   context()->Plug(rax);
// Generates code for a property access expression. Named (non-super) loads
// evaluate the receiver into the accumulator and go through the LoadIC;
// named super loads go through the runtime; keyed loads evaluate receiver
// and key and go through the KeyedLoadIC. The result is plugged into the
// current expression context from rax.
2545 void FullCodeGenerator::VisitProperty(Property* expr) {
2546   Comment cmnt(masm_, "[ Property");
2547   Expression* key = expr->key();
2549   if (key->IsPropertyName()) {
2550     if (!expr->IsSuperAccess()) {
2551       VisitForAccumulatorValue(expr->obj());
2552       DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2553       __ movp(LoadDescriptor::ReceiverRegister(), rax);
2554       EmitNamedPropertyLoad(expr);
2556       EmitNamedSuperPropertyLoad(expr);
2558     PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2559     context()->Plug(rax);
2561     VisitForStackValue(expr->obj());
2562     VisitForAccumulatorValue(expr->key());
2563     __ Move(LoadDescriptor::NameRegister(), rax);
2564     __ Pop(LoadDescriptor::ReceiverRegister());
2565     EmitKeyedPropertyLoad(expr);
2566     context()->Plug(rax);
// Emits a call to an inline cache stub, attaching |ast_id| to the reloc info
// so type feedback can be tied back to the AST node.
2571 void FullCodeGenerator::CallIC(Handle<Code> code,
2572     TypeFeedbackId ast_id) {
2574   __ call(code, RelocInfo::CODE_TARGET, ast_id);
2578 // Code common for calls using the IC.
// Sets up the stack for a call where the callee is loaded through a LoadIC:
// either a plain function call (undefined receiver pushed, possibly patched
// to the global receiver in sloppy mode) or a named method call (function
// loaded off the receiver and inserted below it). Ends by delegating
// argument evaluation and the actual call to EmitCall.
2579 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2580   Expression* callee = expr->expression();
2582   CallICState::CallType call_type =
2583       callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2584   // Get the target function.
2585   if (call_type == CallICState::FUNCTION) {
2586     { StackValueContext context(this);
2587       EmitVariableLoad(callee->AsVariableProxy());
2588       PrepareForBailout(callee, NO_REGISTERS);
2590     // Push undefined as receiver. This is patched in the method prologue if it
2591     // is a sloppy mode method.
2592     __ Push(isolate()->factory()->undefined_value());
2594     // Load the function from the receiver.
2595     DCHECK(callee->IsProperty());
2596     DCHECK(!callee->AsProperty()->IsSuperAccess());
2597     __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2598     EmitNamedPropertyLoad(callee->AsProperty());
2599     PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2600     // Push the target function under the receiver.
2601     __ Push(Operand(rsp, 0));
2602     __ movp(Operand(rsp, kPointerSize), rax);
2605   EmitCall(expr, call_type);
// Sets up the stack for a super method call (super.key(...)). The home
// object and receiver are pushed, Runtime::kLoadFromSuper resolves the
// target function, and the function replaces the home object slot so the
// stack ends up as [target function, receiver] for EmitCall.
2609 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2610   Expression* callee = expr->expression();
2611   DCHECK(callee->IsProperty());
2612   Property* prop = callee->AsProperty();
2613   DCHECK(prop->IsSuperAccess());
2615   SetSourcePosition(prop->position());
2616   Literal* key = prop->key()->AsLiteral();
2617   DCHECK(!key->value()->IsSmi());
2618   // Load the function from the receiver.
2619   SuperReference* super_ref = prop->obj()->AsSuperReference();
2620   EmitLoadHomeObject(super_ref);
2622   VisitForAccumulatorValue(super_ref->this_var());
2624   __ Push(Operand(rsp, kPointerSize));
2626   __ Push(key->value());
// Stack layout at this point (top first):
2630   //  - this (receiver)
2631   //  - home_object <-- LoadFromSuper will pop here and below.
2632   //  - this (receiver)
2634   __ CallRuntime(Runtime::kLoadFromSuper, 3);
2636   // Replace home_object with target function.
2637   __ movp(Operand(rsp, kPointerSize), rax);
// Stack layout after replacement (top first):
2640   //  - target function
2641   //  - this (receiver)
2642   EmitCall(expr, CallICState::METHOD);
2646 // Common code for calls using the IC.
// Sets up the stack for a keyed method call (receiver[key](...)). The key is
// evaluated into the accumulator, the function is loaded off the receiver
// via the KeyedLoadIC, and then inserted below the receiver before EmitCall.
2647 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2650   VisitForAccumulatorValue(key);
2652   Expression* callee = expr->expression();
2654   // Load the function from the receiver.
2655   DCHECK(callee->IsProperty());
2656   __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2657   __ Move(LoadDescriptor::NameRegister(), rax);
2658   EmitKeyedPropertyLoad(callee->AsProperty());
2659   PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2661   // Push the target function under the receiver.
2662   __ Push(Operand(rsp, 0));
2663   __ movp(Operand(rsp, kPointerSize), rax);
2665   EmitCall(expr, CallICState::METHOD);
// Evaluates the call's arguments onto the stack and performs the call via
// the CallIC stub. Expects the target function and receiver to already be on
// the stack (set up by the caller). Feedback comes from the slot passed in
// rdx, so no type feedback id is attached to the IC call itself.
2669 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2670   // Load the arguments.
2671   ZoneList<Expression*>* args = expr->arguments();
2672   int arg_count = args->length();
2673   { PreservePositionScope scope(masm()->positions_recorder());
2674     for (int i = 0; i < arg_count; i++) {
2675       VisitForStackValue(args->at(i));
2679   // Record source position of the IC call.
2680   SetSourcePosition(expr->position());
2681   Handle<Code> ic = CallIC::initialize_stub(
2682       isolate(), arg_count, call_type);
2683   __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
// The function sits below the receiver and the arguments on the stack.
2684   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2685   // Don't assign a type feedback id to the IC, since type feedback is provided
2686   // by the vector above.
2689   RecordJSReturnSite(expr);
2691   // Restore context register.
2692   __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2693   // Discard the function left on TOS.
2694   context()->DropAndPlug(1, rax);
// Pushes the arguments for and performs the runtime call that resolves a
// possibly-direct call to eval: the first argument (or undefined), the
// enclosing function's receiver, the language mode, and the scope's start
// position. The caller has already pushed a copy of the function.
2698 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2699   // Push copy of the first argument or undefined if it doesn't exist.
2700   if (arg_count > 0) {
2701     __ Push(Operand(rsp, arg_count * kPointerSize));
2703     __ PushRoot(Heap::kUndefinedValueRootIndex);
2706   // Push the receiver of the enclosing function and do runtime call.
2707   StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2708   __ Push(args.GetReceiverOperand());
2710   // Push the language mode.
2711   __ Push(Smi::FromInt(strict_mode()));
2713   // Push the start position of the scope the call resides in.
2714   __ Push(Smi::FromInt(scope()->start_position()));
2716   // Do the runtime call.
2717   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
// Generates code for a call expression, dispatching on the call type:
// possibly-direct eval, global, lookup-slot (dynamically introduced
// variable), property (named/keyed/super), or an arbitrary expression. All
// paths must end up calling RecordJSReturnSite (checked in debug builds via
// return_is_recorded_).
2721 void FullCodeGenerator::VisitCall(Call* expr) {
2723   // We want to verify that RecordJSReturnSite gets called on all paths
2724   // through this function. Avoid early returns.
2725   expr->return_is_recorded_ = false;
2728   Comment cmnt(masm_, "[ Call");
2729   Expression* callee = expr->expression();
2730   Call::CallType call_type = expr->GetCallType(isolate());
2732   if (call_type == Call::POSSIBLY_EVAL_CALL) {
2733     // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2734     // to resolve the function we need to call and the receiver of the call.
2735     // Then we call the resolved function using the given arguments.
2736     ZoneList<Expression*>* args = expr->arguments();
2737     int arg_count = args->length();
2738     { PreservePositionScope pos_scope(masm()->positions_recorder());
2739       VisitForStackValue(callee);
2740       __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
2742       // Push the arguments.
2743       for (int i = 0; i < arg_count; i++) {
2744         VisitForStackValue(args->at(i));
2747       // Push a copy of the function (found below the arguments) and resolve
2749       __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2750       EmitResolvePossiblyDirectEval(arg_count);
2752       // The runtime call returns a pair of values in rax (function) and
2753       // rdx (receiver). Touch up the stack with the right values.
2754       __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2755       __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2757     // Record source position for debugger.
2758     SetSourcePosition(expr->position());
2759     CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2760     __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2762     RecordJSReturnSite(expr);
2763     // Restore context register.
2764     __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2765     context()->DropAndPlug(1, rax);
2766   } else if (call_type == Call::GLOBAL_CALL) {
2767     EmitCallWithLoadIC(expr);
2769   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2770     // Call to a lookup slot (dynamically introduced variable).
2771     VariableProxy* proxy = callee->AsVariableProxy();
2774     { PreservePositionScope scope(masm()->positions_recorder());
2775       // Generate code for loading from variables potentially shadowed by
2776       // eval-introduced variables.
2777       EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2780     // Call the runtime to find the function to call (returned in rax) and
2781     // the object holding it (returned in rdx).
2782     __ Push(context_register());
2783     __ Push(proxy->name());
2784     __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2785     __ Push(rax);  // Function.
2786     __ Push(rdx);  // Receiver.
2788     // If fast case code has been generated, emit code to push the function
2789     // and receiver and have the slow path jump around this code.
2790     if (done.is_linked()) {
2792       __ jmp(&call, Label::kNear);
2796       // The receiver is implicitly the global receiver. Indicate this by
2797       // passing the hole to the call function stub.
2798       __ PushRoot(Heap::kUndefinedValueRootIndex);
2802     // The receiver is either the global receiver or an object found by
2805   } else if (call_type == Call::PROPERTY_CALL) {
2806     Property* property = callee->AsProperty();
2807     bool is_named_call = property->key()->IsPropertyName();
2808     // super.x() is handled in EmitCallWithLoadIC.
2809     if (property->IsSuperAccess() && is_named_call) {
2810       EmitSuperCallWithLoadIC(expr);
2813       PreservePositionScope scope(masm()->positions_recorder());
2814       VisitForStackValue(property->obj());
2816     if (is_named_call) {
2817       EmitCallWithLoadIC(expr);
2819       EmitKeyedCallWithLoadIC(expr, property->key());
2823     DCHECK(call_type == Call::OTHER_CALL);
2824     // Call to an arbitrary expression not handled specially above.
2825     { PreservePositionScope scope(masm()->positions_recorder());
2826       VisitForStackValue(callee);
2828     __ PushRoot(Heap::kUndefinedValueRootIndex);
2829     // Emit function call.
2834   // RecordJSReturnSite should have been called.
2835   DCHECK(expr->return_is_recorded_);
2840 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2841 Comment cmnt(masm_, "[ CallNew");
2842 // According to ECMA-262, section 11.2.2, page 44, the function
2843 // expression in new calls must be evaluated before the
2846 // Push constructor on the stack. If it's not a function it's used as
2847 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2849 VisitForStackValue(expr->expression());
2851 // Push the arguments ("left-to-right") on the stack.
2852 ZoneList<Expression*>* args = expr->arguments();
2853 int arg_count = args->length();
2854 for (int i = 0; i < arg_count; i++) {
2855 VisitForStackValue(args->at(i));
2858 // Call the construct call builtin that handles allocation and
2859 // constructor invocation.
2860 SetSourcePosition(expr->position());
2862 // Load function and argument count into rdi and rax.
2863 __ Set(rax, arg_count);
2864 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2866 // Record call targets in unoptimized code, but not in the snapshot.
2867 if (FLAG_pretenuring_call_new) {
2868 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2869 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2870 expr->CallNewFeedbackSlot() + 1);
2873 __ Move(rbx, FeedbackVector());
2874 __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2876 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2877 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2878 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2879 context()->Plug(rax);
// Inlined %_IsSmi: evaluates the single argument into the accumulator and
// branches on its smi tag, plugging the true/false labels into the current
// test context.
2883 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2884   ZoneList<Expression*>* args = expr->arguments();
2885   DCHECK(args->length() == 1);
2887   VisitForAccumulatorValue(args->at(0));
2889   Label materialize_true, materialize_false;
2890   Label* if_true = NULL;
2891   Label* if_false = NULL;
2892   Label* fall_through = NULL;
2893   context()->PrepareTest(&materialize_true, &materialize_false,
2894                          &if_true, &if_false, &fall_through);
2896   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2897   __ JumpIfSmi(rax, if_true);
2900   context()->Plug(if_true, if_false);
// Inlined %_IsNonNegativeSmi: like EmitIsSmi but additionally requires the
// smi to be non-negative, using the macro assembler's combined check.
2904 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2905   ZoneList<Expression*>* args = expr->arguments();
2906   DCHECK(args->length() == 1);
2908   VisitForAccumulatorValue(args->at(0));
2910   Label materialize_true, materialize_false;
2911   Label* if_true = NULL;
2912   Label* if_false = NULL;
2913   Label* fall_through = NULL;
2914   context()->PrepareTest(&materialize_true, &materialize_false,
2915                          &if_true, &if_false, &fall_through);
2917   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2918   Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2919   Split(non_negative_smi, if_true, if_false, fall_through);
2921   context()->Plug(if_true, if_false);
// Inlined %_IsObject: true for null and for non-undetectable heap objects
// whose instance type lies in the non-callable spec-object range; false for
// smis, undetectable objects, and everything outside that range.
2925 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2926   ZoneList<Expression*>* args = expr->arguments();
2927   DCHECK(args->length() == 1);
2929   VisitForAccumulatorValue(args->at(0));
2931   Label materialize_true, materialize_false;
2932   Label* if_true = NULL;
2933   Label* if_false = NULL;
2934   Label* fall_through = NULL;
2935   context()->PrepareTest(&materialize_true, &materialize_false,
2936                          &if_true, &if_false, &fall_through);
2938   __ JumpIfSmi(rax, if_false);
2939   __ CompareRoot(rax, Heap::kNullValueRootIndex);
2940   __ j(equal, if_true);
2941   __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2942   // Undetectable objects behave like undefined when tested with typeof.
2943   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2944            Immediate(1 << Map::kIsUndetectable));
2945   __ j(not_zero, if_false);
// Range check: FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type <=
// LAST_NONCALLABLE_SPEC_OBJECT_TYPE.
2946   __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2947   __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2948   __ j(below, if_false);
2949   __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2950   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2951   Split(below_equal, if_true, if_false, fall_through);
2953   context()->Plug(if_true, if_false);
// Inlined %_IsSpecObject: true when the argument is a heap object whose
// instance type is at or above FIRST_SPEC_OBJECT_TYPE.
2957 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2958   ZoneList<Expression*>* args = expr->arguments();
2959   DCHECK(args->length() == 1);
2961   VisitForAccumulatorValue(args->at(0));
2963   Label materialize_true, materialize_false;
2964   Label* if_true = NULL;
2965   Label* if_false = NULL;
2966   Label* fall_through = NULL;
2967   context()->PrepareTest(&materialize_true, &materialize_false,
2968                          &if_true, &if_false, &fall_through);
2970   __ JumpIfSmi(rax, if_false);
2971   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2972   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2973   Split(above_equal, if_true, if_false, fall_through);
2975   context()->Plug(if_true, if_false);
// Inlined %_IsUndetectableObject: true when the argument is a heap object
// whose map has the kIsUndetectable bit set.
2979 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2980   ZoneList<Expression*>* args = expr->arguments();
2981   DCHECK(args->length() == 1);
2983   VisitForAccumulatorValue(args->at(0));
2985   Label materialize_true, materialize_false;
2986   Label* if_true = NULL;
2987   Label* if_false = NULL;
2988   Label* fall_through = NULL;
2989   context()->PrepareTest(&materialize_true, &materialize_false,
2990                          &if_true, &if_false, &fall_through);
2992   __ JumpIfSmi(rax, if_false);
2993   __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2994   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2995            Immediate(1 << Map::kIsUndetectable));
2996   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2997   Split(not_zero, if_true, if_false, fall_through);
2999   context()->Plug(if_true, if_false);
// Inlined check that a String wrapper object is safe for the default
// valueOf behavior: the object is a fast-case object with no own 'valueOf'
// property (verified by scanning its descriptor array, after which a map bit
// caches the result) and its prototype is the unmodified String prototype.
3003 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3004     CallRuntime* expr) {
3005   ZoneList<Expression*>* args = expr->arguments();
3006   DCHECK(args->length() == 1);
3008   VisitForAccumulatorValue(args->at(0));
3010   Label materialize_true, materialize_false, skip_lookup;
3011   Label* if_true = NULL;
3012   Label* if_false = NULL;
3013   Label* fall_through = NULL;
3014   context()->PrepareTest(&materialize_true, &materialize_false,
3015                          &if_true, &if_false, &fall_through);
3017   __ AssertNotSmi(rax);
3019   // Check whether this map has already been checked to be safe for default
// If the map bit is already set, the descriptor scan below can be skipped.
3021   __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3022   __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3023            Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3024   __ j(not_zero, &skip_lookup);
3026   // Check for fast case object. Generate false result for slow case object.
3027   __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3028   __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3029   __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3030   __ j(equal, if_false);
3032   // Look for valueOf string in the descriptor array, and indicate false if
3033   // found. Since we omit an enumeration index check, if it is added via a
3034   // transition that shares its descriptor array, this is a false positive.
3035   Label entry, loop, done;
3037   // Skip loop if no descriptors are valid.
3038   __ NumberOfOwnDescriptors(rcx, rbx);
3039   __ cmpp(rcx, Immediate(0));
3042   __ LoadInstanceDescriptors(rbx, r8);
3043   // rbx: descriptor array.
3044   // rcx: valid entries in the descriptor array.
3045   // Calculate the end of the descriptor array.
3046   __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3048       Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3049   // Calculate location of the first key name.
3050   __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3051   // Loop through all the keys in the descriptor array. If one of these is the
3052   // internalized string "valueOf" the result is false.
3055   __ movp(rdx, FieldOperand(r8, 0));
3056   __ Cmp(rdx, isolate()->factory()->value_of_string());
3057   __ j(equal, if_false);
3058   __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3061   __ j(not_equal, &loop);
3065   // Set the bit in the map to indicate that there is no local valueOf field.
3066   __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3067          Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3069   __ bind(&skip_lookup);
3071   // If a valueOf property is not found on the object check that its
3072   // prototype is the un-modified String prototype. If not result is false.
3073   __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3074   __ testp(rcx, Immediate(kSmiTagMask));
3075   __ j(zero, if_false);
// Compare the prototype's map against the cached String prototype map from
// the native context.
3076   __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3077   __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3078   __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3080       ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3081   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3082   Split(equal, if_true, if_false, fall_through);
3084   context()->Plug(if_true, if_false);
// Inlined %_IsFunction: true when the argument is a heap object of type
// JS_FUNCTION_TYPE.
3088 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3089   ZoneList<Expression*>* args = expr->arguments();
3090   DCHECK(args->length() == 1);
3092   VisitForAccumulatorValue(args->at(0));
3094   Label materialize_true, materialize_false;
3095   Label* if_true = NULL;
3096   Label* if_false = NULL;
3097   Label* fall_through = NULL;
3098   context()->PrepareTest(&materialize_true, &materialize_false,
3099                          &if_true, &if_false, &fall_through);
3101   __ JumpIfSmi(rax, if_false);
3102   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3103   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3104   Split(equal, if_true, if_false, fall_through);
3106   context()->Plug(if_true, if_false);
// Inlined %_IsMinusZero(x): true iff the argument is a HeapNumber whose bit
// pattern is exactly -0.0. Checks the map first (non-HeapNumbers, including
// smis, go to if_false), then inspects the two 32-bit halves of the double.
3110 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3111 ZoneList<Expression*>* args = expr->arguments();
3112 DCHECK(args->length() == 1);
3114 VisitForAccumulatorValue(args->at(0));
3116 Label materialize_true, materialize_false;
3117 Label* if_true = NULL;
3118 Label* if_false = NULL;
3119 Label* fall_through = NULL;
3120 context()->PrepareTest(&materialize_true, &materialize_false,
3121 &if_true, &if_false, &fall_through);
3123 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3124 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
// NOTE(review): the immediate operand of this cmpl is elided in this
// listing (line 3126 missing) — confirm against upstream before editing.
3125 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3127 __ j(no_overflow, if_false);
// The mantissa word of -0.0 must be all zero bits.
3128 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3129 Immediate(0x00000000));
3130 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3131 Split(equal, if_true, if_false, fall_through);
3133 context()->Plug(if_true, if_false);
// Inlined %_IsArray(x): true iff the accumulator value is a heap object of
// instance type JS_ARRAY_TYPE. Mirrors EmitIsFunction with a different type.
3137 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3138 ZoneList<Expression*>* args = expr->arguments();
3139 DCHECK(args->length() == 1);
3141 VisitForAccumulatorValue(args->at(0));
3143 Label materialize_true, materialize_false;
3144 Label* if_true = NULL;
3145 Label* if_false = NULL;
3146 Label* fall_through = NULL;
3147 context()->PrepareTest(&materialize_true, &materialize_false,
3148 &if_true, &if_false, &fall_through);
3150 __ JumpIfSmi(rax, if_false);
3151 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3152 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3153 Split(equal, if_true, if_false, fall_through);
3155 context()->Plug(if_true, if_false);
// Inlined %_IsRegExp(x): true iff the accumulator value is a heap object of
// instance type JS_REGEXP_TYPE. Same shape as EmitIsArray/EmitIsFunction.
3159 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3160 ZoneList<Expression*>* args = expr->arguments();
3161 DCHECK(args->length() == 1);
3163 VisitForAccumulatorValue(args->at(0));
3165 Label materialize_true, materialize_false;
3166 Label* if_true = NULL;
3167 Label* if_false = NULL;
3168 Label* fall_through = NULL;
3169 context()->PrepareTest(&materialize_true, &materialize_false,
3170 &if_true, &if_false, &fall_through);
3172 __ JumpIfSmi(rax, if_false);
3173 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3174 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3175 Split(equal, if_true, if_false, fall_through);
3177 context()->Plug(if_true, if_false);
// Inlined %_IsConstructCall(): true iff the caller's frame is marked as a
// construct frame. Walks one frame up via the saved fp, skipping over an
// arguments-adaptor frame if one is interposed, then compares the frame
// marker slot against StackFrame::CONSTRUCT.
3182 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3183 DCHECK(expr->arguments()->length() == 0);
3185 Label materialize_true, materialize_false;
3186 Label* if_true = NULL;
3187 Label* if_false = NULL;
3188 Label* fall_through = NULL;
3189 context()->PrepareTest(&materialize_true, &materialize_false,
3190 &if_true, &if_false, &fall_through);
3192 // Get the frame pointer for the calling frame.
3193 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3195 // Skip the arguments adaptor frame if it exists.
3196 Label check_frame_marker;
3197 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3198 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3199 __ j(not_equal, &check_frame_marker);
3200 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3202 // Check the marker in the calling frame.
3203 __ bind(&check_frame_marker);
3204 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3205 Smi::FromInt(StackFrame::CONSTRUCT));
3206 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3207 Split(equal, if_true, if_false, fall_through);
3209 context()->Plug(if_true, if_false);
// Inlined %_ObjectEquals(a, b): reference (pointer) equality of the two
// arguments. The first is pushed on the stack, the second left in rax.
3213 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3214 ZoneList<Expression*>* args = expr->arguments();
3215 DCHECK(args->length() == 2);
3217 // Load the two objects into registers and perform the comparison.
3218 VisitForStackValue(args->at(0));
3219 VisitForAccumulatorValue(args->at(1));
3221 Label materialize_true, materialize_false;
3222 Label* if_true = NULL;
3223 Label* if_false = NULL;
3224 Label* fall_through = NULL;
3225 context()->PrepareTest(&materialize_true, &materialize_false,
3226 &if_true, &if_false, &fall_through);
// NOTE(review): the pop/compare instructions appear elided in this listing
// (lines 3227-3229 missing) — the Split below consumes flags that must be
// set by a pointer comparison of the two values; confirm against upstream.
3230 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3231 Split(equal, if_true, if_false, fall_through);
3233 context()->Plug(if_true, if_false);
// Inlined %_Arguments(index): reads one element of the current function's
// actual arguments via ArgumentsAccessStub in READ_ELEMENT mode.
3237 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3238 ZoneList<Expression*>* args = expr->arguments();
3239 DCHECK(args->length() == 1);
3241 // ArgumentsAccessStub expects the key in rdx and the formal
3242 // parameter count in rax.
3243 VisitForAccumulatorValue(args->at(0));
// NOTE(review): the move of the key from rax into rdx and the CallStub
// appear elided in this listing (lines 3244, 3247) — confirm upstream.
3245 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3246 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3248 context()->Plug(rax);
// Inlined %_ArgumentsLength(): the actual argument count as a smi. Defaults
// to the formal parameter count; if the caller frame is an arguments-adaptor
// frame, the real count is read from the adaptor frame's length slot instead.
3252 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3253 DCHECK(expr->arguments()->length() == 0);
3256 // Get the number of formal parameters.
3257 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3259 // Check if the calling frame is an arguments adaptor frame.
3260 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3261 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3262 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
// NOTE(review): the `Label exit;` declaration and its `__ bind(&exit);` are
// elided in this listing — confirm against upstream.
3263 __ j(not_equal, &exit, Label::kNear);
3265 // Arguments adaptor case: Read the arguments length from the
3267 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3271 context()->Plug(rax);
// Inlined %_ClassOf(x): the [[Class]]-style name of a value. Smis and
// non-spec-objects yield null; callable spec objects yield "Function";
// objects whose map's constructor is not a JSFunction yield "Object";
// otherwise the constructor's SharedFunctionInfo instance class name.
3275 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 DCHECK(args->length() == 1);
3278 Label done, null, function, non_function_constructor;
3280 VisitForAccumulatorValue(args->at(0));
3282 // If the object is a smi, we return null.
3283 __ JumpIfSmi(rax, &null);
3285 // Check that the object is a JS object but take special care of JS
3286 // functions to make sure they have 'Function' as their class.
3287 // Assume that there are only two callable types, and one of them is at
3288 // either end of the type range for JS object types. Saves extra comparisons.
3289 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3290 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3291 // Map is now in rax.
3293 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3294 FIRST_SPEC_OBJECT_TYPE + 1);
// Exactly FIRST_SPEC_OBJECT_TYPE => callable at the low end of the range.
3295 __ j(equal, &function);
3297 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3298 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3299 LAST_SPEC_OBJECT_TYPE - 1);
// Exactly LAST_SPEC_OBJECT_TYPE => callable at the high end of the range.
3300 __ j(equal, &function);
3301 // Assume that there is no larger type.
3302 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3304 // Check if the constructor in the map is a JS function.
3305 __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3306 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3307 __ j(not_equal, &non_function_constructor);
3309 // rax now contains the constructor function. Grab the
3310 // instance class name from there.
3311 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3312 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
// NOTE(review): the jmp-to-done and `__ bind(&function);` lines between the
// cases appear elided in this listing — confirm against upstream.
3315 // Functions have class 'Function'.
3317 __ Move(rax, isolate()->factory()->Function_string());
3320 // Objects with a non-function constructor have class 'Object'.
3321 __ bind(&non_function_constructor);
3322 __ Move(rax, isolate()->factory()->Object_string());
3325 // Non-JS objects have class null.
3327 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3332 context()->Plug(rax);
// Inlined %_SubString(string, from, to): pushes the three arguments and
// delegates the actual work to SubStringStub; result lands in rax.
3336 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3337 // Load the arguments on the stack and call the stub.
3338 SubStringStub stub(isolate());
3339 ZoneList<Expression*>* args = expr->arguments();
3340 DCHECK(args->length() == 3);
3341 VisitForStackValue(args->at(0));
3342 VisitForStackValue(args->at(1));
3343 VisitForStackValue(args->at(2));
3345 context()->Plug(rax);
// Inlined %_RegExpExec(regexp, string, index, lastMatchInfo): pushes all
// four arguments and delegates to RegExpExecStub; result lands in rax.
3349 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3350 // Load the arguments on the stack and call the stub.
3351 RegExpExecStub stub(isolate());
3352 ZoneList<Expression*>* args = expr->arguments();
3353 DCHECK(args->length() == 4);
3354 VisitForStackValue(args->at(0));
3355 VisitForStackValue(args->at(1));
3356 VisitForStackValue(args->at(2));
3357 VisitForStackValue(args->at(3));
3359 context()->Plug(rax);
// Inlined %_ValueOf(x): for a JSValue wrapper, unwraps and returns the
// wrapped primitive; smis and non-JSValue objects are returned unchanged.
3363 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3364 ZoneList<Expression*>* args = expr->arguments();
3365 DCHECK(args->length() == 1);
3367 VisitForAccumulatorValue(args->at(0)); // Load the object.
// NOTE(review): `Label done;` and the closing `__ bind(&done);` are elided
// in this listing — confirm against upstream.
3370 // If the object is a smi return the object.
3371 __ JumpIfSmi(rax, &done);
3372 // If the object is not a value type, return the object.
3373 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3374 __ j(not_equal, &done);
3375 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3378 context()->Plug(rax);
// Inlined %_DateField(date, index): reads a field of a JSDate. Field 0 (the
// raw time value) is always read directly. Cached fields are read inline only
// if the isolate's date-cache stamp still matches the object's stamp;
// otherwise (or for uncached fields) a C function recomputes the value.
// Throws kThrowNotDateError if the receiver is not a JSDate.
3382 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3383 ZoneList<Expression*>* args = expr->arguments();
3384 DCHECK(args->length() == 2);
3385 DCHECK_NE(NULL, args->at(1)->AsLiteral());
// The field index must be a compile-time smi literal.
3386 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3388 VisitForAccumulatorValue(args->at(0)); // Load the object.
3390 Label runtime, done, not_date_object;
3391 Register object = rax;
3392 Register result = rax;
3393 Register scratch = rcx;
// NOTE(review): "¬_date_object" here and below is mojibake for
// "&not_date_object" (HTML-entity corruption of this listing) — restore
// the '&not' spelling when fixing the file's encoding.
3395 __ JumpIfSmi(object, ¬_date_object);
3396 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3397 __ j(not_equal, ¬_date_object);
3399 if (index->value() == 0) {
3400 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
// NOTE(review): the `} else {` branch structure and jumps to `done` are
// elided in this listing (lines 3401-3402, 3411-3413) — confirm upstream.
3403 if (index->value() < JSDate::kFirstUncachedField) {
// Fast path: the cached field is valid only while the global date-cache
// stamp equals the stamp recorded in the object.
3404 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3405 Operand stamp_operand = __ ExternalOperand(stamp);
3406 __ movp(scratch, stamp_operand);
3407 __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3408 __ j(not_equal, &runtime, Label::kNear);
3409 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3410 kPointerSize * index->value()));
// Slow path: recompute the field in C++; clobbers the context, which is
// restored from the frame immediately after the call.
3414 __ PrepareCallCFunction(2);
3415 __ movp(arg_reg_1, object);
3416 __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3417 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3418 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3422 __ bind(¬_date_object);
3423 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3425 context()->Plug(rax);
// Inlined %_OneByteSeqStringSetChar(index, value, string): stores a single
// byte into a sequential one-byte string in place. index and value arrive as
// smis and are untagged before the store; debug builds verify both smi-ness
// and that the target really is a sequential one-byte string in range.
3429 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3430 ZoneList<Expression*>* args = expr->arguments();
3431 DCHECK_EQ(3, args->length());
3433 Register string = rax;
3434 Register index = rbx;
3435 Register value = rcx;
3437 VisitForStackValue(args->at(0)); // index
3438 VisitForStackValue(args->at(1)); // value
3439 VisitForAccumulatorValue(args->at(2)); // string
// NOTE(review): the pops of value/index from the stack appear elided in
// this listing (around lines 3440-3442) — confirm against upstream.
3443 if (FLAG_debug_code) {
3444 __ Check(__ CheckSmi(value), kNonSmiValue);
3445 __ Check(__ CheckSmi(index), kNonSmiValue);
3448 __ SmiToInteger32(value, value);
3449 __ SmiToInteger32(index, index);
3451 if (FLAG_debug_code) {
3452 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3453 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
// Byte store at string[index] within the sequential string payload.
3456 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3458 context()->Plug(string);
// Inlined %_TwoByteSeqStringSetChar(index, value, string): two-byte variant
// of EmitOneByteSeqStringSetChar — stores a 16-bit code unit (movw, times_2
// scaling) into a sequential two-byte string in place.
3462 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3463 ZoneList<Expression*>* args = expr->arguments();
3464 DCHECK_EQ(3, args->length());
3466 Register string = rax;
3467 Register index = rbx;
3468 Register value = rcx;
3470 VisitForStackValue(args->at(0)); // index
3471 VisitForStackValue(args->at(1)); // value
3472 VisitForAccumulatorValue(args->at(2)); // string
3476 if (FLAG_debug_code) {
3477 __ Check(__ CheckSmi(value), kNonSmiValue);
3478 __ Check(__ CheckSmi(index), kNonSmiValue);
3481 __ SmiToInteger32(value, value);
3482 __ SmiToInteger32(index, index);
3484 if (FLAG_debug_code) {
3485 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3486 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
// 16-bit store at string[index] within the sequential string payload.
3489 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3491 context()->Plug(rax);
// Inlined %_MathPow(base, exponent): pushes both operands and delegates to
// MathPowStub in ON_STACK mode; result lands in rax.
3495 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3496 // Load the arguments on the stack and call the runtime function.
3497 ZoneList<Expression*>* args = expr->arguments();
3498 DCHECK(args->length() == 2);
3499 VisitForStackValue(args->at(0));
3500 VisitForStackValue(args->at(1));
3501 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3503 context()->Plug(rax);
// Inlined %_SetValueOf(object, value): if object is a JSValue wrapper, store
// value into it (with a write barrier) and yield value; if object is a smi or
// not a JSValue, the store is skipped and value is yielded unchanged.
3507 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3508 ZoneList<Expression*>* args = expr->arguments();
3509 DCHECK(args->length() == 2);
3511 VisitForStackValue(args->at(0)); // Load the object.
3512 VisitForAccumulatorValue(args->at(1)); // Load the value.
3513 __ Pop(rbx); // rax = value. rbx = object.
// NOTE(review): `Label done;` and the final `__ bind(&done);` are elided in
// this listing — confirm against upstream.
3516 // If the object is a smi, return the value.
3517 __ JumpIfSmi(rbx, &done);
3519 // If the object is not a value type, return the value.
3520 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3521 __ j(not_equal, &done);
3524 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3525 // Update the write barrier. Save the value as it will be
3526 // overwritten by the write barrier code and is needed afterward.
// NOTE(review): the save of rax into rdx appears elided (line 3527);
// RecordWriteField below clobbers rdx/rcx as its value/scratch registers.
3528 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3531 context()->Plug(rax);
// Inlined %_NumberToString(n): converts a number to its string representation
// via NumberToStringStub (which consults the number-string cache).
3535 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3536 ZoneList<Expression*>* args = expr->arguments();
3537 DCHECK_EQ(args->length(), 1);
3539 // Load the argument into rax and call the stub.
3540 VisitForAccumulatorValue(args->at(0));
3542 NumberToStringStub stub(isolate());
3544 context()->Plug(rax);
// Inlined %_StringCharFromCode(code): builds a one-character string from a
// char code. The generator's fast path handles the common (one-byte, cached)
// case inline; the slow path falls back through GenerateSlow. Result in rbx.
3548 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3549 ZoneList<Expression*>* args = expr->arguments();
3550 DCHECK(args->length() == 1);
3552 VisitForAccumulatorValue(args->at(0));
3555 StringCharFromCodeGenerator generator(rax, rbx);
3556 generator.GenerateFast(masm_);
3559 NopRuntimeCallHelper call_helper;
3560 generator.GenerateSlow(masm_, call_helper);
3563 context()->Plug(rbx);
// Inlined %_StringCharCodeAt(string, index): the char code at the given
// index as a smi. Out-of-range indices yield NaN (per spec); a non-smi index
// is replaced by undefined in the result register to force conversion on the
// generator's slow path. Result in rdx.
3567 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3568 ZoneList<Expression*>* args = expr->arguments();
3569 DCHECK(args->length() == 2);
3571 VisitForStackValue(args->at(0));
3572 VisitForAccumulatorValue(args->at(1));
3574 Register object = rbx;
3575 Register index = rax;
3576 Register result = rdx;
// NOTE(review): the Pop(object) and some generator constructor arguments
// (index/result/&need_conversion labels) are elided in this listing
// (lines 3578, 3584-3587) — confirm against upstream.
3580 Label need_conversion;
3581 Label index_out_of_range;
3583 StringCharCodeAtGenerator generator(object,
3588 &index_out_of_range,
3589 STRING_INDEX_IS_NUMBER);
3590 generator.GenerateFast(masm_);
3593 __ bind(&index_out_of_range);
3594 // When the index is out of range, the spec requires us to return
3596 __ LoadRoot(result, Heap::kNanValueRootIndex);
3599 __ bind(&need_conversion);
3600 // Move the undefined value into the result register, which will
3601 // trigger conversion.
3602 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3605 NopRuntimeCallHelper call_helper;
3606 generator.GenerateSlow(masm_, call_helper);
3609 context()->Plug(result);
// Inlined %_StringCharAt(string, index): the one-character string at the
// given index. Out-of-range indices yield the empty string (per spec); a
// non-smi index is replaced by smi zero to trigger conversion on the slow
// path. Result in rax.
3613 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3614 ZoneList<Expression*>* args = expr->arguments();
3615 DCHECK(args->length() == 2);
3617 VisitForStackValue(args->at(0));
3618 VisitForAccumulatorValue(args->at(1));
3620 Register object = rbx;
3621 Register index = rax;
3622 Register scratch = rdx;
3623 Register result = rax;
// NOTE(review): the Pop(object) and several generator constructor arguments
// are elided in this listing (lines 3625, 3631-3635) — confirm upstream.
3627 Label need_conversion;
3628 Label index_out_of_range;
3630 StringCharAtGenerator generator(object,
3636 &index_out_of_range,
3637 STRING_INDEX_IS_NUMBER);
3638 generator.GenerateFast(masm_);
3641 __ bind(&index_out_of_range);
3642 // When the index is out of range, the spec requires us to return
3643 // the empty string.
3644 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3647 __ bind(&need_conversion);
3648 // Move smi zero into the result register, which will trigger
3650 __ Move(result, Smi::FromInt(0));
3653 NopRuntimeCallHelper call_helper;
3654 generator.GenerateSlow(masm_, call_helper);
3657 context()->Plug(result);
// Inlined %_StringAdd(a, b): concatenation via StringAddStub, checking both
// operands (STRING_ADD_CHECK_BOTH) and allocating in new space (NOT_TENURED).
3661 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3662 ZoneList<Expression*>* args = expr->arguments();
3663 DCHECK_EQ(2, args->length());
3664 VisitForStackValue(args->at(0));
3665 VisitForAccumulatorValue(args->at(1));
3668 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3670 context()->Plug(rax);
// Inlined %_StringCompare(a, b): lexicographic comparison of two strings via
// StringCompareStub; both operands are passed on the stack, result in rax.
3674 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3675 ZoneList<Expression*>* args = expr->arguments();
3676 DCHECK_EQ(2, args->length());
3678 VisitForStackValue(args->at(0));
3679 VisitForStackValue(args->at(1));
3681 StringCompareStub stub(isolate());
3683 context()->Plug(rax);
// Inlined %_CallFunction(receiver, arg..., function): fast-path call when the
// last argument really is a JSFunction (invoked directly via InvokeFunction);
// anything else — including proxies — falls back to Runtime::kCall.
3687 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3688 ZoneList<Expression*>* args = expr->arguments();
3689 DCHECK(args->length() >= 2);
3691 int arg_count = args->length() - 2; // 2 ~ receiver and function.
// Push receiver plus the plain arguments; the function itself is evaluated
// last, into the accumulator.
3692 for (int i = 0; i < arg_count + 1; i++) {
3693 VisitForStackValue(args->at(i));
3695 VisitForAccumulatorValue(args->last()); // Function.
3697 Label runtime, done;
3698 // Check for non-function argument (including proxy).
3699 __ JumpIfSmi(rax, &runtime);
3700 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3701 __ j(not_equal, &runtime);
3703 // InvokeFunction requires the function in rdi. Move it in there.
3704 __ movp(rdi, result_register());
3705 ParameterCount count(arg_count);
3706 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
// InvokeFunction clobbers the context register; restore it from the frame.
3707 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// NOTE(review): the jump to `done`, `__ bind(&runtime);`, the re-push of
// rax, and `__ bind(&done);` appear elided in this listing (lines
// 3708-3711, 3713-3714) — confirm against upstream.
3712 __ CallRuntime(Runtime::kCall, args->length());
3715 context()->Plug(rax);
// Inlined %_RegExpConstructResult(length, index, input): builds the regexp
// match result array via RegExpConstructResultStub; result lands in rax.
3719 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3720 RegExpConstructResultStub stub(isolate());
3721 ZoneList<Expression*>* args = expr->arguments();
3722 DCHECK(args->length() == 3);
3723 VisitForStackValue(args->at(0));
3724 VisitForStackValue(args->at(1));
3725 VisitForAccumulatorValue(args->at(2));
3729 context()->Plug(rax);
// Inlined %_GetFromCache(cache_id, key): looks up key in the per-context
// JSFunctionResultCache identified by the compile-time literal cache_id.
// Fast path probes only the cache's "finger" slot; a miss calls
// Runtime::kGetFromCache. An out-of-range cache_id aborts (debug) and
// yields undefined.
3733 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3734 ZoneList<Expression*>* args = expr->arguments();
3735 DCHECK_EQ(2, args->length());
3737 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3738 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3740 Handle<FixedArray> jsfunction_result_caches(
3741 isolate()->native_context()->jsfunction_result_caches());
3742 if (jsfunction_result_caches->length() <= cache_id) {
3743 __ Abort(kAttemptToUseUndefinedCache);
3744 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3745 context()->Plug(rax);
// NOTE(review): the `return;` closing this early-out and the `Register key`
// / `Register tmp` declarations appear elided in this listing (lines
// 3746-3748, 3751, 3753) — `key` and `tmp` are referenced below.
3749 VisitForAccumulatorValue(args->at(1));
3752 Register cache = rbx;
// Chase global object -> native context -> caches array -> this cache.
3754 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3756 FieldOperand(cache, GlobalObject::kNativeContextOffset));
3758 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3760 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3762 Label done, not_found;
3763 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3764 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3765 // tmp now holds finger offset as a smi.
3767 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3768 __ cmpp(key, FieldOperand(cache,
3771 FixedArray::kHeaderSize));
// NOTE(review): "¬_found" is mojibake for "&not_found" (encoding
// corruption of this listing).
3772 __ j(not_equal, ¬_found, Label::kNear);
// Hit: the cached value sits in the slot right after the key.
3773 __ movp(rax, FieldOperand(cache,
3776 FixedArray::kHeaderSize + kPointerSize));
3777 __ jmp(&done, Label::kNear);
3779 __ bind(¬_found);
3780 // Call runtime to perform the lookup.
3783 __ CallRuntime(Runtime::kGetFromCache, 2);
3786 context()->Plug(rax);
// Inlined %_HasCachedArrayIndex(string): true iff the string's hash field
// has a cached array index (i.e. the "contains cached array index" mask bits
// are clear). Note the inverted sense: zero flag set => cached index present.
3790 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3791 ZoneList<Expression*>* args = expr->arguments();
3792 DCHECK(args->length() == 1);
3794 VisitForAccumulatorValue(args->at(0));
3796 Label materialize_true, materialize_false;
3797 Label* if_true = NULL;
3798 Label* if_false = NULL;
3799 Label* fall_through = NULL;
3800 context()->PrepareTest(&materialize_true, &materialize_false,
3801 &if_true, &if_false, &fall_through);
3803 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3804 Immediate(String::kContainsCachedArrayIndexMask));
3805 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3806 __ j(zero, if_true);
3809 context()->Plug(if_true, if_false);
// Inlined %_GetCachedArrayIndex(string): extracts the array index cached in
// the string's hash field and returns it as a smi (via IndexFromHash).
3813 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3814 ZoneList<Expression*>* args = expr->arguments();
3815 DCHECK(args->length() == 1);
3816 VisitForAccumulatorValue(args->at(0));
3818 __ AssertString(rax);
3820 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3821 DCHECK(String::kHashShift >= kSmiTagSize);
3822 __ IndexFromHash(rax, rax);
3824 context()->Plug(rax);
// Inlined fast path for Array.prototype.join on arrays of sequential one-byte
// strings with a sequential one-byte separator. Bails out to undefined (so
// the caller falls back to the generic join) whenever any precondition fails:
// non-JSArray receiver, non-fast elements, a non-sequential/non-one-byte
// element or separator, or length overflow. Three specialized copy loops are
// used: empty separator, single-character separator, and long separator.
// Stack layout while running: [separator, result, array_length(int32)].
3828 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3829 Label bailout, return_result, done, one_char_separator, long_separator,
3830 non_trivial_array, not_size_one_array, loop,
3831 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3832 ZoneList<Expression*>* args = expr->arguments();
3833 DCHECK(args->length() == 2);
3834 // We will leave the separator on the stack until the end of the function.
3835 VisitForStackValue(args->at(1));
3836 // Load this to rax (= array)
3837 VisitForAccumulatorValue(args->at(0));
3838 // All aliases of the same register have disjoint lifetimes.
3839 Register array = rax;
3840 Register elements = no_reg; // Will be rax.
3842 Register index = rdx;
3844 Register string_length = rcx;
3846 Register string = rsi;
3848 Register scratch = rbx;
3850 Register array_length = rdi;
3851 Register result_pos = no_reg; // Will be rdi.
3853 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3854 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3855 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3856 // Separator operand is already pushed. Make room for the two
3857 // other stack fields, and clear the direction flag in anticipation
3858 // of calling CopyBytes.
3859 __ subp(rsp, Immediate(2 * kPointerSize));
3861 // Check that the array is a JSArray
3862 __ JumpIfSmi(array, &bailout);
3863 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3864 __ j(not_equal, &bailout);
3866 // Check that the array has fast elements.
3867 __ CheckFastElements(scratch, &bailout);
3869 // Array has fast elements, so its length must be a smi.
3870 // If the array has length zero, return the empty string.
3871 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3872 __ SmiCompare(array_length, Smi::FromInt(0));
3873 __ j(not_zero, &non_trivial_array);
3874 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3875 __ jmp(&return_result);
3877 // Save the array length on the stack.
3878 __ bind(&non_trivial_array);
3879 __ SmiToInteger32(array_length, array_length);
3880 __ movl(array_length_operand, array_length);
3882 // Save the FixedArray containing array's elements.
3883 // End of array's live range.
// From here on rax is reused as `elements` (see register comments above).
3885 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3889 // Check that all array elements are sequential one-byte strings, and
3890 // accumulate the sum of their lengths, as a smi-encoded value.
3892 __ Set(string_length, 0);
3893 // Loop condition: while (index < array_length).
3894 // Live loop registers: index(int32), array_length(int32), string(String*),
3895 // scratch, string_length(int32), elements(FixedArray*).
3896 if (generate_debug_code_) {
3897 __ cmpp(index, array_length);
3898 __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
// NOTE(review): the `__ bind(&loop);` that heads this validation loop
// appears elided in this listing — confirm against upstream.
3901 __ movp(string, FieldOperand(elements,
3904 FixedArray::kHeaderSize));
3905 __ JumpIfSmi(string, &bailout);
// Element must be a sequential one-byte string: mask out everything but
// the string/encoding/representation bits and compare exactly.
3906 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3907 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3908 __ andb(scratch, Immediate(
3909 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3910 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3911 __ j(not_equal, &bailout);
3912 __ AddSmiField(string_length,
3913 FieldOperand(string, SeqOneByteString::kLengthOffset));
3914 __ j(overflow, &bailout);
3916 __ cmpl(index, array_length);
3920 // string_length: Sum of string lengths.
3921 // elements: FixedArray of strings.
3922 // index: Array length.
3923 // array_length: Array length.
3925 // If array_length is 1, return elements[0], a string.
3926 __ cmpl(array_length, Immediate(1));
// NOTE(review): "¬_size_one_array" here and at the bind below is mojibake
// for "&not_size_one_array" (encoding corruption of this listing).
3927 __ j(not_equal, ¬_size_one_array);
3928 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3929 __ jmp(&return_result);
3931 __ bind(¬_size_one_array);
3933 // End of array_length live range.
3934 result_pos = array_length;
3935 array_length = no_reg;
3938 // string_length: Sum of string lengths.
3939 // elements: FixedArray of strings.
3940 // index: Array length.
3942 // Check that the separator is a sequential one-byte string.
3943 __ movp(string, separator_operand);
3944 __ JumpIfSmi(string, &bailout);
3945 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3946 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3947 __ andb(scratch, Immediate(
3948 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3949 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3950 __ j(not_equal, &bailout);
3953 // string_length: Sum of string lengths.
3954 // elements: FixedArray of strings.
3955 // index: Array length.
3956 // string: Separator string.
3958 // Add (separator length times (array_length - 1)) to string_length.
3959 __ SmiToInteger32(scratch,
3960 FieldOperand(string, SeqOneByteString::kLengthOffset));
// NOTE(review): the decrement of index (array_length - 1) appears elided
// in this listing (line 3961) — confirm against upstream.
3962 __ imull(scratch, index);
3963 __ j(overflow, &bailout);
3964 __ addl(string_length, scratch);
3965 __ j(overflow, &bailout);
3967 // Live registers and stack values:
3968 // string_length: Total length of result string.
3969 // elements: FixedArray of strings.
// Allocate the result string and remember both the object and the raw
// write cursor (result_pos) past its header.
3970 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
3972 __ movp(result_operand, result_pos);
3973 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
// Dispatch on separator length: 0 => loop_1, 1 => loop_2, else loop_3.
3975 __ movp(string, separator_operand);
3976 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
3978 __ j(equal, &one_char_separator);
3979 __ j(greater, &long_separator);
3982 // Empty separator case:
3984 __ movl(scratch, array_length_operand);
3985 __ jmp(&loop_1_condition);
3986 // Loop condition: while (index < array_length).
3988 // Each iteration of the loop concatenates one string to the result.
3989 // Live values in registers:
3990 // index: which element of the elements array we are adding to the result.
3991 // result_pos: the position to which we are currently copying characters.
3992 // elements: the FixedArray of strings we are joining.
3993 // scratch: array length.
3995 // Get string = array[index].
3996 __ movp(string, FieldOperand(elements, index,
3998 FixedArray::kHeaderSize));
3999 __ SmiToInteger32(string_length,
4000 FieldOperand(string, String::kLengthOffset));
4002 FieldOperand(string, SeqOneByteString::kHeaderSize));
4003 __ CopyBytes(result_pos, string, string_length);
4005 __ bind(&loop_1_condition);
4006 __ cmpl(index, scratch);
4007 __ j(less, &loop_1); // Loop while (index < array_length).
4010 // Generic bailout code used from several places.
4012 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4013 __ jmp(&return_result);
4016 // One-character separator case
4017 __ bind(&one_char_separator);
4018 // Get the separator one-byte character value.
4019 // Register "string" holds the separator.
4020 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4022 // Jump into the loop after the code that copies the separator, so the first
4023 // element is not preceded by a separator
4024 __ jmp(&loop_2_entry);
4025 // Loop condition: while (index < length).
4027 // Each iteration of the loop concatenates one string to the result.
4028 // Live values in registers:
4029 // elements: The FixedArray of strings we are joining.
4030 // index: which element of the elements array we are adding to the result.
4031 // result_pos: the position to which we are currently copying characters.
4032 // scratch: Separator character.
4034 // Copy the separator character to the result.
4035 __ movb(Operand(result_pos, 0), scratch);
4036 __ incp(result_pos);
4038 __ bind(&loop_2_entry);
4039 // Get string = array[index].
4040 __ movp(string, FieldOperand(elements, index,
4042 FixedArray::kHeaderSize));
4043 __ SmiToInteger32(string_length,
4044 FieldOperand(string, String::kLengthOffset));
4046 FieldOperand(string, SeqOneByteString::kHeaderSize));
4047 __ CopyBytes(result_pos, string, string_length);
4049 __ cmpl(index, array_length_operand);
4050 __ j(less, &loop_2); // End while (index < length).
4054 // Long separator case (separator is more than one character).
4055 __ bind(&long_separator);
4057 // Make elements point to end of elements array, and index
4058 // count from -array_length to zero, so we don't need to maintain
// ... a separate loop counter (negative-index iteration pattern).
4060 __ movl(index, array_length_operand);
4061 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4062 FixedArray::kHeaderSize));
// NOTE(review): the negation of index appears elided in this listing
// (around line 4063) — the loop below terminates when index reaches 0.
4065 // Replace separator string with pointer to its first character, and
4066 // make scratch be its length.
4067 __ movp(string, separator_operand);
4068 __ SmiToInteger32(scratch,
4069 FieldOperand(string, String::kLengthOffset));
4071 FieldOperand(string, SeqOneByteString::kHeaderSize));
4072 __ movp(separator_operand, string);
4074 // Jump into the loop after the code that copies the separator, so the first
4075 // element is not preceded by a separator
4076 __ jmp(&loop_3_entry);
4077 // Loop condition: while (index < length).
4079 // Each iteration of the loop concatenates one string to the result.
4080 // Live values in registers:
4081 // index: which element of the elements array we are adding to the result.
4082 // result_pos: the position to which we are currently copying characters.
4083 // scratch: Separator length.
4084 // separator_operand (rsp[0x10]): Address of first char of separator.
4086 // Copy the separator to the result.
4087 __ movp(string, separator_operand);
4088 __ movl(string_length, scratch);
4089 __ CopyBytes(result_pos, string, string_length, 2);
4091 __ bind(&loop_3_entry);
4092 // Get string = array[index].
4093 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4094 __ SmiToInteger32(string_length,
4095 FieldOperand(string, String::kLengthOffset));
4097 FieldOperand(string, SeqOneByteString::kHeaderSize));
4098 __ CopyBytes(result_pos, string, string_length);
4100 __ j(not_equal, &loop_3); // Loop while (index < 0).
4103 __ movp(rax, result_operand);
4105 __ bind(&return_result);
4106 // Drop temp values from the stack, and restore context register.
4107 __ addp(rsp, Immediate(3 * kPointerSize));
4108 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4109 context()->Plug(rax);
// Inlined %_DebugIsActive(): reads the isolate's debug-is-active byte flag
// through an external reference and returns it as a smi (0 or non-zero).
4113 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4114 DCHECK(expr->arguments()->length() == 0);
4115 ExternalReference debug_is_active =
4116 ExternalReference::debug_is_active_address(isolate());
4117 __ Move(kScratchRegister, debug_is_active);
// Zero-extending byte load; the flag is a single byte in memory.
4118 __ movzxbp(rax, Operand(kScratchRegister, 0));
4119 __ Integer32ToSmi(rax, rax);
4120 context()->Plug(rax);
// Emits a runtime call. Three cases: (1) INLINE intrinsics are expanded via
// EmitInlineRuntimeCall (the Emit* helpers above); (2) JS runtime functions
// (is_jsruntime) are looked up on the builtins object with a load IC and
// invoked through CallFunctionStub; (3) everything else goes straight to the
// C++ runtime via CallRuntime.
4124 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4125 if (expr->function() != NULL &&
4126 expr->function()->intrinsic_type == Runtime::INLINE) {
4127 Comment cmnt(masm_, "[ InlineRuntimeCall");
4128 EmitInlineRuntimeCall(expr);
// NOTE(review): the `return;` after the inline expansion appears elided in
// this listing (around line 4129) — confirm against upstream.
4132 Comment cmnt(masm_, "[ CallRuntime");
4133 ZoneList<Expression*>* args = expr->arguments();
4134 int arg_count = args->length();
4136 if (expr->is_jsruntime()) {
4137 // Push the builtins object as receiver.
4138 __ movp(rax, GlobalObjectOperand());
4139 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4141 // Load the function from the receiver.
4142 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4143 __ Move(LoadDescriptor::NameRegister(), expr->name());
// With vector ICs the feedback slot is passed in a register; otherwise the
// feedback id is attached to the IC call site directly.
4144 if (FLAG_vector_ics) {
4145 __ Move(VectorLoadICDescriptor::SlotRegister(),
4146 Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
4147 CallLoadIC(NOT_CONTEXTUAL);
4149 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4152 // Push the target function under the receiver.
4153 __ Push(Operand(rsp, 0));
4154 __ movp(Operand(rsp, kPointerSize), rax);
4156 // Push the arguments ("left-to-right").
4157 for (int i = 0; i < arg_count; i++) {
4158 VisitForStackValue(args->at(i));
4161 // Record source position of the IC call.
4162 SetSourcePosition(expr->position());
4163 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4164 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
// NOTE(review): the `__ CallStub(&stub);` appears elided in this listing
// (around line 4165) — confirm against upstream.
4167 // Restore context register.
4168 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Drop the receiver that is still on the stack and plug the result.
4169 context()->DropAndPlug(1, rax);
4172 // Push the arguments ("left-to-right").
4173 for (int i = 0; i < arg_count; i++) {
4174 VisitForStackValue(args->at(i));
4177 // Call the C runtime.
4178 __ CallRuntime(expr->function(), arg_count);
4179 context()->Plug(rax);
// Generates code for unary operations.
//  - DELETE dispatches on the operand kind: a property (InvokeBuiltin
//    DELETE), an unallocated global (DELETE with SLOPPY), a stack/context
//    slot (constant false, or true for 'this'), a lookup-slot variable
//    (Runtime::kDeleteLookupSlot), or any other expression (constant true).
//  - VOID evaluates the operand for effect and yields undefined.
//  - NOT compiles through the expression's context: effect contexts just
//    visit the operand, test contexts swap the branch labels, and value
//    contexts materialize true/false with bailout ids for the optimizer.
//  - TYPEOF evaluates the operand via VisitForTypeofValue and calls
//    Runtime::kTypeof.
4184 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4185 switch (expr->op()) {
4186 case Token::DELETE: {
4187 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4188 Property* property = expr->expression()->AsProperty();
4189 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4191 if (property != NULL) {
4192 VisitForStackValue(property->obj());
4193 VisitForStackValue(property->key());
4194 __ Push(Smi::FromInt(strict_mode()));
4195 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4196 context()->Plug(rax);
4197 } else if (proxy != NULL) {
4198 Variable* var = proxy->var();
4199 // Delete of an unqualified identifier is disallowed in strict mode
4200 // but "delete this" is allowed.
4201 DCHECK(strict_mode() == SLOPPY || var->is_this());
4202 if (var->IsUnallocated()) {
4203 __ Push(GlobalObjectOperand());
4204 __ Push(var->name());
4205 __ Push(Smi::FromInt(SLOPPY));
4206 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4207 context()->Plug(rax);
4208 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4209 // Result of deleting non-global variables is false. 'this' is
4210 // not really a variable, though we implement it as one. The
4211 // subexpression does not have side effects.
4212 context()->Plug(var->is_this());
4214 // Non-global variable. Call the runtime to try to delete from the
4215 // context where the variable was introduced.
4216 __ Push(context_register());
4217 __ Push(var->name());
4218 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4219 context()->Plug(rax);
4222 // Result of deleting non-property, non-variable reference is true.
4223 // The subexpression may have side effects.
4224 VisitForEffect(expr->expression());
4225 context()->Plug(true);
4231 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4232 VisitForEffect(expr->expression());
4233 context()->Plug(Heap::kUndefinedValueRootIndex);
4238 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4239 if (context()->IsEffect()) {
4240 // Unary NOT has no side effects so it's only necessary to visit the
4241 // subexpression. Match the optimizing compiler by not branching.
4242 VisitForEffect(expr->expression());
4243 } else if (context()->IsTest()) {
4244 const TestContext* test = TestContext::cast(context());
4245 // The labels are swapped for the recursive call.
4246 VisitForControl(expr->expression(),
4247 test->false_label(),
4249 test->fall_through());
4250 context()->Plug(test->true_label(), test->false_label());
4252 // We handle value contexts explicitly rather than simply visiting
4253 // for control and plugging the control flow into the context,
4254 // because we need to prepare a pair of extra administrative AST ids
4255 // for the optimizing compiler.
4256 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4257 Label materialize_true, materialize_false, done;
4258 VisitForControl(expr->expression(),
4262 __ bind(&materialize_true);
4263 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4264 if (context()->IsAccumulatorValue()) {
4265 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4267 __ PushRoot(Heap::kTrueValueRootIndex);
4269 __ jmp(&done, Label::kNear);
4270 __ bind(&materialize_false);
4271 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4272 if (context()->IsAccumulatorValue()) {
4273 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4275 __ PushRoot(Heap::kFalseValueRootIndex);
4282 case Token::TYPEOF: {
4283 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4284 { StackValueContext context(this);
4285 VisitForTypeofValue(expr->expression());
4287 __ CallRuntime(Runtime::kTypeof, 1);
4288 context()->Plug(rax);
// Generates code for ++/-- (pre- and postfix) on a variable, a named
// property, or a keyed property. Loads the current value, optionally tries
// an inline Smi add/sub of 1 (patchable via JumpPatchSite), falls back to
// ToNumberStub plus a BinaryOpIC call, and finally stores the result back
// through the path matching the LHS kind. For postfix forms in non-effect
// contexts the pre-increment value is kept on the stack (under the
// receiver/key for property targets) so it can be plugged as the result.
4298 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4299 DCHECK(expr->expression()->IsValidReferenceExpression());
4301 Comment cmnt(masm_, "[ CountOperation");
4302 SetSourcePosition(expr->position());
4304 // Expression can only be a property, a global or a (parameter or local)
4306 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4307 LhsKind assign_type = VARIABLE;
4308 Property* prop = expr->expression()->AsProperty();
4309 // In case of a property we use the uninitialized expression context
4310 // of the key to detect a named property.
4313 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4316 // Evaluate expression and get value.
4317 if (assign_type == VARIABLE) {
4318 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4319 AccumulatorValueContext context(this);
4320 EmitVariableLoad(expr->expression()->AsVariableProxy());
4322 // Reserve space for result of postfix operation.
4323 if (expr->is_postfix() && !context()->IsEffect()) {
4324 __ Push(Smi::FromInt(0));
4326 if (assign_type == NAMED_PROPERTY) {
4327 VisitForStackValue(prop->obj());
4328 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4329 EmitNamedPropertyLoad(prop);
4331 VisitForStackValue(prop->obj());
4332 VisitForStackValue(prop->key());
4333 // Leave receiver on stack
4334 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4335 // Copy of key, needed for later store.
4336 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4337 EmitKeyedPropertyLoad(prop);
4341 // We need a second deoptimization point after loading the value
4342 // in case evaluating the property load may have a side effect.
4343 if (assign_type == VARIABLE) {
4344 PrepareForBailout(expr->expression(), TOS_REG);
4346 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4349 // Inline smi case if we are in a loop.
4350 Label done, stub_call;
4351 JumpPatchSite patch_site(masm_);
4352 if (ShouldInlineSmiCase(expr->op())) {
4354 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4356 // Save result for postfix expressions.
4357 if (expr->is_postfix()) {
4358 if (!context()->IsEffect()) {
4359 // Save the result on the stack. If we have a named or keyed property
4360 // we store the result under the receiver that is currently on top
4362 switch (assign_type) {
4366 case NAMED_PROPERTY:
4367 __ movp(Operand(rsp, kPointerSize), rax);
4369 case KEYED_PROPERTY:
4370 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4376 SmiOperationExecutionMode mode;
4377 mode.Add(PRESERVE_SOURCE_REGISTER);
4378 mode.Add(BAILOUT_ON_NO_OVERFLOW);
4379 if (expr->op() == Token::INC) {
4380 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4382 __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4384 __ jmp(&stub_call, Label::kNear); // Smi overflow: go through the IC.
4388 ToNumberStub convert_stub(isolate());
4389 __ CallStub(&convert_stub);
4391 // Save result for postfix expressions.
4392 if (expr->is_postfix()) {
4393 if (!context()->IsEffect()) {
4394 // Save the result on the stack. If we have a named or keyed property
4395 // we store the result under the receiver that is currently on top
4397 switch (assign_type) {
4401 case NAMED_PROPERTY:
4402 __ movp(Operand(rsp, kPointerSize), rax);
4404 case KEYED_PROPERTY:
4405 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4411 // Record position before stub call.
4412 SetSourcePosition(expr->position());
4414 // Call stub for +1/-1.
4415 __ bind(&stub_call);
4417 __ Move(rax, Smi::FromInt(1));
4418 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4419 NO_OVERWRITE).code();
4420 CallIC(code, expr->CountBinOpFeedbackId());
4421 patch_site.EmitPatchInfo();
4424 // Store the value returned in rax.
4425 switch (assign_type) {
4427 if (expr->is_postfix()) {
4428 // Perform the assignment as if via '='.
4429 { EffectContext context(this);
4430 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4432 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4435 // For all contexts except kEffect: We have the result on
4436 // top of the stack.
4437 if (!context()->IsEffect()) {
4438 context()->PlugTOS();
4441 // Perform the assignment as if via '='.
4442 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4444 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4445 context()->Plug(rax);
4448 case NAMED_PROPERTY: {
4449 __ Move(StoreDescriptor::NameRegister(),
4450 prop->key()->AsLiteral()->value());
4451 __ Pop(StoreDescriptor::ReceiverRegister());
4452 CallStoreIC(expr->CountStoreFeedbackId());
4453 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4454 if (expr->is_postfix()) {
4455 if (!context()->IsEffect()) {
4456 context()->PlugTOS();
4459 context()->Plug(rax);
4463 case KEYED_PROPERTY: {
4464 __ Pop(StoreDescriptor::NameRegister());
4465 __ Pop(StoreDescriptor::ReceiverRegister());
4467 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4468 CallIC(ic, expr->CountStoreFeedbackId());
4469 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4470 if (expr->is_postfix()) {
4471 if (!context()->IsEffect()) {
4472 context()->PlugTOS();
4475 context()->Plug(rax);
// Loads a value for use by typeof without throwing a reference error:
// unallocated globals go through a non-contextual LoadIC, lookup slots use
// EmitDynamicLookupFastCase with a runtime fallback to
// Runtime::kLoadLookupSlotNoReferenceError, and all other expressions are
// visited normally (they cannot throw a reference error at top level).
4483 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4484 VariableProxy* proxy = expr->AsVariableProxy();
4485 DCHECK(!context()->IsEffect());
4486 DCHECK(!context()->IsTest());
4488 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4489 Comment cmnt(masm_, "[ Global variable");
4490 __ Move(LoadDescriptor::NameRegister(), proxy->name());
4491 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4492 if (FLAG_vector_ics) {
4493 __ Move(VectorLoadICDescriptor::SlotRegister(),
4494 Smi::FromInt(proxy->VariableFeedbackSlot()));
4496 // Use a regular load, not a contextual load, to avoid a reference
4498 CallLoadIC(NOT_CONTEXTUAL);
4499 PrepareForBailout(expr, TOS_REG);
4500 context()->Plug(rax);
4501 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4502 Comment cmnt(masm_, "[ Lookup slot");
4505 // Generate code for loading from variables potentially shadowed
4506 // by eval-introduced variables.
4507 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4511 __ Push(proxy->name());
4512 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4513 PrepareForBailout(expr, TOS_REG);
4516 context()->Plug(rax);
4518 // This expression cannot throw a reference error at the top level.
4519 VisitInDuplicateContext(expr);
// Emits an inlined comparison of typeof(sub_expr) against the literal
// string |check| ("number", "string", "symbol", "boolean", "undefined",
// "function", "object"), splitting control flow to if_true/if_false instead
// of materializing the typeof string. Undetectable objects report as
// "undefined"; any unrecognized check string falls through to if_false.
4524 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4525 Expression* sub_expr,
4526 Handle<String> check) {
4527 Label materialize_true, materialize_false;
4528 Label* if_true = NULL;
4529 Label* if_false = NULL;
4530 Label* fall_through = NULL;
4531 context()->PrepareTest(&materialize_true, &materialize_false,
4532 &if_true, &if_false, &fall_through);
4534 { AccumulatorValueContext context(this);
4535 VisitForTypeofValue(sub_expr);
4537 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4539 Factory* factory = isolate()->factory();
4540 if (String::Equals(check, factory->number_string())) {
4541 __ JumpIfSmi(rax, if_true); // Smis are numbers.
4542 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4543 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4544 Split(equal, if_true, if_false, fall_through);
4545 } else if (String::Equals(check, factory->string_string())) {
4546 __ JumpIfSmi(rax, if_false);
4547 // Check for undetectable objects => false.
4548 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4549 __ j(above_equal, if_false);
4550 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4551 Immediate(1 << Map::kIsUndetectable));
4552 Split(zero, if_true, if_false, fall_through);
4553 } else if (String::Equals(check, factory->symbol_string())) {
4554 __ JumpIfSmi(rax, if_false);
4555 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4556 Split(equal, if_true, if_false, fall_through);
4557 } else if (String::Equals(check, factory->boolean_string())) {
4558 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4559 __ j(equal, if_true);
4560 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4561 Split(equal, if_true, if_false, fall_through);
4562 } else if (String::Equals(check, factory->undefined_string())) {
4563 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4564 __ j(equal, if_true);
4565 __ JumpIfSmi(rax, if_false);
4566 // Check for undetectable objects => true.
4567 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4568 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4569 Immediate(1 << Map::kIsUndetectable));
4570 Split(not_zero, if_true, if_false, fall_through);
4571 } else if (String::Equals(check, factory->function_string())) {
4572 __ JumpIfSmi(rax, if_false);
4573 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4574 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4575 __ j(equal, if_true);
4576 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4577 Split(equal, if_true, if_false, fall_through);
4578 } else if (String::Equals(check, factory->object_string())) {
4579 __ JumpIfSmi(rax, if_false);
4580 __ CompareRoot(rax, Heap::kNullValueRootIndex); // typeof null == "object".
4581 __ j(equal, if_true);
4582 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4583 __ j(below, if_false);
4584 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4585 __ j(above, if_false);
4586 // Check for undetectable objects => false.
4587 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4588 Immediate(1 << Map::kIsUndetectable));
4589 Split(zero, if_true, if_false, fall_through);
4591 if (if_false != fall_through) __ jmp(if_false);
4593 context()->Plug(if_true, if_false);
// Generates code for a comparison. Literal comparisons (typeof checks,
// null/undefined) are handled by TryLiteralCompare. Otherwise: 'in' goes
// through Builtins::IN, 'instanceof' through InstanceofStub (which returns
// 0 for true), and everything else evaluates both operands and calls the
// generic CompareIC, with an optional inline Smi fast path guarded by a
// patchable JumpPatchSite. The boolean outcome is always produced as
// control flow and then packed into the expression's context.
4597 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4598 Comment cmnt(masm_, "[ CompareOperation");
4599 SetSourcePosition(expr->position());
4601 // First we try a fast inlined version of the compare when one of
4602 // the operands is a literal.
4603 if (TryLiteralCompare(expr)) return;
4605 // Always perform the comparison for its control flow. Pack the result
4606 // into the expression's context after the comparison is performed.
4607 Label materialize_true, materialize_false;
4608 Label* if_true = NULL;
4609 Label* if_false = NULL;
4610 Label* fall_through = NULL;
4611 context()->PrepareTest(&materialize_true, &materialize_false,
4612 &if_true, &if_false, &fall_through);
4614 Token::Value op = expr->op();
4615 VisitForStackValue(expr->left());
4618 VisitForStackValue(expr->right());
4619 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4620 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4621 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4622 Split(equal, if_true, if_false, fall_through);
4625 case Token::INSTANCEOF: {
4626 VisitForStackValue(expr->right());
4627 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4629 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4631 // The stub returns 0 for true.
4632 Split(zero, if_true, if_false, fall_through);
4637 VisitForAccumulatorValue(expr->right());
4638 Condition cc = CompareIC::ComputeCondition(op);
4641 bool inline_smi_code = ShouldInlineSmiCase(op);
4642 JumpPatchSite patch_site(masm_);
4643 if (inline_smi_code) {
4647 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4649 Split(cc, if_true, if_false, NULL);
4650 __ bind(&slow_case);
4653 // Record position and call the compare IC.
4654 SetSourcePosition(expr->position());
4655 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4656 CallIC(ic, expr->CompareOperationFeedbackId());
4657 patch_site.EmitPatchInfo();
4659 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4661 Split(cc, if_true, if_false, fall_through);
4665 // Convert the result of the comparison into one expected for this
4666 // expression's context.
4667 context()->Plug(if_true, if_false);
// Emits an inlined comparison of sub_expr against null or undefined.
// Strict equality (===) is a direct root comparison against the matching
// heap value; loose equality goes through the CompareNilIC, whose nonzero
// result means "equal to nil".
4671 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4672 Expression* sub_expr,
4674 Label materialize_true, materialize_false;
4675 Label* if_true = NULL;
4676 Label* if_false = NULL;
4677 Label* fall_through = NULL;
4678 context()->PrepareTest(&materialize_true, &materialize_false,
4679 &if_true, &if_false, &fall_through);
4681 VisitForAccumulatorValue(sub_expr);
4682 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4683 if (expr->op() == Token::EQ_STRICT) {
4684 Heap::RootListIndex nil_value = nil == kNullValue ?
4685 Heap::kNullValueRootIndex :
4686 Heap::kUndefinedValueRootIndex;
4687 __ CompareRoot(rax, nil_value);
4688 Split(equal, if_true, if_false, fall_through);
4690 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4691 CallIC(ic, expr->CompareOperationFeedbackId());
4693 Split(not_zero, if_true, if_false, fall_through);
4695 context()->Plug(if_true, if_false);
// Loads the current JSFunction object from the frame's function slot and
// plugs it into the context as the result.
4699 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4700 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4701 context()->Plug(rax);
// Accessor for the register used to hold expression results. Body elided
// in this listing — presumably returns rax on x64; confirm in full source.
4705 Register FullCodeGenerator::result_register() {
// Accessor for the register used to hold the current context. Body elided
// in this listing — presumably returns rsi on x64; confirm in full source.
4710 Register FullCodeGenerator::context_register() {
// Stores |value| into the current frame at |frame_offset| relative to rbp.
// The offset must be pointer-size aligned.
4715 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4716 DCHECK(IsAligned(frame_offset, kPointerSize));
4717 __ movp(Operand(rbp, frame_offset), value);
// Loads slot |context_index| of the current context (held in rsi) into
// register |dst|.
4721 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4722 __ movp(dst, ContextOperand(rsi, context_index));
// Pushes the closure argument used when allocating a new context: a Smi 0
// sentinel for global/module scopes (the runtime substitutes the canonical
// empty function), the enclosing context's closure for eval scopes, and the
// frame's function for ordinary function scopes.
4726 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4727 Scope* declaration_scope = scope()->DeclarationScope();
4728 if (declaration_scope->is_global_scope() ||
4729 declaration_scope->is_module_scope()) {
4730 // Contexts nested in the native context have a canonical empty function
4731 // as their closure, not the anonymous closure containing the global
4732 // code. Pass a smi sentinel and let the runtime look up the empty
4734 __ Push(Smi::FromInt(0));
4735 } else if (declaration_scope->is_eval_scope()) {
4736 // Contexts created by a call to eval have the same closure as the
4737 // context calling eval, not the anonymous closure containing the eval
4738 // code. Fetch it from the context.
4739 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4741 DCHECK(declaration_scope->is_function_scope());
4742 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4747 // ----------------------------------------------------------------------------
4748 // Non-local control flow support.
// Emitted on entry to a finally block: "cooks" the return address into a
// GC-safe Smi delta from the code object, then saves the result register
// and the isolate's pending-message state (object, has-message flag, and
// script) so the finally body can run without clobbering them. Uses rdx
// and rcx as scratch, hence the DCHECKs that neither is the result
// register.
4751 void FullCodeGenerator::EnterFinallyBlock() {
4752 DCHECK(!result_register().is(rdx));
4753 DCHECK(!result_register().is(rcx));
4754 // Cook return address on top of stack (smi encoded Code* delta)
4755 __ PopReturnAddressTo(rdx);
4756 __ Move(rcx, masm_->CodeObject());
4758 __ Integer32ToSmi(rdx, rdx);
4761 // Store result register while executing finally block.
4762 __ Push(result_register());
4764 // Store pending message while executing finally block.
4765 ExternalReference pending_message_obj =
4766 ExternalReference::address_of_pending_message_obj(isolate());
4767 __ Load(rdx, pending_message_obj);
4770 ExternalReference has_pending_message =
4771 ExternalReference::address_of_has_pending_message(isolate());
4772 __ Load(rdx, has_pending_message);
4773 __ Integer32ToSmi(rdx, rdx); // Smi-encode the flag so it is GC-safe.
4776 ExternalReference pending_message_script =
4777 ExternalReference::address_of_pending_message_script(isolate());
4778 __ Load(rdx, pending_message_script);
// Emitted on exit from a finally block: mirror image of EnterFinallyBlock.
// Restores the pending-message state (script, has-message flag, object)
// from the stack back into the isolate, restores the result register, and
// "uncooks" the saved return address (Smi delta -> absolute address via
// the code object).
4783 void FullCodeGenerator::ExitFinallyBlock() {
4784 DCHECK(!result_register().is(rdx));
4785 DCHECK(!result_register().is(rcx));
4786 // Restore pending message from stack.
4788 ExternalReference pending_message_script =
4789 ExternalReference::address_of_pending_message_script(isolate());
4790 __ Store(pending_message_script, rdx);
4793 __ SmiToInteger32(rdx, rdx); // Undo the Smi encoding of the flag.
4794 ExternalReference has_pending_message =
4795 ExternalReference::address_of_has_pending_message(isolate());
4796 __ Store(has_pending_message, rdx);
4799 ExternalReference pending_message_obj =
4800 ExternalReference::address_of_pending_message_obj(isolate());
4801 __ Store(pending_message_obj, rdx);
4803 // Restore result register from stack.
4804 __ Pop(result_register());
4806 // Uncook return address.
4808 __ SmiToInteger32(rdx, rdx);
4809 __ Move(rcx, masm_->CodeObject());
4817 #define __ ACCESS_MASM(masm())
// Emits the code run when control leaves a try-finally via a non-local
// exit: drops the stack down to the handler block, restores the context
// register (and its frame slot) from the handler if a context change is
// pending, then calls the finally entry point. Resets *context_length so
// enclosing exits do not restore the context again.
4819 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4821 int* context_length) {
4822 // The macros used here must preserve the result register.
4824 // Because the handler block contains the context of the finally
4825 // code, we can restore it directly from there for the finally code
4826 // rather than iteratively unwinding contexts via their previous
4828 __ Drop(*stack_depth); // Down to the handler block.
4829 if (*context_length > 0) {
4830 // Restore the context to its dedicated register and the stack.
4831 __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4832 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4835 __ call(finally_entry_);
4838 *context_length = 0;
// x64 opcode bytes used by the BackEdgeTable patching code below: the jns
// short-jump opcode (0x79), the two bytes of the 2-byte nop (0x66 0x90)
// that replaces it, and the call rel32 opcode (0xe8) checked in DCHECKs.
4846 static const byte kJnsInstruction = 0x79;
4847 static const byte kNopByteOne = 0x66;
4848 static const byte kNopByteTwo = 0x90;
4850 static const byte kCallInstruction = 0xe8;
// Patches the back-edge check at |pc| in |unoptimized_code| to the given
// target state. The interrupt state keeps the "jns" that skips the call on
// a non-negative profiling counter; the OSR states overwrite the jns with
// a 2-byte nop so the call is taken unconditionally. In both cases the
// call target is retargeted to |replacement_code| and the incremental
// marker is notified of the code-target write.
4854 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4856 BackEdgeState target_state,
4857 Code* replacement_code) {
4858 Address call_target_address = pc - kIntSize;
4859 Address jns_instr_address = call_target_address - 3;
4860 Address jns_offset_address = call_target_address - 2;
4862 switch (target_state) {
4864 // sub <profiling_counter>, <delta> ;; Not changed
4866 // call <interrupt stub>
4868 *jns_instr_address = kJnsInstruction;
4869 *jns_offset_address = kJnsOffset;
4871 case ON_STACK_REPLACEMENT:
4872 case OSR_AFTER_STACK_CHECK:
4873 // sub <profiling_counter>, <delta> ;; Not changed
4876 // call <on-stack replacement>
4878 *jns_instr_address = kNopByteOne;
4879 *jns_offset_address = kNopByteTwo;
4883 Assembler::set_target_address_at(call_target_address,
4885 replacement_code->entry());
4886 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4887 unoptimized_code, call_target_address, replacement_code);
// Decodes the current patch state of the back-edge site at |pc|: a "jns"
// byte means the unpatched interrupt check; otherwise the 2-byte nop is
// expected and the state is determined by whether the call targets the
// OnStackReplacement builtin or (by DCHECKed elimination) the
// OsrAfterStackCheck builtin.
4891 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4893 Code* unoptimized_code,
4895 Address call_target_address = pc - kIntSize;
4896 Address jns_instr_address = call_target_address - 3;
4897 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4899 if (*jns_instr_address == kJnsInstruction) {
4900 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4901 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4902 Assembler::target_address_at(call_target_address,
4907 DCHECK_EQ(kNopByteOne, *jns_instr_address);
4908 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4910 if (Assembler::target_address_at(call_target_address,
4911 unoptimized_code) ==
4912 isolate->builtins()->OnStackReplacement()->entry()) {
4913 return ON_STACK_REPLACEMENT;
4916 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4917 Assembler::target_address_at(call_target_address,
4919 return OSR_AFTER_STACK_CHECK;
4923 } } // namespace v8::internal
4925 #endif // V8_TARGET_ARCH_X64