1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-stubs.h"
10 #include "src/codegen.h"
11 #include "src/compiler.h"
12 #include "src/debug.h"
13 #include "src/full-codegen.h"
14 #include "src/isolate-inl.h"
15 #include "src/parser.h"
16 #include "src/scopes.h"
21 #define __ ACCESS_MASM(masm_)
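// JumpPatchSite records the location of an inlined smi check so that the IC
// system can patch it later. In outline: testb clears the carry flag, so the
// jnc emitted by EmitJumpIfNotSmi is always taken (and the jc emitted by
// EmitJumpIfSmi never is) until the inline cache rewrites the condition to
// jnz/jz, which then branches on the smi tag bit left in the zero flag:
//
//   testb reg, Immediate(kSmiTagMask)
//   jnc <target>      ; unpatched: always taken
//   jnz <target>      ; patched:   taken only for non-smis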
24 class JumpPatchSite BASE_EMBEDDED {
26 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
28 info_emitted_ = false;
33 DCHECK(patch_site_.is_bound() == info_emitted_);
36 void EmitJumpIfNotSmi(Register reg,
38 Label::Distance near_jump = Label::kFar) {
39 __ testb(reg, Immediate(kSmiTagMask));
40 EmitJump(not_carry, target, near_jump); // Always taken before patched.
43 void EmitJumpIfSmi(Register reg,
45 Label::Distance near_jump = Label::kFar) {
46 __ testb(reg, Immediate(kSmiTagMask));
47 EmitJump(carry, target, near_jump); // Never taken before patched.
50 void EmitPatchInfo() {
51 if (patch_site_.is_bound()) {
52 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
53 DCHECK(is_uint8(delta_to_patch_site));
54 __ testl(rax, Immediate(delta_to_patch_site));
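// The testl above is never patched; its 8-bit immediate records the distance
// back to the patch site so the inline cache can locate and rewrite the jump.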
59 __ nop(); // Signals no inlined code.
64 // jc will be patched with jz, jnc will become jnz.
65 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
66 DCHECK(!patch_site_.is_bound() && !info_emitted_);
67 DCHECK(cc == carry || cc == not_carry);
68 __ bind(&patch_site_);
69 __ j(cc, target, near_jump);
72 MacroAssembler* masm_;
80 // Generate code for a JS function. On entry to the function the receiver
81 // and arguments have been pushed on the stack left to right, with the
82 // return address on top of them. The actual argument count matches the
83 // formal parameter count expected by the function.
85 // The live registers are:
86 // o rdi: the JS function object being called (i.e. ourselves)
88 // o rbp: our caller's frame pointer
89 // o rsp: stack pointer (pointing to return address)
91 // The function builds a JS frame. Please see JavaScriptFrameConstants in
92 // frames-x64.h for its layout.
93 void FullCodeGenerator::Generate() {
94 CompilationInfo* info = info_;
96 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
98 profiling_counter_ = isolate()->factory()->NewCell(
99 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
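// The profiling counter is a heap Cell initialized to the interrupt budget.
// It is decremented at loop back edges and at returns; when it goes negative
// the InterruptCheck builtin is called, which drives the decision to optimize
// this function (or perform on-stack replacement) and resets the budget.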
100 SetFunctionPosition(function());
101 Comment cmnt(masm_, "[ function compiled by full code generator");
103 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
106 if (strlen(FLAG_stop_at) > 0 &&
107 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
112 // Sloppy mode functions and builtins need to replace the receiver with the
113 // global proxy when called as functions (without an explicit receiver object).
115 if (info->strict_mode() == SLOPPY && !info->is_native()) {
117 // +1 for return address.
118 StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
119 __ movp(rcx, args.GetReceiverOperand());
121 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
122 __ j(not_equal, &ok, Label::kNear);
124 __ movp(rcx, GlobalObjectOperand());
125 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));
127 __ movp(args.GetReceiverOperand(), rcx);
132 // Open a frame scope to indicate that there is a frame on the stack. The
133 // MANUAL indicates that the scope shouldn't actually generate code to set up
134 // the frame (that is done below).
135 FrameScope frame_scope(masm_, StackFrame::MANUAL);
137 info->set_prologue_offset(masm_->pc_offset());
138 __ Prologue(info->IsCodePreAgingActive());
139 info->AddNoFrameRange(0, masm_->pc_offset());
141 { Comment cmnt(masm_, "[ Allocate locals");
142 int locals_count = info->scope()->num_stack_slots();
143 // Generators allocate locals, if any, in context slots.
144 DCHECK(!info->function()->is_generator() || locals_count == 0);
145 if (locals_count == 1) {
146 __ PushRoot(Heap::kUndefinedValueRootIndex);
147 } else if (locals_count > 1) {
148 if (locals_count >= 128) {
151 __ subp(rcx, Immediate(locals_count * kPointerSize));
152 __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
153 __ j(above_equal, &ok, Label::kNear);
154 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
157 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
158 const int kMaxPushes = 32;
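// Locals are initialized to undefined. To keep code size bounded for large
// frames, the pushes are emitted in a counted loop of kMaxPushes at a time,
// with the remainder emitted straight-line below.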
159 if (locals_count >= kMaxPushes) {
160 int loop_iterations = locals_count / kMaxPushes;
161 __ movp(rcx, Immediate(loop_iterations));
163 __ bind(&loop_header);
165 for (int i = 0; i < kMaxPushes; i++) {
168 // Continue loop if not done.
170 __ j(not_zero, &loop_header, Label::kNear);
172 int remaining = locals_count % kMaxPushes;
173 // Emit the remaining pushes.
174 for (int i = 0; i < remaining; i++) {
180 bool function_in_register = true;
182 // Possibly allocate a local context.
183 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184 if (heap_slots > 0) {
185 Comment cmnt(masm_, "[ Allocate context");
186 bool need_write_barrier = true;
187 // Argument to NewContext is the function, which is still in rdi.
188 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
190 __ Push(info->scope()->GetScopeInfo());
191 __ CallRuntime(Runtime::kNewGlobalContext, 2);
192 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
193 FastNewContextStub stub(isolate(), heap_slots);
195 // Result of FastNewContextStub is always in new space.
196 need_write_barrier = false;
199 __ CallRuntime(Runtime::kNewFunctionContext, 1);
201 function_in_register = false;
202 // Context is returned in rax. It replaces the context passed to us.
203 // It's saved in the stack and kept live in rsi.
205 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
207 // Copy any necessary parameters into the context.
208 int num_parameters = info->scope()->num_parameters();
209 for (int i = 0; i < num_parameters; i++) {
210 Variable* var = scope()->parameter(i);
211 if (var->IsContextSlot()) {
212 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
213 (num_parameters - 1 - i) * kPointerSize;
214 // Load parameter from stack.
215 __ movp(rax, Operand(rbp, parameter_offset));
216 // Store it in the context.
217 int context_offset = Context::SlotOffset(var->index());
218 __ movp(Operand(rsi, context_offset), rax);
219 // Update the write barrier. This clobbers rax and rbx.
220 if (need_write_barrier) {
221 __ RecordWriteContextSlot(
222 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
223 } else if (FLAG_debug_code) {
225 __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
226 __ Abort(kExpectedNewSpaceObject);
233 // Possibly allocate an arguments object.
234 Variable* arguments = scope()->arguments();
235 if (arguments != NULL) {
236 // Arguments object must be allocated after the context object, in
237 // case the "arguments" or ".arguments" variables are in the context.
238 Comment cmnt(masm_, "[ Allocate arguments object");
239 if (function_in_register) {
242 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
244 // The receiver is just before the parameters on the caller's stack.
245 int num_parameters = info->scope()->num_parameters();
246 int offset = num_parameters * kPointerSize;
248 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
250 __ Push(Smi::FromInt(num_parameters));
251 // Arguments to ArgumentsAccessStub:
252 // function, receiver address, parameter count.
253 // The stub will rewrite receiver and parameter count if the previous
254 // stack frame was an arguments adapter frame.
255 ArgumentsAccessStub::Type type;
256 if (strict_mode() == STRICT) {
257 type = ArgumentsAccessStub::NEW_STRICT;
258 } else if (function()->has_duplicate_parameters()) {
259 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
261 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
263 ArgumentsAccessStub stub(isolate(), type);
266 SetVar(arguments, rax, rbx, rdx);
270 __ CallRuntime(Runtime::kTraceEnter, 0);
273 // Visit the declarations and body unless there is an illegal redeclaration.
275 if (scope()->HasIllegalRedeclaration()) {
276 Comment cmnt(masm_, "[ Declarations");
277 scope()->VisitIllegalRedeclaration(this);
280 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
281 { Comment cmnt(masm_, "[ Declarations");
282 // For named function expressions, declare the function name as a constant.
284 if (scope()->is_function_scope() && scope()->function() != NULL) {
285 VariableDeclaration* function = scope()->function();
286 DCHECK(function->proxy()->var()->mode() == CONST ||
287 function->proxy()->var()->mode() == CONST_LEGACY);
288 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
289 VisitVariableDeclaration(function);
291 VisitDeclarations(scope()->declarations());
294 { Comment cmnt(masm_, "[ Stack check");
295 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
297 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
298 __ j(above_equal, &ok, Label::kNear);
299 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
303 { Comment cmnt(masm_, "[ Body");
304 DCHECK(loop_depth() == 0);
305 VisitStatements(function()->body());
306 DCHECK(loop_depth() == 0);
310 // Always emit a 'return undefined' in case control fell off the end of the body.
312 { Comment cmnt(masm_, "[ return <undefined>;");
313 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
314 EmitReturnSequence();
319 void FullCodeGenerator::ClearAccumulator() {
324 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
325 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
326 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
327 Smi::FromInt(-delta));
331 void FullCodeGenerator::EmitProfilingCounterReset() {
332 int reset_value = FLAG_interrupt_budget;
333 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
334 __ Move(kScratchRegister, Smi::FromInt(reset_value));
335 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
339 static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
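// kJnsOffset encodes where the jns that guards the interrupt check sits
// relative to the patchable call, so the back-edge patching code can find and
// rewrite the sequence (e.g. to trigger on-stack replacement). The fixed
// layout is enforced by the PredictableCodeSizeScope used below.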
342 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
343 Label* back_edge_target) {
344 Comment cmnt(masm_, "[ Back edge bookkeeping");
347 DCHECK(back_edge_target->is_bound());
348 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
349 int weight = Min(kMaxBackEdgeWeight,
350 Max(1, distance / kCodeSizeMultiplier));
351 EmitProfilingCounterDecrement(weight);
353 __ j(positive, &ok, Label::kNear);
355 PredictableCodeSizeScope predictable_code_size_scope(masm_, kJnsOffset);
356 DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
357 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
359 // Record a mapping of this PC offset to the OSR id. This is used to find
360 // the AST id from the unoptimized code in order to use it as a key into
361 // the deoptimization input data found in the optimized code.
362 RecordBackEdge(stmt->OsrEntryId());
364 EmitProfilingCounterReset();
368 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
369 // Record a mapping of the OSR id to this PC. This is used if the OSR
370 // entry becomes the target of a bailout. We don't expect it to be, but
371 // we want it to work if it is.
372 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
376 void FullCodeGenerator::EmitReturnSequence() {
377 Comment cmnt(masm_, "[ Return sequence");
378 if (return_label_.is_bound()) {
379 __ jmp(&return_label_);
381 __ bind(&return_label_);
384 __ CallRuntime(Runtime::kTraceExit, 1);
386 // Pretend that the exit is a backwards jump to the entry.
388 if (info_->ShouldSelfOptimize()) {
389 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
391 int distance = masm_->pc_offset();
392 weight = Min(kMaxBackEdgeWeight,
393 Max(1, distance / kCodeSizeMultiplier));
395 EmitProfilingCounterDecrement(weight);
397 __ j(positive, &ok, Label::kNear);
399 __ call(isolate()->builtins()->InterruptCheck(),
400 RelocInfo::CODE_TARGET);
402 EmitProfilingCounterReset();
405 // Add a label for checking the size of the code used for returning.
406 Label check_exit_codesize;
407 masm_->bind(&check_exit_codesize);
409 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
411 // Do not use the leave instruction here because it is too short to
412 // patch with the code required by the debugger.
415 int no_frame_start = masm_->pc_offset();
417 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
418 __ Ret(arguments_bytes, rcx);
420 // Add padding that will be overwritten by a debugger breakpoint. We
421 // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
422 // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
423 const int kPadding = Assembler::kJSReturnSequenceLength -
424 (kPointerSize == kInt64Size ? 7 : 6);
425 for (int i = 0; i < kPadding; ++i) {
428 // Check that the size of the code used for returning is large enough
429 // for the debugger's requirements.
430 DCHECK(Assembler::kJSReturnSequenceLength <=
431 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
433 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
438 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
439 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
443 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
444 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
445 codegen()->GetVar(result_register(), var);
449 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
450 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
451 MemOperand operand = codegen()->VarOperand(var, result_register());
456 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
457 codegen()->GetVar(result_register(), var);
458 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
459 codegen()->DoTest(this);
463 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
467 void FullCodeGenerator::AccumulatorValueContext::Plug(
468 Heap::RootListIndex index) const {
469 __ LoadRoot(result_register(), index);
473 void FullCodeGenerator::StackValueContext::Plug(
474 Heap::RootListIndex index) const {
479 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
480 codegen()->PrepareForBailoutBeforeSplit(condition(),
484 if (index == Heap::kUndefinedValueRootIndex ||
485 index == Heap::kNullValueRootIndex ||
486 index == Heap::kFalseValueRootIndex) {
487 if (false_label_ != fall_through_) __ jmp(false_label_);
488 } else if (index == Heap::kTrueValueRootIndex) {
489 if (true_label_ != fall_through_) __ jmp(true_label_);
491 __ LoadRoot(result_register(), index);
492 codegen()->DoTest(this);
497 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502 Handle<Object> lit) const {
504 __ SafeMove(result_register(), Smi::cast(*lit));
506 __ Move(result_register(), lit);
511 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
513 __ SafePush(Smi::cast(*lit));
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
521 codegen()->PrepareForBailoutBeforeSplit(condition(),
525 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
526 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527 if (false_label_ != fall_through_) __ jmp(false_label_);
528 } else if (lit->IsTrue() || lit->IsJSObject()) {
529 if (true_label_ != fall_through_) __ jmp(true_label_);
530 } else if (lit->IsString()) {
531 if (String::cast(*lit)->length() == 0) {
532 if (false_label_ != fall_through_) __ jmp(false_label_);
534 if (true_label_ != fall_through_) __ jmp(true_label_);
536 } else if (lit->IsSmi()) {
537 if (Smi::cast(*lit)->value() == 0) {
538 if (false_label_ != fall_through_) __ jmp(false_label_);
540 if (true_label_ != fall_through_) __ jmp(true_label_);
543 // For simplicity we always test the accumulator register.
544 __ Move(result_register(), lit);
545 codegen()->DoTest(this);
550 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
551 Register reg) const {
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
559 Register reg) const {
562 __ Move(result_register(), reg);
566 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
567 Register reg) const {
569 if (count > 1) __ Drop(count - 1);
570 __ movp(Operand(rsp, 0), reg);
574 void FullCodeGenerator::TestContext::DropAndPlug(int count,
575 Register reg) const {
577 // For simplicity we always test the accumulator register.
579 __ Move(result_register(), reg);
580 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
581 codegen()->DoTest(this);
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586 Label* materialize_false) const {
587 DCHECK(materialize_true == materialize_false);
588 __ bind(materialize_true);
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593 Label* materialize_true,
594 Label* materialize_false) const {
596 __ bind(materialize_true);
597 __ Move(result_register(), isolate()->factory()->true_value());
598 __ jmp(&done, Label::kNear);
599 __ bind(materialize_false);
600 __ Move(result_register(), isolate()->factory()->false_value());
605 void FullCodeGenerator::StackValueContext::Plug(
606 Label* materialize_true,
607 Label* materialize_false) const {
609 __ bind(materialize_true);
610 __ Push(isolate()->factory()->true_value());
611 __ jmp(&done, Label::kNear);
612 __ bind(materialize_false);
613 __ Push(isolate()->factory()->false_value());
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619 Label* materialize_false) const {
620 DCHECK(materialize_true == true_label_);
621 DCHECK(materialize_false == false_label_);
625 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630 Heap::RootListIndex value_root_index =
631 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
632 __ LoadRoot(result_register(), value_root_index);
636 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
637 Heap::RootListIndex value_root_index =
638 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
639 __ PushRoot(value_root_index);
643 void FullCodeGenerator::TestContext::Plug(bool flag) const {
644 codegen()->PrepareForBailoutBeforeSplit(condition(),
649 if (true_label_ != fall_through_) __ jmp(true_label_);
651 if (false_label_ != fall_through_) __ jmp(false_label_);
656 void FullCodeGenerator::DoTest(Expression* condition,
659 Label* fall_through) {
660 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
661 CallIC(ic, condition->test_id());
662 __ testp(result_register(), result_register());
663 // The stub returns nonzero for true.
664 Split(not_zero, if_true, if_false, fall_through);
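// Split emits the minimal number of jumps for a test: it falls through to
// whichever of if_true/if_false equals fall_through and jumps to the other;
// if neither is the fall-through, it jumps to if_true on cc and then
// unconditionally to if_false.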
668 void FullCodeGenerator::Split(Condition cc,
671 Label* fall_through) {
672 if (if_false == fall_through) {
674 } else if (if_true == fall_through) {
675 __ j(NegateCondition(cc), if_false);
683 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
684 DCHECK(var->IsStackAllocated());
685 // Offset is negative because higher indexes are at lower addresses.
686 int offset = -var->index() * kPointerSize;
687 // Adjust by a (parameter or local) base offset.
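// Parameters live in the caller's part of the frame, above the saved frame
// pointer and return address; locals live below rbp, starting at
// JavaScriptFrameConstants::kLocal0Offset.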
688 if (var->IsParameter()) {
689 offset += kFPOnStackSize + kPCOnStackSize +
690 (info_->scope()->num_parameters() - 1) * kPointerSize;
692 offset += JavaScriptFrameConstants::kLocal0Offset;
694 return Operand(rbp, offset);
698 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
699 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
700 if (var->IsContextSlot()) {
701 int context_chain_length = scope()->ContextChainLength(var->scope());
702 __ LoadContext(scratch, context_chain_length);
703 return ContextOperand(scratch, var->index());
705 return StackOperand(var);
710 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
711 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
712 MemOperand location = VarOperand(var, dest);
713 __ movp(dest, location);
717 void FullCodeGenerator::SetVar(Variable* var,
721 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
722 DCHECK(!scratch0.is(src));
723 DCHECK(!scratch0.is(scratch1));
724 DCHECK(!scratch1.is(src));
725 MemOperand location = VarOperand(var, scratch0);
726 __ movp(location, src);
728 // Emit the write barrier code if the location is in the heap.
729 if (var->IsContextSlot()) {
730 int offset = Context::SlotOffset(var->index());
731 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
736 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
737 bool should_normalize,
740 // Only prepare for bailouts before splits if we're in a test
741 // context. Otherwise, we let the Visit function deal with the
742 // preparation to avoid preparing with the same AST id twice.
743 if (!context()->IsTest() || !info_->IsOptimizable()) return;
746 if (should_normalize) __ jmp(&skip, Label::kNear);
747 PrepareForBailout(expr, TOS_REG);
748 if (should_normalize) {
749 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
750 Split(equal, if_true, if_false, NULL);
756 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
757 // The variable in the declaration always resides in the current context.
758 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
759 if (generate_debug_code_) {
760 // Check that we're not inside a with or catch context.
761 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
762 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
763 __ Check(not_equal, kDeclarationInWithContext);
764 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
765 __ Check(not_equal, kDeclarationInCatchContext);
770 void FullCodeGenerator::VisitVariableDeclaration(
771 VariableDeclaration* declaration) {
772 // If it was not possible to allocate the variable at compile time, we
773 // need to "declare" it at runtime to make sure it actually exists in the dynamic scope chain.
775 VariableProxy* proxy = declaration->proxy();
776 VariableMode mode = declaration->mode();
777 Variable* variable = proxy->var();
778 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
779 switch (variable->location()) {
780 case Variable::UNALLOCATED:
781 globals_->Add(variable->name(), zone());
782 globals_->Add(variable->binding_needs_init()
783 ? isolate()->factory()->the_hole_value()
784 : isolate()->factory()->undefined_value(),
788 case Variable::PARAMETER:
789 case Variable::LOCAL:
791 Comment cmnt(masm_, "[ VariableDeclaration");
792 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
793 __ movp(StackOperand(variable), kScratchRegister);
797 case Variable::CONTEXT:
799 Comment cmnt(masm_, "[ VariableDeclaration");
800 EmitDebugCheckDeclarationContext(variable);
801 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
802 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
803 // No write barrier since the hole value is in old space.
804 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
808 case Variable::LOOKUP: {
809 Comment cmnt(masm_, "[ VariableDeclaration");
811 __ Push(variable->name());
812 // Declaration nodes are always introduced in one of four modes.
813 DCHECK(IsDeclaredVariableMode(mode));
814 PropertyAttributes attr =
815 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
816 __ Push(Smi::FromInt(attr));
817 // Push initial value, if any.
818 // Note: For variables we must not push an initial value (such as
819 // 'undefined') because we may have a (legal) redeclaration and we
820 // must not destroy the current value.
822 __ PushRoot(Heap::kTheHoleValueRootIndex);
824 __ Push(Smi::FromInt(0)); // Indicates no initial value.
826 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
833 void FullCodeGenerator::VisitFunctionDeclaration(
834 FunctionDeclaration* declaration) {
835 VariableProxy* proxy = declaration->proxy();
836 Variable* variable = proxy->var();
837 switch (variable->location()) {
838 case Variable::UNALLOCATED: {
839 globals_->Add(variable->name(), zone());
840 Handle<SharedFunctionInfo> function =
841 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
842 // Check for stack-overflow exception.
843 if (function.is_null()) return SetStackOverflow();
844 globals_->Add(function, zone());
848 case Variable::PARAMETER:
849 case Variable::LOCAL: {
850 Comment cmnt(masm_, "[ FunctionDeclaration");
851 VisitForAccumulatorValue(declaration->fun());
852 __ movp(StackOperand(variable), result_register());
856 case Variable::CONTEXT: {
857 Comment cmnt(masm_, "[ FunctionDeclaration");
858 EmitDebugCheckDeclarationContext(variable);
859 VisitForAccumulatorValue(declaration->fun());
860 __ movp(ContextOperand(rsi, variable->index()), result_register());
861 int offset = Context::SlotOffset(variable->index());
862 // We know that we have written a function, which is not a smi.
863 __ RecordWriteContextSlot(rsi,
870 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
874 case Variable::LOOKUP: {
875 Comment cmnt(masm_, "[ FunctionDeclaration");
877 __ Push(variable->name());
878 __ Push(Smi::FromInt(NONE));
879 VisitForStackValue(declaration->fun());
880 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
887 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
888 Variable* variable = declaration->proxy()->var();
889 DCHECK(variable->location() == Variable::CONTEXT);
890 DCHECK(variable->interface()->IsFrozen());
892 Comment cmnt(masm_, "[ ModuleDeclaration");
893 EmitDebugCheckDeclarationContext(variable);
895 // Load instance object.
896 __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
897 __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
898 __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
901 __ movp(ContextOperand(rsi, variable->index()), rax);
902 // We know that we have written a module, which is not a smi.
903 __ RecordWriteContextSlot(rsi,
904 Context::SlotOffset(variable->index()),
910 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
912 // Traverse into body.
913 Visit(declaration->module());
917 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
918 VariableProxy* proxy = declaration->proxy();
919 Variable* variable = proxy->var();
920 switch (variable->location()) {
921 case Variable::UNALLOCATED:
925 case Variable::CONTEXT: {
926 Comment cmnt(masm_, "[ ImportDeclaration");
927 EmitDebugCheckDeclarationContext(variable);
932 case Variable::PARAMETER:
933 case Variable::LOCAL:
934 case Variable::LOOKUP:
940 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
946 // Call the runtime to declare the globals.
947 __ Push(rsi); // The context is the first argument.
949 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
950 __ CallRuntime(Runtime::kDeclareGlobals, 3);
951 // Return value is ignored.
955 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
956 // Call the runtime to declare the modules.
957 __ Push(descriptions);
958 __ CallRuntime(Runtime::kDeclareModules, 1);
959 // Return value is ignored.
963 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
964 Comment cmnt(masm_, "[ SwitchStatement");
965 Breakable nested_statement(this, stmt);
966 SetStatementPosition(stmt);
968 // Keep the switch value on the stack until a case matches.
969 VisitForStackValue(stmt->tag());
970 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
972 ZoneList<CaseClause*>* clauses = stmt->cases();
973 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
975 Label next_test; // Recycled for each test.
976 // Compile all the tests with branches to their bodies.
977 for (int i = 0; i < clauses->length(); i++) {
978 CaseClause* clause = clauses->at(i);
979 clause->body_target()->Unuse();
981 // The default is not a test, but remember it as final fall through.
982 if (clause->is_default()) {
983 default_clause = clause;
987 Comment cmnt(masm_, "[ Case comparison");
991 // Compile the label expression.
992 VisitForAccumulatorValue(clause->label());
994 // Perform the comparison as if via '==='.
995 __ movp(rdx, Operand(rsp, 0)); // Switch value.
996 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
997 JumpPatchSite patch_site(masm_);
998 if (inline_smi_code) {
1002 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
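// The patch site records this smi check; if both operands are smis the
// strict-equality comparison is done inline below, otherwise control falls
// through to the CompareIC stub call, which can later patch the check.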
1005 __ j(not_equal, &next_test);
1006 __ Drop(1); // Switch value is no longer needed.
1007 __ jmp(clause->body_target());
1008 __ bind(&slow_case);
1011 // Record position before stub call for type feedback.
1012 SetSourcePosition(clause->position());
1013 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1014 CallIC(ic, clause->CompareId());
1015 patch_site.EmitPatchInfo();
1018 __ jmp(&skip, Label::kNear);
1019 PrepareForBailout(clause, TOS_REG);
1020 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1021 __ j(not_equal, &next_test);
1023 __ jmp(clause->body_target());
1027 __ j(not_equal, &next_test);
1028 __ Drop(1); // Switch value is no longer needed.
1029 __ jmp(clause->body_target());
1032 // Discard the test value and jump to the default if present, otherwise to
1033 // the end of the statement.
1034 __ bind(&next_test);
1035 __ Drop(1); // Switch value is no longer needed.
1036 if (default_clause == NULL) {
1037 __ jmp(nested_statement.break_label());
1039 __ jmp(default_clause->body_target());
1042 // Compile all the case bodies.
1043 for (int i = 0; i < clauses->length(); i++) {
1044 Comment cmnt(masm_, "[ Case body");
1045 CaseClause* clause = clauses->at(i);
1046 __ bind(clause->body_target());
1047 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1048 VisitStatements(clause->statements());
1051 __ bind(nested_statement.break_label());
1052 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1056 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1057 Comment cmnt(masm_, "[ ForInStatement");
1058 int slot = stmt->ForInFeedbackSlot();
1059 SetStatementPosition(stmt);
1062 ForIn loop_statement(this, stmt);
1063 increment_loop_depth();
1065 // Get the object to enumerate over. If the object is null or undefined, skip
1066 // over the loop. See ECMA-262 version 5, section 12.6.4.
1067 VisitForAccumulatorValue(stmt->enumerable());
1068 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1070 Register null_value = rdi;
1071 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1072 __ cmpp(rax, null_value);
1075 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1077 // Convert the object to a JS object.
1078 Label convert, done_convert;
1079 __ JumpIfSmi(rax, &convert);
1080 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1081 __ j(above_equal, &done_convert);
1084 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1085 __ bind(&done_convert);
1088 // Check for proxies.
1090 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1091 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1092 __ j(below_equal, &call_runtime);
1094 // Check cache validity in generated code. This is a fast case for
1095 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1096 // guarantee cache validity, call the runtime system to check cache
1097 // validity or get the property names in a fixed array.
1098 __ CheckEnumCache(null_value, &call_runtime);
1100 // The enum cache is valid. Load the map of the object being
1101 // iterated over and use the cache for the iteration.
1103 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1104 __ jmp(&use_cache, Label::kNear);
1106 // Get the set of properties to enumerate.
1107 __ bind(&call_runtime);
1108 __ Push(rax); // Duplicate the enumerable object on the stack.
1109 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1111 // If we got a map from the runtime call, we can do a fast
1112 // modification check. Otherwise, we got a fixed array, and we have
1113 // to do a slow check.
1115 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1116 Heap::kMetaMapRootIndex);
1117 __ j(not_equal, &fixed_array);
1119 // We got a map in register rax. Get the enumeration cache from it.
1120 __ bind(&use_cache);
1122 Label no_descriptors;
1124 __ EnumLength(rdx, rax);
1125 __ Cmp(rdx, Smi::FromInt(0));
1126 __ j(equal, &no_descriptors);
1128 __ LoadInstanceDescriptors(rax, rcx);
1129 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1130 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1132 // Set up the four remaining stack slots.
1133 __ Push(rax); // Map.
1134 __ Push(rcx); // Enumeration cache.
1135 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1136 __ Push(Smi::FromInt(0)); // Initial index.
1139 __ bind(&no_descriptors);
1140 __ addp(rsp, Immediate(kPointerSize));
1143 // We got a fixed array in register rax. Iterate through that.
1145 __ bind(&fixed_array);
1147 // No need for a write barrier, we are storing a Smi in the feedback vector.
1148 __ Move(rbx, FeedbackVector());
1149 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1150 TypeFeedbackInfo::MegamorphicSentinel(isolate()));
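// Writing the megamorphic sentinel into the feedback slot records that this
// for-in took the generic (fixed array) path, so optimized code will not
// assume a fast enum-cache iteration here.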
1151 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1152 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1153 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1154 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1155 __ j(above, &non_proxy);
1156 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1157 __ bind(&non_proxy);
1158 __ Push(rbx); // Smi
1159 __ Push(rax); // Array
1160 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1161 __ Push(rax); // Fixed array length (as smi).
1162 __ Push(Smi::FromInt(0)); // Initial index.
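// For-in loop state on the stack, addressed below as Operand(rsp, i * kPointerSize):
//   0: current index (smi)
//   1: length / number of valid entries (smi)
//   2: fixed array of keys (or enum cache)
//   3: map of the enumerable (fast case) or a smi (slow case: 1, or 0 for proxies)
//   4: the enumerable object itself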
1164 // Generate code for doing the condition check.
1165 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1167 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1168 __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1169 __ j(above_equal, loop_statement.break_label());
1171 // Get the current entry of the array into register rbx.
1172 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1173 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1174 __ movp(rbx, FieldOperand(rbx,
1177 FixedArray::kHeaderSize));
1179 // Get the expected map from the stack or a smi in the
1180 // permanent slow case into register rdx.
1181 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1183 // Check if the expected map still matches that of the enumerable.
1184 // If not, we may have to filter the key.
1186 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1187 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1188 __ j(equal, &update_each, Label::kNear);
1190 // For proxies, no filtering is done.
1191 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1192 __ Cmp(rdx, Smi::FromInt(0));
1193 __ j(equal, &update_each, Label::kNear);
1195 // Convert the entry to a string or null if it isn't a property
1196 // anymore. If the property has been removed while iterating, we just skip it.
1198 __ Push(rcx); // Enumerable.
1199 __ Push(rbx); // Current entry.
1200 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1201 __ Cmp(rax, Smi::FromInt(0));
1202 __ j(equal, loop_statement.continue_label());
1205 // Update the 'each' property or variable from the possibly filtered
1206 // entry in register rbx.
1207 __ bind(&update_each);
1208 __ movp(result_register(), rbx);
1209 // Perform the assignment as if via '='.
1210 { EffectContext context(this);
1211 EmitAssignment(stmt->each());
1214 // Generate code for the body of the loop.
1215 Visit(stmt->body());
1217 // Generate code for going to the next element by incrementing the
1218 // index (smi) stored on top of the stack.
1219 __ bind(loop_statement.continue_label());
1220 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1222 EmitBackEdgeBookkeeping(stmt, &loop);
1225 // Remove the pointers stored on the stack.
1226 __ bind(loop_statement.break_label());
1227 __ addp(rsp, Immediate(5 * kPointerSize));
1229 // Exit and decrement the loop depth.
1230 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1232 decrement_loop_depth();
1236 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1237 Comment cmnt(masm_, "[ ForOfStatement");
1238 SetStatementPosition(stmt);
1240 Iteration loop_statement(this, stmt);
1241 increment_loop_depth();
1243 // var iterator = iterable[Symbol.iterator]();
1244 VisitForEffect(stmt->assign_iterator());
1247 __ bind(loop_statement.continue_label());
1249 // result = iterator.next()
1250 VisitForEffect(stmt->next_result());
1252 // if (result.done) break;
1253 Label result_not_done;
1254 VisitForControl(stmt->result_done(),
1255 loop_statement.break_label(),
1258 __ bind(&result_not_done);
1260 // each = result.value
1261 VisitForEffect(stmt->assign_each());
1263 // Generate code for the body of the loop.
1264 Visit(stmt->body());
1266 // Check stack before looping.
1267 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1268 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1269 __ jmp(loop_statement.continue_label());
1271 // Exit and decrement the loop depth.
1272 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1273 __ bind(loop_statement.break_label());
1274 decrement_loop_depth();
1278 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1280 // Use the fast case closure allocation code that allocates in new
1281 // space for nested functions that don't need literals cloning. If
1282 // we're running with the --always-opt or the --prepare-always-opt
1283 // flag, we need to use the runtime function so that the new function
1284 // we are creating here gets a chance to have its code optimized and
1285 // doesn't just get a copy of the existing unoptimized code.
1286 if (!FLAG_always_opt &&
1287 !FLAG_prepare_always_opt &&
1289 scope()->is_function_scope() &&
1290 info->num_literals() == 0) {
1291 FastNewClosureStub stub(isolate(),
1292 info->strict_mode(),
1293 info->is_generator());
1300 ? isolate()->factory()->true_value()
1301 : isolate()->factory()->false_value());
1302 __ CallRuntime(Runtime::kNewClosure, 3);
1304 context()->Plug(rax);
1308 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1309 Comment cmnt(masm_, "[ VariableProxy");
1310 EmitVariableLoad(expr);
1314 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1315 TypeofState typeof_state,
1317 Register context = rsi;
1318 Register temp = rdx;
1322 if (s->num_heap_slots() > 0) {
1323 if (s->calls_sloppy_eval()) {
1324 // Check that extension is NULL.
1325 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1327 __ j(not_equal, slow);
1329 // Load next context in chain.
1330 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1331 // Walk the rest of the chain without clobbering rsi.
1334 // If no outer scope calls eval, we do not need to check more
1335 // context extensions. If we have reached an eval scope, we check
1336 // all extensions from this point.
1337 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1338 s = s->outer_scope();
1341 if (s != NULL && s->is_eval_scope()) {
1342 // Loop up the context chain. There is no frame effect so it is
1343 // safe to use raw labels here.
1345 if (!context.is(temp)) {
1346 __ movp(temp, context);
1348 // Load map for comparison into register, outside loop.
1349 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1351 // Terminate at native context.
1352 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1353 __ j(equal, &fast, Label::kNear);
1354 // Check that extension is NULL.
1355 __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1356 __ j(not_equal, slow);
1357 // Load next context in chain.
1358 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1363 // All extension objects were empty and it is safe to use a global load IC call.
1365 __ movp(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1366 __ Move(LoadIC::NameRegister(), proxy->var()->name());
1367 if (FLAG_vector_ics) {
1368 __ Move(LoadIC::SlotRegister(),
1369 Smi::FromInt(proxy->VariableFeedbackSlot()));
1372 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1379 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1381 DCHECK(var->IsContextSlot());
1382 Register context = rsi;
1383 Register temp = rbx;
1385 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1386 if (s->num_heap_slots() > 0) {
1387 if (s->calls_sloppy_eval()) {
1388 // Check that extension is NULL.
1389 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1391 __ j(not_equal, slow);
1393 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1394 // Walk the rest of the chain without clobbering rsi.
1398 // Check that last extension is NULL.
1399 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1400 __ j(not_equal, slow);
1402 // This function is used only for loads, not stores, so it's safe to
1403 // return an rsi-based operand (the write barrier cannot be allowed to
1404 // destroy the rsi register).
1405 return ContextOperand(context, var->index());
1409 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1410 TypeofState typeof_state,
1413 // Generate fast-case code for variables that might be shadowed by
1414 // eval-introduced variables. Eval is used a lot without
1415 // introducing variables. In those cases, we do not want to
1416 // perform a runtime call for all variables in the scope
1417 // containing the eval.
1418 Variable* var = proxy->var();
1419 if (var->mode() == DYNAMIC_GLOBAL) {
1420 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1422 } else if (var->mode() == DYNAMIC_LOCAL) {
1423 Variable* local = var->local_if_not_shadowed();
1424 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1425 if (local->mode() == LET || local->mode() == CONST ||
1426 local->mode() == CONST_LEGACY) {
1427 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1428 __ j(not_equal, done);
1429 if (local->mode() == CONST_LEGACY) {
1430 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1431 } else { // LET || CONST
1432 __ Push(var->name());
1433 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1441 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1442 // Record position before possible IC call.
1443 SetSourcePosition(proxy->position());
1444 Variable* var = proxy->var();
1446 // Three cases: global variables, lookup variables, and all other types of variables.
1448 switch (var->location()) {
1449 case Variable::UNALLOCATED: {
1450 Comment cmnt(masm_, "[ Global variable");
1451 __ Move(LoadIC::NameRegister(), var->name());
1452 __ movp(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1453 if (FLAG_vector_ics) {
1454 __ Move(LoadIC::SlotRegister(),
1455 Smi::FromInt(proxy->VariableFeedbackSlot()));
1457 CallLoadIC(CONTEXTUAL);
1458 context()->Plug(rax);
1462 case Variable::PARAMETER:
1463 case Variable::LOCAL:
1464 case Variable::CONTEXT: {
1465 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1467 if (var->binding_needs_init()) {
1468 // var->scope() may be NULL when the proxy is located in eval code and
1469 // refers to a potential outside binding. Currently those bindings are
1470 // always looked up dynamically, i.e. in that case
1471 // var->location() == LOOKUP.
1473 DCHECK(var->scope() != NULL);
1475 // Check if the binding really needs an initialization check. The check
1476 // can be skipped in the following situation: we have a LET or CONST
1477 // binding in harmony mode, both the Variable and the VariableProxy have
1478 // the same declaration scope (i.e. they are both in global code, in the
1479 // same function or in the same eval code) and the VariableProxy is in
1480 // the source physically located after the initializer of the variable.
1482 // We cannot skip any initialization checks for CONST in non-harmony
1483 // mode because const variables may be declared but never initialized:
1484 // if (false) { const x; }; var y = x;
1486 // The condition on the declaration scopes is a conservative check for
1487 // nested functions that access a binding and are called before the
1488 // binding is initialized:
1489 // function() { f(); let x = 1; function f() { x = 2; } }
1491 bool skip_init_check;
1492 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1493 skip_init_check = false;
1495 // Check that we always have a valid source position.
1496 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1497 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1498 skip_init_check = var->mode() != CONST_LEGACY &&
1499 var->initializer_position() < proxy->position();
1502 if (!skip_init_check) {
1503 // Let and const need a read barrier.
1506 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1507 __ j(not_equal, &done, Label::kNear);
1508 if (var->mode() == LET || var->mode() == CONST) {
1509 // Throw a reference error when using an uninitialized let/const
1510 // binding in harmony mode.
1511 __ Push(var->name());
1512 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1514 // Uninitialized const bindings outside of harmony mode are unholed.
1515 DCHECK(var->mode() == CONST_LEGACY);
1516 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1519 context()->Plug(rax);
1523 context()->Plug(var);
1527 case Variable::LOOKUP: {
1528 Comment cmnt(masm_, "[ Lookup slot");
1530 // Generate code for loading from variables potentially shadowed
1531 // by eval-introduced variables.
1532 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1534 __ Push(rsi); // Context.
1535 __ Push(var->name());
1536 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1538 context()->Plug(rax);
1545 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1546 Comment cmnt(masm_, "[ RegExpLiteral");
1548 // Registers will be used as follows:
1549 // rdi = JS function.
1550 // rcx = literals array.
1551 // rbx = regexp literal.
1552 // rax = regexp literal clone.
1553 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1554 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1555 int literal_offset =
1556 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1557 __ movp(rbx, FieldOperand(rcx, literal_offset));
1558 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1559 __ j(not_equal, &materialized, Label::kNear);
1561 // Create regexp literal using runtime function
1562 // Result will be in rax.
1564 __ Push(Smi::FromInt(expr->literal_index()));
1565 __ Push(expr->pattern());
1566 __ Push(expr->flags());
1567 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1570 __ bind(&materialized);
1571 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1572 Label allocated, runtime_allocate;
1573 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1576 __ bind(&runtime_allocate);
1578 __ Push(Smi::FromInt(size));
1579 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1582 __ bind(&allocated);
1583 // Copy the content into the newly allocated memory.
1584 // (Unroll copy loop once for better throughput).
1585 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1586 __ movp(rdx, FieldOperand(rbx, i));
1587 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1588 __ movp(FieldOperand(rax, i), rdx);
1589 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1591 if ((size % (2 * kPointerSize)) != 0) {
1592 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1593 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1595 context()->Plug(rax);
1599 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1600 if (expression == NULL) {
1601 __ PushRoot(Heap::kNullValueRootIndex);
1603 VisitForStackValue(expression);
1608 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1609 Comment cmnt(masm_, "[ ObjectLiteral");
1611 expr->BuildConstantProperties(isolate());
1612 Handle<FixedArray> constant_properties = expr->constant_properties();
1613 int flags = expr->fast_elements()
1614 ? ObjectLiteral::kFastElements
1615 : ObjectLiteral::kNoFlags;
1616 flags |= expr->has_function()
1617 ? ObjectLiteral::kHasFunction
1618 : ObjectLiteral::kNoFlags;
1619 int properties_count = constant_properties->length() / 2;
1620 if (expr->may_store_doubles() || expr->depth() > 1 ||
1621 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1622 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1623 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1624 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1625 __ Push(Smi::FromInt(expr->literal_index()));
1626 __ Push(constant_properties);
1627 __ Push(Smi::FromInt(flags));
1628 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1630 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1631 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1632 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1633 __ Move(rcx, constant_properties);
1634 __ Move(rdx, Smi::FromInt(flags));
1635 FastCloneShallowObjectStub stub(isolate(), properties_count);
1639 // If result_saved is true the result is on top of the stack. If
1640 // result_saved is false the result is in rax.
1641 bool result_saved = false;
1643 // Mark all computed expressions that are bound to a key that
1644 // is shadowed by a later occurrence of the same key. For the
1645 // marked expressions, no store code is emitted.
1646 expr->CalculateEmitStore(zone());
1648 AccessorTable accessor_table(zone());
1649 for (int i = 0; i < expr->properties()->length(); i++) {
1650 ObjectLiteral::Property* property = expr->properties()->at(i);
1651 if (property->IsCompileTimeValue()) continue;
1653 Literal* key = property->key();
1654 Expression* value = property->value();
1655 if (!result_saved) {
1656 __ Push(rax); // Save result on the stack
1657 result_saved = true;
1659 switch (property->kind()) {
1660 case ObjectLiteral::Property::CONSTANT:
1662 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1663 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1665 case ObjectLiteral::Property::COMPUTED:
1666 if (key->value()->IsInternalizedString()) {
1667 if (property->emit_store()) {
1668 VisitForAccumulatorValue(value);
1669 DCHECK(StoreIC::ValueRegister().is(rax));
1670 __ Move(StoreIC::NameRegister(), key->value());
1671 __ movp(StoreIC::ReceiverRegister(), Operand(rsp, 0));
1672 CallStoreIC(key->LiteralFeedbackId());
1673 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1675 VisitForEffect(value);
1679 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1680 VisitForStackValue(key);
1681 VisitForStackValue(value);
1682 if (property->emit_store()) {
1683 __ Push(Smi::FromInt(SLOPPY)); // Strict mode flag (SLOPPY).
1684 __ CallRuntime(Runtime::kSetProperty, 4);
1689 case ObjectLiteral::Property::PROTOTYPE:
1690 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1691 VisitForStackValue(value);
1692 if (property->emit_store()) {
1693 __ CallRuntime(Runtime::kSetPrototype, 2);
1698 case ObjectLiteral::Property::GETTER:
1699 accessor_table.lookup(key)->second->getter = value;
1701 case ObjectLiteral::Property::SETTER:
1702 accessor_table.lookup(key)->second->setter = value;
1707 // Emit code to define accessors, using only a single call to the runtime for
1708 // each pair of corresponding getters and setters.
1709 for (AccessorTable::Iterator it = accessor_table.begin();
1710 it != accessor_table.end();
1712 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1713 VisitForStackValue(it->first);
1714 EmitAccessor(it->second->getter);
1715 EmitAccessor(it->second->setter);
1716 __ Push(Smi::FromInt(NONE));
1717 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1720 if (expr->has_function()) {
1721 DCHECK(result_saved);
1722 __ Push(Operand(rsp, 0));
1723 __ CallRuntime(Runtime::kToFastProperties, 1);
1727 context()->PlugTOS();
1729 context()->Plug(rax);
1734 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1735 Comment cmnt(masm_, "[ ArrayLiteral");
1737 expr->BuildConstantElements(isolate());
1738 int flags = expr->depth() == 1
1739 ? ArrayLiteral::kShallowElements
1740 : ArrayLiteral::kNoFlags;
1742 ZoneList<Expression*>* subexprs = expr->values();
1743 int length = subexprs->length();
1744 Handle<FixedArray> constant_elements = expr->constant_elements();
1745 DCHECK_EQ(2, constant_elements->length());
1746 ElementsKind constant_elements_kind =
1747 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1748 bool has_constant_fast_elements =
1749 IsFastObjectElementsKind(constant_elements_kind);
1750 Handle<FixedArrayBase> constant_elements_values(
1751 FixedArrayBase::cast(constant_elements->get(1)));
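// constant_elements is a pair: element 0 is the ElementsKind (as a smi) and
// element 1 is the FixedArrayBase holding the constant values, as checked by
// the DCHECK above.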
1753 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1754 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1755 // If the only customer of allocation sites is transitioning, then
1756 // we can turn it off if we don't have anywhere else to transition to.
1757 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1760 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1761 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1762 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1763 __ Push(Smi::FromInt(expr->literal_index()));
1764 __ Push(constant_elements);
1765 __ Push(Smi::FromInt(flags));
1766 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1768 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1769 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1770 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1771 __ Move(rcx, constant_elements);
1772 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1776 bool result_saved = false; // Is the result saved to the stack?
1778 // Emit code to evaluate all the non-constant subexpressions and to store
1779 // them into the newly cloned array.
1780 for (int i = 0; i < length; i++) {
1781 Expression* subexpr = subexprs->at(i);
1782 // If the subexpression is a literal or a simple materialized literal it
1783 // is already set in the cloned array.
1784 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1786 if (!result_saved) {
1787 __ Push(rax); // array literal
1788 __ Push(Smi::FromInt(expr->literal_index()));
1789 result_saved = true;
1791 VisitForAccumulatorValue(subexpr);
1793 if (IsFastObjectElementsKind(constant_elements_kind)) {
1794 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: such
1795 // elements cannot transition, so there is no need to call the runtime stub.
1796 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1797 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1798 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1799 // Store the subexpression value in the array's elements.
1800 __ movp(FieldOperand(rbx, offset), result_register());
1801 // Update the write barrier for the array store.
1802 __ RecordWriteField(rbx, offset, result_register(), rcx,
1804 EMIT_REMEMBERED_SET,
1807 // Store the subexpression value in the array's elements.
1808 __ Move(rcx, Smi::FromInt(i));
1809 StoreArrayLiteralElementStub stub(isolate());
1813 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1817 __ addp(rsp, Immediate(kPointerSize)); // literal index
1818 context()->PlugTOS();
1820 context()->Plug(rax);
1825 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1826 DCHECK(expr->target()->IsValidReferenceExpression());
1828 Comment cmnt(masm_, "[ Assignment");
1830 // Left-hand side can only be a property, a global or a (parameter or local) slot variable.
1832 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1833 LhsKind assign_type = VARIABLE;
1834 Property* property = expr->target()->AsProperty();
1835 if (property != NULL) {
1836 assign_type = (property->key()->IsPropertyName())
1841 // Evaluate LHS expression.
1842 switch (assign_type) {
1844 // Nothing to do here.
1846 case NAMED_PROPERTY:
1847 if (expr->is_compound()) {
1848 // We need the receiver both on the stack and in the register.
1849 VisitForStackValue(property->obj());
1850 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, 0));
1852 VisitForStackValue(property->obj());
1855 case KEYED_PROPERTY: {
1856 if (expr->is_compound()) {
1857 VisitForStackValue(property->obj());
1858 VisitForStackValue(property->key());
1859 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, kPointerSize));
1860 __ movp(LoadIC::NameRegister(), Operand(rsp, 0));
1862 VisitForStackValue(property->obj());
1863 VisitForStackValue(property->key());
1869 // For compound assignments we need another deoptimization point after the
1870 // variable/property load.
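// (Illustrative example: for a compound assignment such as obj.x += 1, the
// current value of obj.x is loaded at this point, before the addition.)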
1871 if (expr->is_compound()) {
1872 { AccumulatorValueContext context(this);
1873 switch (assign_type) {
1875 EmitVariableLoad(expr->target()->AsVariableProxy());
1876 PrepareForBailout(expr->target(), TOS_REG);
1878 case NAMED_PROPERTY:
1879 EmitNamedPropertyLoad(property);
1880 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1882 case KEYED_PROPERTY:
1883 EmitKeyedPropertyLoad(property);
1884 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1889 Token::Value op = expr->binary_op();
1890 __ Push(rax); // Left operand goes on the stack.
1891 VisitForAccumulatorValue(expr->value());
1893 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE;
1896 SetSourcePosition(expr->position() + 1);
1897 AccumulatorValueContext context(this);
1898 if (ShouldInlineSmiCase(op)) {
1899 EmitInlineSmiBinaryOp(expr->binary_operation(),
1905 EmitBinaryOp(expr->binary_operation(), op, mode);
1907 // Deoptimization point in case the binary operation may have side effects.
1908 PrepareForBailout(expr->binary_operation(), TOS_REG);
1910 VisitForAccumulatorValue(expr->value());
1913 // Record source position before possible IC call.
1914 SetSourcePosition(expr->position());
1917 switch (assign_type) {
1919 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1921 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1922 context()->Plug(rax);
1924 case NAMED_PROPERTY:
1925 EmitNamedPropertyAssignment(expr);
1927 case KEYED_PROPERTY:
1928 EmitKeyedPropertyAssignment(expr);
1934 void FullCodeGenerator::VisitYield(Yield* expr) {
1935 Comment cmnt(masm_, "[ Yield");
1936 // Evaluate yielded value first; the initial iterator definition depends on
1937 // this. It stays on the stack while we update the iterator.
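// (Illustrative example: in function* g() { yield compute(); }, compute() is
// evaluated and pushed here before the generator suspends.)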
1938 VisitForStackValue(expr->expression());
1940 switch (expr->yield_kind()) {
1941 case Yield::SUSPEND:
1942 // Pop value from top-of-stack slot; box result into result register.
1943 EmitCreateIteratorResult(false);
1944 __ Push(result_register());
1946 case Yield::INITIAL: {
1947 Label suspend, continuation, post_runtime, resume;
1951 __ bind(&continuation);
1955 VisitForAccumulatorValue(expr->generator_object());
1956 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1957 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1958 Smi::FromInt(continuation.pos()));
1959 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1961 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, kDontSaveFPRegs);
1963 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1965 __ j(equal, &post_runtime);
1966 __ Push(rax); // generator object
1967 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1968 __ movp(context_register(),
1969 Operand(rbp, StandardFrameConstants::kContextOffset));
1970 __ bind(&post_runtime);
1972 __ Pop(result_register());
1973 EmitReturnSequence();
1976 context()->Plug(result_register());
1980 case Yield::FINAL: {
1981 VisitForAccumulatorValue(expr->generator_object());
1982 __ Move(FieldOperand(result_register(),
1983 JSGeneratorObject::kContinuationOffset),
1984 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
1985 // Pop value from top-of-stack slot, box result into result register.
1986 EmitCreateIteratorResult(true);
1987 EmitUnwindBeforeReturn();
1988 EmitReturnSequence();
1992 case Yield::DELEGATING: {
1993 VisitForStackValue(expr->generator_object());
1995 // Initial stack layout is as follows:
1996 // [sp + 1 * kPointerSize] iter
1997 // [sp + 0 * kPointerSize] g
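// (Illustrative note: a delegating yield corresponds to source of the form
// yield* iterable; the code below desugars it into a loop that calls
// iter.next() and, on exceptions, iter.throw().)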
1999 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2000 Label l_next, l_call, l_loop;
2001 Register load_receiver = LoadIC::ReceiverRegister();
2002 Register load_name = LoadIC::NameRegister();
2004 // Initial send value is undefined.
2005 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2008 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2010 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2011 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2013 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2014 __ Push(rax); // exception
2017 // try { received = %yield result }
2018 // Shuffle the received result above a try handler and yield it without re-boxing.
2021 __ Pop(rax); // result
2022 __ PushTryHandler(StackHandler::CATCH, expr->index());
2023 const int handler_size = StackHandlerConstants::kSize;
2024 __ Push(rax); // result
2026 __ bind(&l_continuation);
2028 __ bind(&l_suspend);
2029 const int generator_object_depth = kPointerSize + handler_size;
2030 __ movp(rax, Operand(rsp, generator_object_depth));
2032 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2033 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2034 Smi::FromInt(l_continuation.pos()));
2035 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2037 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, kDontSaveFPRegs);
2039 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2040 __ movp(context_register(),
2041 Operand(rbp, StandardFrameConstants::kContextOffset));
2042 __ Pop(rax); // result
2043 EmitReturnSequence();
2044 __ bind(&l_resume); // received in rax
2047 // receiver = iter; f = 'next'; arg = received;
2050 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2051 __ Push(load_name); // "next"
2052 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2053 __ Push(rax); // received
2055 // result = receiver[f](arg);
2057 __ movp(load_receiver, Operand(rsp, kPointerSize));
2058 if (FLAG_vector_ics) {
2059 __ Move(LoadIC::SlotRegister(),
2060 Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
2062 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2063 CallIC(ic, TypeFeedbackId::None());
2065 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2066 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2069 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2070 __ Drop(1); // The function is still on the stack; drop it.
2072 // if (!result.done) goto l_try;
2074 __ Move(load_receiver, rax);
2075 __ Push(load_receiver); // save result
2076 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2077 if (FLAG_vector_ics) {
2078 __ Move(LoadIC::SlotRegister(), Smi::FromInt(expr->DoneFeedbackSlot()));
2080 CallLoadIC(NOT_CONTEXTUAL); // rax=result.done
2081 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2083 __ testp(result_register(), result_register());
2087 __ Pop(load_receiver); // result
2088 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2089 if (FLAG_vector_ics) {
2090 __ Move(LoadIC::SlotRegister(),
2091 Smi::FromInt(expr->ValueFeedbackSlot()));
2093 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2094 context()->DropAndPlug(2, rax); // drop iter and g
2101 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2103 JSGeneratorObject::ResumeMode resume_mode) {
2104 // The value stays in rax, and is ultimately read by the resumed generator, as
2105 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2106 // is read to throw the value when the resumed generator is already closed.
2107 // rbx will hold the generator object until the activation has been resumed.
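// (Illustrative note: this is the machinery behind gen.next(value) and
// gen.throw(value); resume_mode selects whether the value is sent into or
// thrown inside the suspended activation.)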
2108 VisitForStackValue(generator);
2109 VisitForAccumulatorValue(value);
2112 // Check generator state.
2113 Label wrong_state, closed_state, done;
2114 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2115 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2116 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), Smi::FromInt(0));
2118 __ j(equal, &closed_state);
2119 __ j(less, &wrong_state);
2121 // Load suspended function and context.
2122 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2123 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2126 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2128 // Push holes for arguments to generator function.
2129 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2130 __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2131 SharedFunctionInfo::kFormalParameterCountOffset);
2132 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2133 Label push_argument_holes, push_frame;
2134 __ bind(&push_argument_holes);
2135 __ subp(rdx, Immediate(1));
2136 __ j(carry, &push_frame);
2138 __ jmp(&push_argument_holes);
2140 // Enter a new JavaScript frame, and initialize its slots as they were when
2141 // the generator was suspended.
2143 __ bind(&push_frame);
2144 __ call(&resume_frame);
2146 __ bind(&resume_frame);
2147 __ pushq(rbp); // Caller's frame pointer.
2149 __ Push(rsi); // Callee's context.
2150 __ Push(rdi); // Callee's JS Function.
2152 // Load the operand stack size.
2153 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2154 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2155 __ SmiToInteger32(rdx, rdx);
2157 // If we are sending a value and there is no operand stack, we can jump back in directly.
2159 if (resume_mode == JSGeneratorObject::NEXT) {
2161 __ cmpp(rdx, Immediate(0));
2162 __ j(not_zero, &slow_resume);
2163 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2164 __ SmiToInteger64(rcx,
2165 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2167 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2168 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2170 __ bind(&slow_resume);
2173 // Otherwise, we push holes for the operand stack and call the runtime to fix
2174 // up the stack and the handlers.
2175 Label push_operand_holes, call_resume;
2176 __ bind(&push_operand_holes);
2177 __ subp(rdx, Immediate(1));
2178 __ j(carry, &call_resume);
2180 __ jmp(&push_operand_holes);
2181 __ bind(&call_resume);
2183 __ Push(result_register());
2184 __ Push(Smi::FromInt(resume_mode));
2185 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2186 // Not reached: the runtime call returns elsewhere.
2187 __ Abort(kGeneratorFailedToResume);
2189 // Reach here when generator is closed.
2190 __ bind(&closed_state);
2191 if (resume_mode == JSGeneratorObject::NEXT) {
2192 // Return completed iterator result when generator is closed.
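// (Illustrative example: calling next() on an already finished generator is
// expected to produce { value: undefined, done: true }, which the code below
// materializes.)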
2193 __ PushRoot(Heap::kUndefinedValueRootIndex);
2194 // Pop value from top-of-stack slot; box result into result register.
2195 EmitCreateIteratorResult(true);
2197 // Throw the provided value.
2199 __ CallRuntime(Runtime::kThrow, 1);
2203 // Throw error if we attempt to operate on a running generator.
2204 __ bind(&wrong_state);
2206 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2209 context()->Plug(result_register());
2213 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
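// (Descriptive note: allocates and fills an iterator result object, i.e. the
// { value: ..., done: ... } pair required by the iteration protocol, using the
// iterator_result_map from the native context.)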
2217 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2219 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2222 __ bind(&gc_required);
2223 __ Push(Smi::FromInt(map->instance_size()));
2224 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2225 __ movp(context_register(),
2226 Operand(rbp, StandardFrameConstants::kContextOffset));
2228 __ bind(&allocated);
2231 __ Move(rdx, isolate()->factory()->ToBoolean(done));
2232 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2233 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2234 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2235 isolate()->factory()->empty_fixed_array());
2236 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2237 isolate()->factory()->empty_fixed_array());
2238 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset), rcx);
2240 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset), rdx);
2243 // Only the value field needs a write barrier, as the other values are in the root set.
2245 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2246 rcx, rdx, kDontSaveFPRegs);
2250 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2251 SetSourcePosition(prop->position());
2252 Literal* key = prop->key()->AsLiteral();
2253 __ Move(LoadIC::NameRegister(), key->value());
2254 if (FLAG_vector_ics) {
2255 __ Move(LoadIC::SlotRegister(), Smi::FromInt(prop->PropertyFeedbackSlot()));
2256 CallLoadIC(NOT_CONTEXTUAL);
2258 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2263 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2264 SetSourcePosition(prop->position());
2265 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2266 if (FLAG_vector_ics) {
2267 __ Move(LoadIC::SlotRegister(), Smi::FromInt(prop->PropertyFeedbackSlot()));
2270 CallIC(ic, prop->PropertyFeedbackId());
2275 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2279 Token::Value op, OverwriteMode mode, Expression* left, Expression* right) {
2280 // Do combined smi check of the operands. Left operand is on the
2281 // stack (popped into rdx). Right operand is in rax but moved into
2282 // rcx to make the shifts easier.
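// (Illustrative sketch, assuming both operands are smis: for "a + b" the
// addition below operates directly on the tagged values; if either operand is
// not a smi, or the smi operation overflows, control transfers to the
// BinaryOpICStub via the stub_call label.)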
2283 Label done, stub_call, smi_case;
2287 JumpPatchSite patch_site(masm_);
2288 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2290 __ bind(&stub_call);
2292 BinaryOpICStub stub(isolate(), op, mode);
2293 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2294 patch_site.EmitPatchInfo();
2295 __ jmp(&done, Label::kNear);
2300 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2303 __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2306 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2309 __ SmiAdd(rax, rdx, rcx, &stub_call);
2312 __ SmiSub(rax, rdx, rcx, &stub_call);
2315 __ SmiMul(rax, rdx, rcx, &stub_call);
2318 __ SmiOr(rax, rdx, rcx);
2320 case Token::BIT_AND:
2321 __ SmiAnd(rax, rdx, rcx);
2323 case Token::BIT_XOR:
2324 __ SmiXor(rax, rdx, rcx);
2332 context()->Plug(rax);
2336 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2338 OverwriteMode mode) {
2340 BinaryOpICStub stub(isolate(), op, mode);
2341 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2342 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2343 patch_site.EmitPatchInfo();
2344 context()->Plug(rax);
2348 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2349 DCHECK(expr->IsValidReferenceExpression());
2351 // Left-hand side can only be a property, a global or a (parameter or local) slot.
2353 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2354 LhsKind assign_type = VARIABLE;
2355 Property* prop = expr->AsProperty();
2357 assign_type = (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
2362 switch (assign_type) {
2364 Variable* var = expr->AsVariableProxy()->var();
2365 EffectContext context(this);
2366 EmitVariableAssignment(var, Token::ASSIGN);
2369 case NAMED_PROPERTY: {
2370 __ Push(rax); // Preserve value.
2371 VisitForAccumulatorValue(prop->obj());
2372 __ Move(StoreIC::ReceiverRegister(), rax);
2373 __ Pop(StoreIC::ValueRegister()); // Restore value.
2374 __ Move(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
2378 case KEYED_PROPERTY: {
2379 __ Push(rax); // Preserve value.
2380 VisitForStackValue(prop->obj());
2381 VisitForAccumulatorValue(prop->key());
2382 __ Move(KeyedStoreIC::NameRegister(), rax);
2383 __ Pop(KeyedStoreIC::ReceiverRegister());
2384 __ Pop(KeyedStoreIC::ValueRegister()); // Restore value.
2385 Handle<Code> ic = strict_mode() == SLOPPY
2386 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2387 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2392 context()->Plug(rax);
2396 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2397 Variable* var, MemOperand location) {
2398 __ movp(location, rax);
2399 if (var->IsContextSlot()) {
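// (Descriptive note: a context slot lives inside a heap-allocated Context
// object, so the store above must be followed by a write barrier; plain stack
// slots need no barrier.)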
2401 __ RecordWriteContextSlot(
2402 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2407 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2409 if (var->IsUnallocated()) {
2410 // Global var, const, or let.
2411 __ Move(StoreIC::NameRegister(), var->name());
2412 __ movp(StoreIC::ReceiverRegister(), GlobalObjectOperand());
2415 } else if (op == Token::INIT_CONST_LEGACY) {
2416 // Const initializers need a write barrier.
2417 DCHECK(!var->IsParameter()); // No const parameters.
2418 if (var->IsLookupSlot()) {
2421 __ Push(var->name());
2422 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2424 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2426 MemOperand location = VarOperand(var, rcx);
2427 __ movp(rdx, location);
2428 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2429 __ j(not_equal, &skip);
2430 EmitStoreToStackLocalOrContextSlot(var, location);
2434 } else if (var->mode() == LET && op != Token::INIT_LET) {
2435 // Non-initializing assignment to let variable needs a write barrier.
2436 DCHECK(!var->IsLookupSlot());
2437 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2439 MemOperand location = VarOperand(var, rcx);
2440 __ movp(rdx, location);
2441 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2442 __ j(not_equal, &assign, Label::kNear);
2443 __ Push(var->name());
2444 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2446 EmitStoreToStackLocalOrContextSlot(var, location);
2448 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2449 if (var->IsLookupSlot()) {
2450 // Assignment to var.
2451 __ Push(rax); // Value.
2452 __ Push(rsi); // Context.
2453 __ Push(var->name());
2454 __ Push(Smi::FromInt(strict_mode()));
2455 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2457 // Assignment to var or initializing assignment to let/const in harmony
2459 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2460 MemOperand location = VarOperand(var, rcx);
2461 if (generate_debug_code_ && op == Token::INIT_LET) {
2462 // Check for an uninitialized let binding.
2463 __ movp(rdx, location);
2464 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2465 __ Check(equal, kLetBindingReInitialization);
2467 EmitStoreToStackLocalOrContextSlot(var, location);
2470 // Non-initializing assignments to consts are ignored.
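// (Illustrative example, sloppy mode: given "const x = 1; x = 2;", the second
// assignment reaches this point and is silently dropped.)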
2474 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2475 // Assignment to a property, using a named store IC.
2476 Property* prop = expr->target()->AsProperty();
2477 DCHECK(prop != NULL);
2478 DCHECK(prop->key()->IsLiteral());
2480 // Record source code position before IC call.
2481 SetSourcePosition(expr->position());
2482 __ Move(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
2483 __ Pop(StoreIC::ReceiverRegister());
2484 CallStoreIC(expr->AssignmentFeedbackId());
2486 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2487 context()->Plug(rax);
2491 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2492 // Assignment to a property, using a keyed store IC.
2494 __ Pop(KeyedStoreIC::NameRegister()); // Key.
2495 __ Pop(KeyedStoreIC::ReceiverRegister());
2496 DCHECK(KeyedStoreIC::ValueRegister().is(rax));
2497 // Record source code position before IC call.
2498 SetSourcePosition(expr->position());
2499 Handle<Code> ic = strict_mode() == SLOPPY
2500 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2501 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2502 CallIC(ic, expr->AssignmentFeedbackId());
2504 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2505 context()->Plug(rax);
2509 void FullCodeGenerator::VisitProperty(Property* expr) {
2510 Comment cmnt(masm_, "[ Property");
2511 Expression* key = expr->key();
2513 if (key->IsPropertyName()) {
2514 VisitForAccumulatorValue(expr->obj());
2515 DCHECK(!rax.is(LoadIC::ReceiverRegister()));
2516 __ movp(LoadIC::ReceiverRegister(), rax);
2517 EmitNamedPropertyLoad(expr);
2518 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2519 context()->Plug(rax);
2521 VisitForStackValue(expr->obj());
2522 VisitForAccumulatorValue(expr->key());
2523 __ Move(LoadIC::NameRegister(), rax);
2524 __ Pop(LoadIC::ReceiverRegister());
2525 EmitKeyedPropertyLoad(expr);
2526 context()->Plug(rax);
2531 void FullCodeGenerator::CallIC(Handle<Code> code,
2532 TypeFeedbackId ast_id) {
2534 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2538 // Code common for calls using the IC.
2539 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2540 Expression* callee = expr->expression();
2542 CallIC::CallType call_type = callee->IsVariableProxy() ? CallIC::FUNCTION : CallIC::METHOD;
2545 // Get the target function.
2546 if (call_type == CallIC::FUNCTION) {
2547 { StackValueContext context(this);
2548 EmitVariableLoad(callee->AsVariableProxy());
2549 PrepareForBailout(callee, NO_REGISTERS);
2551 // Push undefined as receiver. This is patched in the method prologue if it
2552 // is a sloppy mode method.
2553 __ Push(isolate()->factory()->undefined_value());
2555 // Load the function from the receiver.
2556 DCHECK(callee->IsProperty());
2557 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, 0));
2558 EmitNamedPropertyLoad(callee->AsProperty());
2559 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2560 // Push the target function under the receiver.
2561 __ Push(Operand(rsp, 0));
2562 __ movp(Operand(rsp, kPointerSize), rax);
2565 EmitCall(expr, call_type);
2569 // Common code for calls using the IC.
2570 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2573 VisitForAccumulatorValue(key);
2575 Expression* callee = expr->expression();
2577 // Load the function from the receiver.
2578 DCHECK(callee->IsProperty());
2579 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, 0));
2580 __ Move(LoadIC::NameRegister(), rax);
2581 EmitKeyedPropertyLoad(callee->AsProperty());
2582 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2584 // Push the target function under the receiver.
2585 __ Push(Operand(rsp, 0));
2586 __ movp(Operand(rsp, kPointerSize), rax);
2588 EmitCall(expr, CallIC::METHOD);
2592 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2593 // Load the arguments.
2594 ZoneList<Expression*>* args = expr->arguments();
2595 int arg_count = args->length();
2596 { PreservePositionScope scope(masm()->positions_recorder());
2597 for (int i = 0; i < arg_count; i++) {
2598 VisitForStackValue(args->at(i));
2602 // Record source position of the IC call.
2603 SetSourcePosition(expr->position());
2604 Handle<Code> ic = CallIC::initialize_stub(
2605 isolate(), arg_count, call_type);
2606 __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2607 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2608 // Don't assign a type feedback id to the IC, since type feedback is provided
2609 // by the vector above.
2612 RecordJSReturnSite(expr);
2614 // Restore context register.
2615 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2616 // Discard the function left on TOS.
2617 context()->DropAndPlug(1, rax);
2621 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
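// (Descriptive note: together with the copy of the function already pushed by
// the caller, this sets up the five arguments of %ResolvePossiblyDirectEval:
// function, first eval argument (or undefined), enclosing receiver, language
// mode, and scope start position.)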
2622 // Push copy of the first argument or undefined if it doesn't exist.
2623 if (arg_count > 0) {
2624 __ Push(Operand(rsp, arg_count * kPointerSize));
2626 __ PushRoot(Heap::kUndefinedValueRootIndex);
2629 // Push the receiver of the enclosing function and do runtime call.
2630 StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2631 __ Push(args.GetReceiverOperand());
2633 // Push the language mode.
2634 __ Push(Smi::FromInt(strict_mode()));
2636 // Push the start position of the scope the call resides in.
2637 __ Push(Smi::FromInt(scope()->start_position()));
2639 // Do the runtime call.
2640 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2644 void FullCodeGenerator::VisitCall(Call* expr) {
2646 // We want to verify that RecordJSReturnSite gets called on all paths
2647 // through this function. Avoid early returns.
2648 expr->return_is_recorded_ = false;
2651 Comment cmnt(masm_, "[ Call");
2652 Expression* callee = expr->expression();
2653 Call::CallType call_type = expr->GetCallType(isolate());
2655 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2656 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2657 // to resolve the function we need to call and the receiver of the call.
2658 // Then we call the resolved function using the given arguments.
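// (Illustrative example: a direct call such as eval("x + 1") takes this path;
// an indirect call like (0, eval)("x + 1") is classified as an ordinary call
// instead.)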
2659 ZoneList<Expression*>* args = expr->arguments();
2660 int arg_count = args->length();
2661 { PreservePositionScope pos_scope(masm()->positions_recorder());
2662 VisitForStackValue(callee);
2663 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2665 // Push the arguments.
2666 for (int i = 0; i < arg_count; i++) {
2667 VisitForStackValue(args->at(i));
2670 // Push a copy of the function (found below the arguments) and resolve eval.
2672 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2673 EmitResolvePossiblyDirectEval(arg_count);
2675 // The runtime call returns a pair of values in rax (function) and
2676 // rdx (receiver). Touch up the stack with the right values.
2677 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2678 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2680 // Record source position for debugger.
2681 SetSourcePosition(expr->position());
2682 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2683 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2685 RecordJSReturnSite(expr);
2686 // Restore context register.
2687 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2688 context()->DropAndPlug(1, rax);
2689 } else if (call_type == Call::GLOBAL_CALL) {
2690 EmitCallWithLoadIC(expr);
2692 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2693 // Call to a lookup slot (dynamically introduced variable).
2694 VariableProxy* proxy = callee->AsVariableProxy();
2697 { PreservePositionScope scope(masm()->positions_recorder());
2698 // Generate code for loading from variables potentially shadowed by
2699 // eval-introduced variables.
2700 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2703 // Call the runtime to find the function to call (returned in rax) and
2704 // the object holding it (returned in rdx).
2705 __ Push(context_register());
2706 __ Push(proxy->name());
2707 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2708 __ Push(rax); // Function.
2709 __ Push(rdx); // Receiver.
2711 // If fast case code has been generated, emit code to push the function
2712 // and receiver and have the slow path jump around this code.
2713 if (done.is_linked()) {
2715 __ jmp(&call, Label::kNear);
2719 // The receiver is implicitly the global receiver. Indicate this by
2720 // passing undefined to the call function stub.
2721 __ PushRoot(Heap::kUndefinedValueRootIndex);
2725 // The receiver is either the global receiver or an object found by the LoadLookupSlot runtime call above.
2728 } else if (call_type == Call::PROPERTY_CALL) {
2729 Property* property = callee->AsProperty();
2730 { PreservePositionScope scope(masm()->positions_recorder());
2731 VisitForStackValue(property->obj());
2733 if (property->key()->IsPropertyName()) {
2734 EmitCallWithLoadIC(expr);
2736 EmitKeyedCallWithLoadIC(expr, property->key());
2739 DCHECK(call_type == Call::OTHER_CALL);
2740 // Call to an arbitrary expression not handled specially above.
2741 { PreservePositionScope scope(masm()->positions_recorder());
2742 VisitForStackValue(callee);
2744 __ PushRoot(Heap::kUndefinedValueRootIndex);
2745 // Emit function call.
2750 // RecordJSReturnSite should have been called.
2751 DCHECK(expr->return_is_recorded_);
2756 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2757 Comment cmnt(masm_, "[ CallNew");
2758 // According to ECMA-262, section 11.2.2, page 44, the function
2759 // expression in new calls must be evaluated before the arguments.
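// (Illustrative example: in "new Foo(a, b)", Foo is evaluated and pushed before
// a and b are evaluated below.)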
2762 // Push constructor on the stack. If it's not a function it's used as
2763 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is discarded.
2765 VisitForStackValue(expr->expression());
2767 // Push the arguments ("left-to-right") on the stack.
2768 ZoneList<Expression*>* args = expr->arguments();
2769 int arg_count = args->length();
2770 for (int i = 0; i < arg_count; i++) {
2771 VisitForStackValue(args->at(i));
2774 // Call the construct call builtin that handles allocation and
2775 // constructor invocation.
2776 SetSourcePosition(expr->position());
2778 // Load function and argument count into rdi and rax.
2779 __ Set(rax, arg_count);
2780 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2782 // Record call targets in unoptimized code, but not in the snapshot.
2783 if (FLAG_pretenuring_call_new) {
2784 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2785 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2786 expr->CallNewFeedbackSlot() + 1);
2789 __ Move(rbx, FeedbackVector());
2790 __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2792 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2793 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2794 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2795 context()->Plug(rax);
2799 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2800 ZoneList<Expression*>* args = expr->arguments();
2801 DCHECK(args->length() == 1);
2803 VisitForAccumulatorValue(args->at(0));
2805 Label materialize_true, materialize_false;
2806 Label* if_true = NULL;
2807 Label* if_false = NULL;
2808 Label* fall_through = NULL;
2809 context()->PrepareTest(&materialize_true, &materialize_false,
2810 &if_true, &if_false, &fall_through);
2812 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2813 __ JumpIfSmi(rax, if_true);
2816 context()->Plug(if_true, if_false);
2820 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2821 ZoneList<Expression*>* args = expr->arguments();
2822 DCHECK(args->length() == 1);
2824 VisitForAccumulatorValue(args->at(0));
2826 Label materialize_true, materialize_false;
2827 Label* if_true = NULL;
2828 Label* if_false = NULL;
2829 Label* fall_through = NULL;
2830 context()->PrepareTest(&materialize_true, &materialize_false,
2831 &if_true, &if_false, &fall_through);
2833 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2834 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2835 Split(non_negative_smi, if_true, if_false, fall_through);
2837 context()->Plug(if_true, if_false);
2841 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2842 ZoneList<Expression*>* args = expr->arguments();
2843 DCHECK(args->length() == 1);
2845 VisitForAccumulatorValue(args->at(0));
2847 Label materialize_true, materialize_false;
2848 Label* if_true = NULL;
2849 Label* if_false = NULL;
2850 Label* fall_through = NULL;
2851 context()->PrepareTest(&materialize_true, &materialize_false,
2852 &if_true, &if_false, &fall_through);
2854 __ JumpIfSmi(rax, if_false);
2855 __ CompareRoot(rax, Heap::kNullValueRootIndex);
2856 __ j(equal, if_true);
2857 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2858 // Undetectable objects behave like undefined when tested with typeof.
2859 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2860 Immediate(1 << Map::kIsUndetectable));
2861 __ j(not_zero, if_false);
2862 __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2863 __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2864 __ j(below, if_false);
2865 __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2866 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2867 Split(below_equal, if_true, if_false, fall_through);
2869 context()->Plug(if_true, if_false);
2873 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2874 ZoneList<Expression*>* args = expr->arguments();
2875 DCHECK(args->length() == 1);
2877 VisitForAccumulatorValue(args->at(0));
2879 Label materialize_true, materialize_false;
2880 Label* if_true = NULL;
2881 Label* if_false = NULL;
2882 Label* fall_through = NULL;
2883 context()->PrepareTest(&materialize_true, &materialize_false,
2884 &if_true, &if_false, &fall_through);
2886 __ JumpIfSmi(rax, if_false);
2887 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2888 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2889 Split(above_equal, if_true, if_false, fall_through);
2891 context()->Plug(if_true, if_false);
2895 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2896 ZoneList<Expression*>* args = expr->arguments();
2897 DCHECK(args->length() == 1);
2899 VisitForAccumulatorValue(args->at(0));
2901 Label materialize_true, materialize_false;
2902 Label* if_true = NULL;
2903 Label* if_false = NULL;
2904 Label* fall_through = NULL;
2905 context()->PrepareTest(&materialize_true, &materialize_false,
2906 &if_true, &if_false, &fall_through);
2908 __ JumpIfSmi(rax, if_false);
2909 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2910 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2911 Immediate(1 << Map::kIsUndetectable));
2912 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2913 Split(not_zero, if_true, if_false, fall_through);
2915 context()->Plug(if_true, if_false);
2919 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2920 CallRuntime* expr) {
2921 ZoneList<Expression*>* args = expr->arguments();
2922 DCHECK(args->length() == 1);
2924 VisitForAccumulatorValue(args->at(0));
2926 Label materialize_true, materialize_false, skip_lookup;
2927 Label* if_true = NULL;
2928 Label* if_false = NULL;
2929 Label* fall_through = NULL;
2930 context()->PrepareTest(&materialize_true, &materialize_false,
2931 &if_true, &if_false, &fall_through);
2933 __ AssertNotSmi(rax);
2935 // Check whether this map has already been checked to be safe for default valueOf.
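// (Descriptive note: "safe for default valueOf" means no own "valueOf" property
// shadows String.prototype.valueOf, so the wrapper can use the default
// conversion; a positive result is cached in the map bit tested below.)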
2937 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2938 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2939 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2940 __ j(not_zero, &skip_lookup);
2942 // Check for fast case object. Generate false result for slow case object.
2943 __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2944 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2945 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2946 __ j(equal, if_false);
2948 // Look for valueOf string in the descriptor array, and indicate false if
2949 // found. Since we omit an enumeration index check, if it is added via a
2950 // transition that shares its descriptor array, this is a false positive.
2951 Label entry, loop, done;
2953 // Skip loop if no descriptors are valid.
2954 __ NumberOfOwnDescriptors(rcx, rbx);
2955 __ cmpp(rcx, Immediate(0));
2958 __ LoadInstanceDescriptors(rbx, r8);
2959 // rbx: descriptor array.
2960 // rcx: valid entries in the descriptor array.
2961 // Calculate the end of the descriptor array.
2962 __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
2964 __ leap(rcx, Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
2965 // Calculate location of the first key name.
2966 __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
2967 // Loop through all the keys in the descriptor array. If one of these is the
2968 // internalized string "valueOf" the result is false.
2971 __ movp(rdx, FieldOperand(r8, 0));
2972 __ Cmp(rdx, isolate()->factory()->value_of_string());
2973 __ j(equal, if_false);
2974 __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
2977 __ j(not_equal, &loop);
2981 // Set the bit in the map to indicate that there is no local valueOf field.
2982 __ orp(FieldOperand(rbx, Map::kBitField2Offset),
2983 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2985 __ bind(&skip_lookup);
2987 // If a valueOf property is not found on the object, check that its
2988 // prototype is the unmodified String prototype. If not, the result is false.
2989 __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2990 __ testp(rcx, Immediate(kSmiTagMask));
2991 __ j(zero, if_false);
2992 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2993 __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2994 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
2996 __ cmpp(rcx, ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2997 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998 Split(equal, if_true, if_false, fall_through);
3000 context()->Plug(if_true, if_false);
3004 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3005 ZoneList<Expression*>* args = expr->arguments();
3006 DCHECK(args->length() == 1);
3008 VisitForAccumulatorValue(args->at(0));
3010 Label materialize_true, materialize_false;
3011 Label* if_true = NULL;
3012 Label* if_false = NULL;
3013 Label* fall_through = NULL;
3014 context()->PrepareTest(&materialize_true, &materialize_false,
3015 &if_true, &if_false, &fall_through);
3017 __ JumpIfSmi(rax, if_false);
3018 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3019 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3020 Split(equal, if_true, if_false, fall_through);
3022 context()->Plug(if_true, if_false);
3026 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3027 ZoneList<Expression*>* args = expr->arguments();
3028 DCHECK(args->length() == 1);
3030 VisitForAccumulatorValue(args->at(0));
3032 Label materialize_true, materialize_false;
3033 Label* if_true = NULL;
3034 Label* if_false = NULL;
3035 Label* fall_through = NULL;
3036 context()->PrepareTest(&materialize_true, &materialize_false,
3037 &if_true, &if_false, &fall_through);
3039 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3040 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3041 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3043 __ j(no_overflow, if_false);
3044 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3045 Immediate(0x00000000));
3046 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3047 Split(equal, if_true, if_false, fall_through);
3049 context()->Plug(if_true, if_false);
3053 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3054 ZoneList<Expression*>* args = expr->arguments();
3055 DCHECK(args->length() == 1);
3057 VisitForAccumulatorValue(args->at(0));
3059 Label materialize_true, materialize_false;
3060 Label* if_true = NULL;
3061 Label* if_false = NULL;
3062 Label* fall_through = NULL;
3063 context()->PrepareTest(&materialize_true, &materialize_false,
3064 &if_true, &if_false, &fall_through);
3066 __ JumpIfSmi(rax, if_false);
3067 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3068 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3069 Split(equal, if_true, if_false, fall_through);
3071 context()->Plug(if_true, if_false);
3075 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3076 ZoneList<Expression*>* args = expr->arguments();
3077 DCHECK(args->length() == 1);
3079 VisitForAccumulatorValue(args->at(0));
3081 Label materialize_true, materialize_false;
3082 Label* if_true = NULL;
3083 Label* if_false = NULL;
3084 Label* fall_through = NULL;
3085 context()->PrepareTest(&materialize_true, &materialize_false,
3086 &if_true, &if_false, &fall_through);
3088 __ JumpIfSmi(rax, if_false);
3089 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3090 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3091 Split(equal, if_true, if_false, fall_through);
3093 context()->Plug(if_true, if_false);
3098 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3099 DCHECK(expr->arguments()->length() == 0);
3101 Label materialize_true, materialize_false;
3102 Label* if_true = NULL;
3103 Label* if_false = NULL;
3104 Label* fall_through = NULL;
3105 context()->PrepareTest(&materialize_true, &materialize_false,
3106 &if_true, &if_false, &fall_through);
3108 // Get the frame pointer for the calling frame.
3109 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3111 // Skip the arguments adaptor frame if it exists.
3112 Label check_frame_marker;
3113 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3114 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3115 __ j(not_equal, &check_frame_marker);
3116 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3118 // Check the marker in the calling frame.
3119 __ bind(&check_frame_marker);
3120 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3121 Smi::FromInt(StackFrame::CONSTRUCT));
3122 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3123 Split(equal, if_true, if_false, fall_through);
3125 context()->Plug(if_true, if_false);
3129 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3130 ZoneList<Expression*>* args = expr->arguments();
3131 DCHECK(args->length() == 2);
3133 // Load the two objects into registers and perform the comparison.
3134 VisitForStackValue(args->at(0));
3135 VisitForAccumulatorValue(args->at(1));
3137 Label materialize_true, materialize_false;
3138 Label* if_true = NULL;
3139 Label* if_false = NULL;
3140 Label* fall_through = NULL;
3141 context()->PrepareTest(&materialize_true, &materialize_false,
3142 &if_true, &if_false, &fall_through);
3146 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3147 Split(equal, if_true, if_false, fall_through);
3149 context()->Plug(if_true, if_false);
3153 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3154 ZoneList<Expression*>* args = expr->arguments();
3155 DCHECK(args->length() == 1);
3157 // ArgumentsAccessStub expects the key in rdx and the formal
3158 // parameter count in rax.
3159 VisitForAccumulatorValue(args->at(0));
3161 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3162 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3164 context()->Plug(rax);
3168 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3169 DCHECK(expr->arguments()->length() == 0);
3172 // Get the number of formal parameters.
3173 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3175 // Check if the calling frame is an arguments adaptor frame.
3176 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3177 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3178 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3179 __ j(not_equal, &exit, Label::kNear);
3181 // Arguments adaptor case: Read the arguments length from the adaptor frame.
3183 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3187 context()->Plug(rax);
3191 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3192 ZoneList<Expression*>* args = expr->arguments();
3193 DCHECK(args->length() == 1);
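// (Illustrative note: this implements the %_ClassOf intrinsic, e.g. yielding
// "Date" for a Date instance, "Function" for functions, and null for
// non-spec-objects such as smis.)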
3194 Label done, null, function, non_function_constructor;
3196 VisitForAccumulatorValue(args->at(0));
3198 // If the object is a smi, we return null.
3199 __ JumpIfSmi(rax, &null);
3201 // Check that the object is a JS object but take special care of JS
3202 // functions to make sure they have 'Function' as their class.
3203 // Assume that there are only two callable types, and one of them is at
3204 // either end of the type range for JS object types. Saves extra comparisons.
3205 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3206 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3207 // Map is now in rax.
3209 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3210 FIRST_SPEC_OBJECT_TYPE + 1);
3211 __ j(equal, &function);
3213 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3214 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3215 LAST_SPEC_OBJECT_TYPE - 1);
3216 __ j(equal, &function);
3217 // Assume that there is no larger type.
3218 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3220 // Check if the constructor in the map is a JS function.
3221 __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3222 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3223 __ j(not_equal, &non_function_constructor);
3225 // rax now contains the constructor function. Grab the
3226 // instance class name from there.
3227 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3228 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3231 // Functions have class 'Function'.
3233 __ Move(rax, isolate()->factory()->Function_string());
3236 // Objects with a non-function constructor have class 'Object'.
3237 __ bind(&non_function_constructor);
3238 __ Move(rax, isolate()->factory()->Object_string());
3241 // Non-JS objects have class null.
3243 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3248 context()->Plug(rax);
3252 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3253 // Load the arguments on the stack and call the stub.
3254 SubStringStub stub(isolate());
3255 ZoneList<Expression*>* args = expr->arguments();
3256 DCHECK(args->length() == 3);
3257 VisitForStackValue(args->at(0));
3258 VisitForStackValue(args->at(1));
3259 VisitForStackValue(args->at(2));
3261 context()->Plug(rax);
3265 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3266 // Load the arguments on the stack and call the stub.
3267 RegExpExecStub stub(isolate());
3268 ZoneList<Expression*>* args = expr->arguments();
3269 DCHECK(args->length() == 4);
3270 VisitForStackValue(args->at(0));
3271 VisitForStackValue(args->at(1));
3272 VisitForStackValue(args->at(2));
3273 VisitForStackValue(args->at(3));
3275 context()->Plug(rax);
3279 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3280 ZoneList<Expression*>* args = expr->arguments();
3281 DCHECK(args->length() == 1);
3283 VisitForAccumulatorValue(args->at(0)); // Load the object.
3287 // If the object is a smi, return the object.
3287 __ JumpIfSmi(rax, &done);
3288 // If the object is not a value type, return the object.
3289 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3290 __ j(not_equal, &done);
3291 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3294 context()->Plug(rax);
3298 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3299 ZoneList<Expression*>* args = expr->arguments();
3300 DCHECK(args->length() == 2);
3301 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3302 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3304 VisitForAccumulatorValue(args->at(0)); // Load the object.
3306 Label runtime, done, not_date_object;
3307 Register object = rax;
3308 Register result = rax;
3309 Register scratch = rcx;
3311 __ JumpIfSmi(object, ¬_date_object);
3312 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3313 __ j(not_equal, ¬_date_object);
3315 if (index->value() == 0) {
3316 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3319 if (index->value() < JSDate::kFirstUncachedField) {
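// (Descriptive note: cached fields such as year or month can be read directly
// from the JSDate object as long as the global date cache stamp still matches
// the object's cache stamp; otherwise the C++ helper below recomputes them.)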
3320 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3321 Operand stamp_operand = __ ExternalOperand(stamp);
3322 __ movp(scratch, stamp_operand);
3323 __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3324 __ j(not_equal, &runtime, Label::kNear);
3325 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3326 kPointerSize * index->value()));
3330 __ PrepareCallCFunction(2);
3331 __ movp(arg_reg_1, object);
3332 __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3333 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3334 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3338 __ bind(¬_date_object);
3339 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3341 context()->Plug(rax);
3345 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3346 ZoneList<Expression*>* args = expr->arguments();
3347 DCHECK_EQ(3, args->length());
3349 Register string = rax;
3350 Register index = rbx;
3351 Register value = rcx;
3353 VisitForStackValue(args->at(1)); // index
3354 VisitForStackValue(args->at(2)); // value
3355 VisitForAccumulatorValue(args->at(0)); // string
3359 if (FLAG_debug_code) {
3360 __ Check(__ CheckSmi(value), kNonSmiValue);
3361 __ Check(__ CheckSmi(index), kNonSmiValue);
3364 __ SmiToInteger32(value, value);
3365 __ SmiToInteger32(index, index);
3367 if (FLAG_debug_code) {
3368 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3369 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3372 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize), value);
3374 context()->Plug(string);
3378 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3379 ZoneList<Expression*>* args = expr->arguments();
3380 DCHECK_EQ(3, args->length());
3382 Register string = rax;
3383 Register index = rbx;
3384 Register value = rcx;
3386 VisitForStackValue(args->at(1)); // index
3387 VisitForStackValue(args->at(2)); // value
3388 VisitForAccumulatorValue(args->at(0)); // string
3392 if (FLAG_debug_code) {
3393 __ Check(__ CheckSmi(value), kNonSmiValue);
3394 __ Check(__ CheckSmi(index), kNonSmiValue);
3397 __ SmiToInteger32(value, value);
3398 __ SmiToInteger32(index, index);
3400 if (FLAG_debug_code) {
3401 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3402 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3405 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize), value);
3407 context()->Plug(rax);
3411 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3412 // Load the arguments on the stack and call the runtime function.
3413 ZoneList<Expression*>* args = expr->arguments();
3414 DCHECK(args->length() == 2);
3415 VisitForStackValue(args->at(0));
3416 VisitForStackValue(args->at(1));
3417 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3419 context()->Plug(rax);
3423 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3424 ZoneList<Expression*>* args = expr->arguments();
3425 DCHECK(args->length() == 2);
3427 VisitForStackValue(args->at(0)); // Load the object.
3428 VisitForAccumulatorValue(args->at(1)); // Load the value.
3429 __ Pop(rbx); // rax = value. rbx = object.
3432 // If the object is a smi, return the value.
3433 __ JumpIfSmi(rbx, &done);
3435 // If the object is not a value type, return the value.
3436 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3437 __ j(not_equal, &done);
3440 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3441 // Update the write barrier. Save the value as it will be
3442 // overwritten by the write barrier code and is needed afterward.
3444 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3447 context()->Plug(rax);
3451 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3452 ZoneList<Expression*>* args = expr->arguments();
3453 DCHECK_EQ(args->length(), 1);
3455 // Load the argument into rax and call the stub.
3456 VisitForAccumulatorValue(args->at(0));
3458 NumberToStringStub stub(isolate());
3460 context()->Plug(rax);
3464 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3465 ZoneList<Expression*>* args = expr->arguments();
3466 DCHECK(args->length() == 1);
3468 VisitForAccumulatorValue(args->at(0));
3471 StringCharFromCodeGenerator generator(rax, rbx);
3472 generator.GenerateFast(masm_);
3475 NopRuntimeCallHelper call_helper;
3476 generator.GenerateSlow(masm_, call_helper);
3479 context()->Plug(rbx);
3483 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3485 DCHECK(args->length() == 2);
3487 VisitForStackValue(args->at(0));
3488 VisitForAccumulatorValue(args->at(1));
3490 Register object = rbx;
3491 Register index = rax;
3492 Register result = rdx;
3496 Label need_conversion;
3497 Label index_out_of_range;
3499 StringCharCodeAtGenerator generator(object,
3504 &index_out_of_range,
3505 STRING_INDEX_IS_NUMBER);
3506 generator.GenerateFast(masm_);
3509 __ bind(&index_out_of_range);
3510 // When the index is out of range, the spec requires us to return NaN.
3512 __ LoadRoot(result, Heap::kNanValueRootIndex);
3515 __ bind(&need_conversion);
3516 // Move the undefined value into the result register, which will
3517 // trigger conversion.
3518 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3521 NopRuntimeCallHelper call_helper;
3522 generator.GenerateSlow(masm_, call_helper);
3525 context()->Plug(result);
3529 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3530 ZoneList<Expression*>* args = expr->arguments();
3531 DCHECK(args->length() == 2);
3533 VisitForStackValue(args->at(0));
3534 VisitForAccumulatorValue(args->at(1));
3536 Register object = rbx;
3537 Register index = rax;
3538 Register scratch = rdx;
3539 Register result = rax;
3543 Label need_conversion;
3544 Label index_out_of_range;
3546 StringCharAtGenerator generator(object,
3552 &index_out_of_range,
3553 STRING_INDEX_IS_NUMBER);
3554 generator.GenerateFast(masm_);
3557 __ bind(&index_out_of_range);
3558 // When the index is out of range, the spec requires us to return
3559 // the empty string.
3560 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3563 __ bind(&need_conversion);
3564 // Move smi zero into the result register, which will trigger conversion.
3566 __ Move(result, Smi::FromInt(0));
3569 NopRuntimeCallHelper call_helper;
3570 generator.GenerateSlow(masm_, call_helper);
3573 context()->Plug(result);
3577 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3578 ZoneList<Expression*>* args = expr->arguments();
3579 DCHECK_EQ(2, args->length());
3580 VisitForStackValue(args->at(0));
3581 VisitForAccumulatorValue(args->at(1));
3584 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3586 context()->Plug(rax);
3590 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3591 ZoneList<Expression*>* args = expr->arguments();
3592 DCHECK_EQ(2, args->length());
3594 VisitForStackValue(args->at(0));
3595 VisitForStackValue(args->at(1));
3597 StringCompareStub stub(isolate());
3599 context()->Plug(rax);
3603 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3604 ZoneList<Expression*>* args = expr->arguments();
3605 DCHECK(args->length() >= 2);
3607 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3608 for (int i = 0; i < arg_count + 1; i++) {
3609 VisitForStackValue(args->at(i));
3611 VisitForAccumulatorValue(args->last()); // Function.
3613 Label runtime, done;
3614 // Check for non-function argument (including proxy).
3615 __ JumpIfSmi(rax, &runtime);
3616 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3617 __ j(not_equal, &runtime);
3619 // InvokeFunction requires the function in rdi. Move it in there.
3620 __ movp(rdi, result_register());
3621 ParameterCount count(arg_count);
3622 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3623 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3628 __ CallRuntime(Runtime::kCall, args->length());
3631 context()->Plug(rax);
3635 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3636 RegExpConstructResultStub stub(isolate());
3637 ZoneList<Expression*>* args = expr->arguments();
3638 DCHECK(args->length() == 3);
3639 VisitForStackValue(args->at(0));
3640 VisitForStackValue(args->at(1));
3641 VisitForAccumulatorValue(args->at(2));
3645 context()->Plug(rax);
3649 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3650 ZoneList<Expression*>* args = expr->arguments();
3651 DCHECK_EQ(2, args->length());
3653 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3654 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3656 Handle<FixedArray> jsfunction_result_caches(
3657 isolate()->native_context()->jsfunction_result_caches());
3658 if (jsfunction_result_caches->length() <= cache_id) {
3659 __ Abort(kAttemptToUseUndefinedCache);
3660 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3661 context()->Plug(rax);
3665 VisitForAccumulatorValue(args->at(1));
3668 Register cache = rbx;
3670 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3672 FieldOperand(cache, GlobalObject::kNativeContextOffset));
3674 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3676 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3678 Label done, not_found;
3679 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3680 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3681 // tmp now holds finger offset as a smi.
3683 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3684 __ cmpp(key, FieldOperand(cache,
3687 FixedArray::kHeaderSize));
3688 __ j(not_equal, ¬_found, Label::kNear);
3689 __ movp(rax, FieldOperand(cache,
3692 FixedArray::kHeaderSize + kPointerSize));
3693 __ jmp(&done, Label::kNear);
3695 __ bind(¬_found);
3696 // Call runtime to perform the lookup.
3699 __ CallRuntime(Runtime::kGetFromCache, 2);
3702 context()->Plug(rax);
3706 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3707 ZoneList<Expression*>* args = expr->arguments();
3708 DCHECK(args->length() == 1);
3710 VisitForAccumulatorValue(args->at(0));
3712 Label materialize_true, materialize_false;
3713 Label* if_true = NULL;
3714 Label* if_false = NULL;
3715 Label* fall_through = NULL;
3716 context()->PrepareTest(&materialize_true, &materialize_false,
3717 &if_true, &if_false, &fall_through);
3719 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3720 Immediate(String::kContainsCachedArrayIndexMask));
3721 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3722 __ j(zero, if_true);
3723 __ jmp(if_false);
3725 context()->Plug(if_true, if_false);
3729 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3730 ZoneList<Expression*>* args = expr->arguments();
3731 DCHECK(args->length() == 1);
3732 VisitForAccumulatorValue(args->at(0));
3734 __ AssertString(rax);
3736 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3737 DCHECK(String::kHashShift >= kSmiTagSize);
3738 __ IndexFromHash(rax, rax);
3740 context()->Plug(rax);
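// This intrinsic is a fast path for joining an array of sequential one-byte
// strings with a sequential one-byte separator. In effect the generated
// code builds
//   a[0] + sep + a[1] + ... + sep + a[a.length - 1]
// in one freshly allocated sequential string. Every check below that fails
// jumps to `bailout`, which returns undefined, presumably so the JavaScript
// caller can fall back to a generic join.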
3744 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3745 Label bailout, return_result, done, one_char_separator, long_separator,
3746 non_trivial_array, not_size_one_array, loop,
3747 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3748 ZoneList<Expression*>* args = expr->arguments();
3749 DCHECK(args->length() == 2);
3750 // We will leave the separator on the stack until the end of the function.
3751 VisitForStackValue(args->at(1));
3752 // Load this to rax (= array)
3753 VisitForAccumulatorValue(args->at(0));
3754 // All aliases of the same register have disjoint lifetimes.
3755 Register array = rax;
3756 Register elements = no_reg; // Will be rax.
3758 Register index = rdx;
3760 Register string_length = rcx;
3762 Register string = rsi;
3764 Register scratch = rbx;
3766 Register array_length = rdi;
3767 Register result_pos = no_reg; // Will be rdi.
3769 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3770 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3771 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3772 // Separator operand is already pushed. Make room for the two
3773 // other stack fields, and clear the direction flag in anticipation
3774 // of calling CopyBytes.
3775 __ subp(rsp, Immediate(2 * kPointerSize));
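// Stack layout from here on (lowest address first), matching the operands
// defined above:
//   rsp[0x00]: array_length (untagged, stored below)
//   rsp[0x08]: result string (stored once allocated)
//   rsp[0x10]: separator string (pushed at the top of this function)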
3777 // Check that the array is a JSArray
3778 __ JumpIfSmi(array, &bailout);
3779 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3780 __ j(not_equal, &bailout);
3782 // Check that the array has fast elements.
3783 __ CheckFastElements(scratch, &bailout);
3785 // Array has fast elements, so its length must be a smi.
3786 // If the array has length zero, return the empty string.
3787 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3788 __ SmiCompare(array_length, Smi::FromInt(0));
3789 __ j(not_zero, &non_trivial_array);
3790 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3791 __ jmp(&return_result);
3793 // Save the array length on the stack.
3794 __ bind(&non_trivial_array);
3795 __ SmiToInteger32(array_length, array_length);
3796 __ movl(array_length_operand, array_length);
3798 // Save the FixedArray containing array's elements.
3799 // End of array's live range.
3800 elements = array;
3801 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3802 array = no_reg;
3805 // Check that all array elements are sequential ASCII strings, and
3806 // accumulate the sum of their lengths, as a smi-encoded value.
3807 __ Set(index, 0);
3808 __ Set(string_length, 0);
3809 // Loop condition: while (index < array_length).
3810 // Live loop registers: index(int32), array_length(int32), string(String*),
3811 // scratch, string_length(int32), elements(FixedArray*).
3812 if (generate_debug_code_) {
3813 __ cmpp(index, array_length);
3814 __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3815 }
3816 __ bind(&loop);
3817 __ movp(string, FieldOperand(elements,
3818 index,
3819 times_pointer_size,
3820 FixedArray::kHeaderSize));
3821 __ JumpIfSmi(string, &bailout);
3822 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3823 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3824 __ andb(scratch, Immediate(
3825 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3826 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3827 __ j(not_equal, &bailout);
3828 __ AddSmiField(string_length,
3829 FieldOperand(string, SeqOneByteString::kLengthOffset));
3830 __ j(overflow, &bailout);
3831 __ incl(index);
3832 __ cmpl(index, array_length);
3833 __ j(below, &loop);
3836 // string_length: Sum of string lengths.
3837 // elements: FixedArray of strings.
3838 // index: Array length.
3839 // array_length: Array length.
3841 // If array_length is 1, return elements[0], a string.
3842 __ cmpl(array_length, Immediate(1));
3843 __ j(not_equal, &not_size_one_array);
3844 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3845 __ jmp(&return_result);
3847 __ bind(&not_size_one_array);
3849 // End of array_length live range.
3850 result_pos = array_length;
3851 array_length = no_reg;
3854 // string_length: Sum of string lengths.
3855 // elements: FixedArray of strings.
3856 // index: Array length.
3858 // Check that the separator is a sequential ASCII string.
3859 __ movp(string, separator_operand);
3860 __ JumpIfSmi(string, &bailout);
3861 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3862 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3863 __ andb(scratch, Immediate(
3864 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3865 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3866 __ j(not_equal, &bailout);
3869 // string_length: Sum of string lengths.
3870 // elements: FixedArray of strings.
3871 // index: Array length.
3872 // string: Separator string.
3874 // Add (separator length times (array_length - 1)) to string_length.
3875 __ SmiToInteger32(scratch,
3876 FieldOperand(string, SeqOneByteString::kLengthOffset));
3877 __ decl(index);
3878 __ imull(scratch, index);
3879 __ j(overflow, &bailout);
3880 __ addl(string_length, scratch);
3881 __ j(overflow, &bailout);
3883 // Live registers and stack values:
3884 // string_length: Total length of result string.
3885 // elements: FixedArray of strings.
3886 __ AllocateAsciiString(result_pos, string_length, scratch,
3887 index, string, &bailout);
3888 __ movp(result_operand, result_pos);
3889 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3891 __ movp(string, separator_operand);
3892 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
3893 Smi::FromInt(1));
3894 __ j(equal, &one_char_separator);
3895 __ j(greater, &long_separator);
3898 // Empty separator case:
3899 __ Set(index, 0);
3900 __ movl(scratch, array_length_operand);
3901 __ jmp(&loop_1_condition);
3902 // Loop condition: while (index < array_length).
3903 __ bind(&loop_1);
3904 // Each iteration of the loop concatenates one string to the result.
3905 // Live values in registers:
3906 // index: which element of the elements array we are adding to the result.
3907 // result_pos: the position to which we are currently copying characters.
3908 // elements: the FixedArray of strings we are joining.
3909 // scratch: array length.
3911 // Get string = array[index].
3912 __ movp(string, FieldOperand(elements, index,
3913 times_pointer_size,
3914 FixedArray::kHeaderSize));
3915 __ SmiToInteger32(string_length,
3916 FieldOperand(string, String::kLengthOffset));
3917 __ leap(string,
3918 FieldOperand(string, SeqOneByteString::kHeaderSize));
3919 __ CopyBytes(result_pos, string, string_length);
3920 __ incl(index);
3921 __ bind(&loop_1_condition);
3922 __ cmpl(index, scratch);
3923 __ j(less, &loop_1); // Loop while (index < array_length).
3924 __ jmp(&done);
3926 // Generic bailout code used from several places.
3927 __ bind(&bailout);
3928 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3929 __ jmp(&return_result);
3932 // One-character separator case
3933 __ bind(&one_char_separator);
3934 // Get the separator ASCII character value.
3935 // Register "string" holds the separator.
3936 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
3937 __ Set(index, 0);
3938 // Jump into the loop after the code that copies the separator, so the first
3939 // element is not preceded by a separator
3940 __ jmp(&loop_2_entry);
3941 // Loop condition: while (index < length).
3942 __ bind(&loop_2);
3943 // Each iteration of the loop concatenates one string to the result.
3944 // Live values in registers:
3945 // elements: The FixedArray of strings we are joining.
3946 // index: which element of the elements array we are adding to the result.
3947 // result_pos: the position to which we are currently copying characters.
3948 // scratch: Separator character.
3950 // Copy the separator character to the result.
3951 __ movb(Operand(result_pos, 0), scratch);
3952 __ incp(result_pos);
3954 __ bind(&loop_2_entry);
3955 // Get string = array[index].
3956 __ movp(string, FieldOperand(elements, index,
3957 times_pointer_size,
3958 FixedArray::kHeaderSize));
3959 __ SmiToInteger32(string_length,
3960 FieldOperand(string, String::kLengthOffset));
3961 __ leap(string,
3962 FieldOperand(string, SeqOneByteString::kHeaderSize));
3963 __ CopyBytes(result_pos, string, string_length);
3964 __ incl(index);
3965 __ cmpl(index, array_length_operand);
3966 __ j(less, &loop_2); // End while (index < length).
3967 __ jmp(&done);
3970 // Long separator case (separator is more than one character).
3971 __ bind(&long_separator);
3973 // Make elements point to end of elements array, and index
3974 // count from -array_length to zero, so we don't need to maintain
3975 // a loop limit.
3976 __ movl(index, array_length_operand);
3977 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
3978 FixedArray::kHeaderSize));
3979 __ negq(index);
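// In effect the loop below runs
//   for (index = -array_length; index != 0; index++) {
//     <copy separator>; <copy element>
//   }
// where the element is Operand(elements, index, times_pointer_size, 0):
// `elements` now points just past the last element, so the negative scaled
// index selects the right slot and no separate loop limit is needed.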
3981 // Replace separator string with pointer to its first character, and
3982 // make scratch be its length.
3983 __ movp(string, separator_operand);
3984 __ SmiToInteger32(scratch,
3985 FieldOperand(string, String::kLengthOffset));
3986 __ leap(string,
3987 FieldOperand(string, SeqOneByteString::kHeaderSize));
3988 __ movp(separator_operand, string);
3990 // Jump into the loop after the code that copies the separator, so the first
3991 // element is not preceded by a separator
3992 __ jmp(&loop_3_entry);
3993 // Loop condition: while (index < length).
3994 __ bind(&loop_3);
3995 // Each iteration of the loop concatenates one string to the result.
3996 // Live values in registers:
3997 // index: which element of the elements array we are adding to the result.
3998 // result_pos: the position to which we are currently copying characters.
3999 // scratch: Separator length.
4000 // separator_operand (rsp[0x10]): Address of first char of separator.
4002 // Copy the separator to the result.
4003 __ movp(string, separator_operand);
4004 __ movl(string_length, scratch);
4005 __ CopyBytes(result_pos, string, string_length, 2);
4007 __ bind(&loop_3_entry);
4008 // Get string = array[index].
4009 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4010 __ SmiToInteger32(string_length,
4011 FieldOperand(string, String::kLengthOffset));
4012 __ leap(string,
4013 FieldOperand(string, SeqOneByteString::kHeaderSize));
4014 __ CopyBytes(result_pos, string, string_length);
4015 __ incq(index);
4016 __ j(not_equal, &loop_3); // Loop while (index < 0).
4018 __ bind(&done);
4019 __ movp(rax, result_operand);
4021 __ bind(&return_result);
4022 // Drop temp values from the stack, and restore context register.
4023 __ addp(rsp, Immediate(3 * kPointerSize));
4024 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4025 context()->Plug(rax);
4029 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4030 DCHECK(expr->arguments()->length() == 0);
4031 ExternalReference debug_is_active =
4032 ExternalReference::debug_is_active_address(isolate());
4033 __ Move(kScratchRegister, debug_is_active);
4034 __ movzxbp(rax, Operand(kScratchRegister, 0));
4035 __ Integer32ToSmi(rax, rax);
4036 context()->Plug(rax);
4040 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4041 if (expr->function() != NULL &&
4042 expr->function()->intrinsic_type == Runtime::INLINE) {
4043 Comment cmnt(masm_, "[ InlineRuntimeCall");
4044 EmitInlineRuntimeCall(expr);
4045 return;
4046 }
4048 Comment cmnt(masm_, "[ CallRuntime");
4049 ZoneList<Expression*>* args = expr->arguments();
4050 int arg_count = args->length();
4052 if (expr->is_jsruntime()) {
4053 // Push the builtins object as receiver.
4054 __ movp(rax, GlobalObjectOperand());
4055 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4057 // Load the function from the receiver.
4058 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, 0));
4059 __ Move(LoadIC::NameRegister(), expr->name());
4060 if (FLAG_vector_ics) {
4061 __ Move(LoadIC::SlotRegister(),
4062 Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
4063 CallLoadIC(NOT_CONTEXTUAL);
4064 } else {
4065 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4066 }
4068 // Push the target function under the receiver.
4069 __ Push(Operand(rsp, 0));
4070 __ movp(Operand(rsp, kPointerSize), rax);
4072 // Push the arguments ("left-to-right").
4073 for (int i = 0; i < arg_count; i++) {
4074 VisitForStackValue(args->at(i));
4075 }
4077 // Record source position of the IC call.
4078 SetSourcePosition(expr->position());
4079 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4080 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4081 __ CallStub(&stub);
4083 // Restore context register.
4084 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4085 context()->DropAndPlug(1, rax);
4086 } else {
4088 // Push the arguments ("left-to-right").
4089 for (int i = 0; i < arg_count; i++) {
4090 VisitForStackValue(args->at(i));
4091 }
4093 // Call the C runtime.
4094 __ CallRuntime(expr->function(), arg_count);
4095 context()->Plug(rax);
4100 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4101 switch (expr->op()) {
4102 case Token::DELETE: {
4103 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4104 Property* property = expr->expression()->AsProperty();
4105 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4107 if (property != NULL) {
4108 VisitForStackValue(property->obj());
4109 VisitForStackValue(property->key());
4110 __ Push(Smi::FromInt(strict_mode()));
4111 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4112 context()->Plug(rax);
4113 } else if (proxy != NULL) {
4114 Variable* var = proxy->var();
4115 // Delete of an unqualified identifier is disallowed in strict mode
4116 // but "delete this" is allowed.
4117 DCHECK(strict_mode() == SLOPPY || var->is_this());
4118 if (var->IsUnallocated()) {
4119 __ Push(GlobalObjectOperand());
4120 __ Push(var->name());
4121 __ Push(Smi::FromInt(SLOPPY));
4122 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4123 context()->Plug(rax);
4124 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4125 // Result of deleting non-global variables is false. 'this' is
4126 // not really a variable, though we implement it as one. The
4127 // subexpression does not have side effects.
4128 context()->Plug(var->is_this());
4129 } else {
4130 // Non-global variable. Call the runtime to try to delete from the
4131 // context where the variable was introduced.
4132 __ Push(context_register());
4133 __ Push(var->name());
4134 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4135 context()->Plug(rax);
4136 }
4137 } else {
4138 // Result of deleting non-property, non-variable reference is true.
4139 // The subexpression may have side effects.
4140 VisitForEffect(expr->expression());
4141 context()->Plug(true);
4142 }
4143 break;
4144 }
4146 case Token::VOID: {
4147 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4148 VisitForEffect(expr->expression());
4149 context()->Plug(Heap::kUndefinedValueRootIndex);
4150 break;
4151 }
4153 case Token::NOT: {
4154 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4155 if (context()->IsEffect()) {
4156 // Unary NOT has no side effects so it's only necessary to visit the
4157 // subexpression. Match the optimizing compiler by not branching.
4158 VisitForEffect(expr->expression());
4159 } else if (context()->IsTest()) {
4160 const TestContext* test = TestContext::cast(context());
4161 // The labels are swapped for the recursive call.
4162 VisitForControl(expr->expression(),
4163 test->false_label(),
4164 test->true_label(),
4165 test->fall_through());
4166 context()->Plug(test->true_label(), test->false_label());
4167 } else {
4168 // We handle value contexts explicitly rather than simply visiting
4169 // for control and plugging the control flow into the context,
4170 // because we need to prepare a pair of extra administrative AST ids
4171 // for the optimizing compiler.
4172 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4173 Label materialize_true, materialize_false, done;
4174 VisitForControl(expr->expression(),
4175 &materialize_false,
4176 &materialize_true,
4177 &materialize_true);
4178 __ bind(&materialize_true);
4179 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4180 if (context()->IsAccumulatorValue()) {
4181 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4182 } else {
4183 __ PushRoot(Heap::kTrueValueRootIndex);
4184 }
4185 __ jmp(&done, Label::kNear);
4186 __ bind(&materialize_false);
4187 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4188 if (context()->IsAccumulatorValue()) {
4189 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4190 } else {
4191 __ PushRoot(Heap::kFalseValueRootIndex);
4192 }
4193 __ bind(&done);
4194 }
4195 break;
4196 }
4198 case Token::TYPEOF: {
4199 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4200 { StackValueContext context(this);
4201 VisitForTypeofValue(expr->expression());
4202 }
4203 __ CallRuntime(Runtime::kTypeof, 1);
4204 context()->Plug(rax);
4214 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4215 DCHECK(expr->expression()->IsValidReferenceExpression());
4217 Comment cmnt(masm_, "[ CountOperation");
4218 SetSourcePosition(expr->position());
4220 // Expression can only be a property, a global or a (parameter or local)
4221 // slot.
4222 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
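// For example, "x++" is VARIABLE, "o.x++" is NAMED_PROPERTY and "o[i]++" is
// KEYED_PROPERTY; for the property kinds the receiver (and key) are kept on
// the stack while the old value is loaded and the new value is stored.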
4223 LhsKind assign_type = VARIABLE;
4224 Property* prop = expr->expression()->AsProperty();
4225 // In case of a property we use the uninitialized expression context
4226 // of the key to detect a named property.
4227 if (prop != NULL) {
4228 assign_type =
4229 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4230 }
4232 // Evaluate expression and get value.
4233 if (assign_type == VARIABLE) {
4234 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4235 AccumulatorValueContext context(this);
4236 EmitVariableLoad(expr->expression()->AsVariableProxy());
4237 } else {
4238 // Reserve space for result of postfix operation.
4239 if (expr->is_postfix() && !context()->IsEffect()) {
4240 __ Push(Smi::FromInt(0));
4241 }
4242 if (assign_type == NAMED_PROPERTY) {
4243 VisitForStackValue(prop->obj());
4244 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, 0));
4245 EmitNamedPropertyLoad(prop);
4246 } else {
4247 VisitForStackValue(prop->obj());
4248 VisitForStackValue(prop->key());
4249 // Leave receiver on stack
4250 __ movp(LoadIC::ReceiverRegister(), Operand(rsp, kPointerSize));
4251 // Copy of key, needed for later store.
4252 __ movp(LoadIC::NameRegister(), Operand(rsp, 0));
4253 EmitKeyedPropertyLoad(prop);
4254 }
4255 }
4257 // We need a second deoptimization point after loading the value
4258 // in case evaluating the property load may have a side effect.
4259 if (assign_type == VARIABLE) {
4260 PrepareForBailout(expr->expression(), TOS_REG);
4261 } else {
4262 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4263 }
4265 // Inline smi case if we are in a loop.
4266 Label done, stub_call;
4267 JumpPatchSite patch_site(masm_);
4268 if (ShouldInlineSmiCase(expr->op())) {
4269 Label slow;
4270 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4272 // Save result for postfix expressions.
4273 if (expr->is_postfix()) {
4274 if (!context()->IsEffect()) {
4275 // Save the result on the stack. If we have a named or keyed property
4276 // we store the result under the receiver that is currently on top
4278 switch (assign_type) {
4279 case VARIABLE:
4280 __ Push(rax);
4281 break;
4282 case NAMED_PROPERTY:
4283 __ movp(Operand(rsp, kPointerSize), rax);
4284 break;
4285 case KEYED_PROPERTY:
4286 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4287 break;
4288 }
4289 }
4290 }
4292 SmiOperationExecutionMode mode;
4293 mode.Add(PRESERVE_SOURCE_REGISTER);
4294 mode.Add(BAILOUT_ON_NO_OVERFLOW);
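// With BAILOUT_ON_NO_OVERFLOW the Smi{Add,Sub}Constant below branches to
// `done` when the +/-1 stays a smi, and with PRESERVE_SOURCE_REGISTER the
// original operand is left in rax on overflow, so execution falls through
// to the generic BinaryOpIC call at `stub_call` with its input intact.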
4295 if (expr->op() == Token::INC) {
4296 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4297 } else {
4298 __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4299 }
4300 __ jmp(&stub_call, Label::kNear);
4301 __ bind(&slow);
4302 }
4304 ToNumberStub convert_stub(isolate());
4305 __ CallStub(&convert_stub);
4307 // Save result for postfix expressions.
4308 if (expr->is_postfix()) {
4309 if (!context()->IsEffect()) {
4310 // Save the result on the stack. If we have a named or keyed property
4311 // we store the result under the receiver that is currently on top
4313 switch (assign_type) {
4314 case VARIABLE:
4315 __ Push(rax);
4316 break;
4317 case NAMED_PROPERTY:
4318 __ movp(Operand(rsp, kPointerSize), rax);
4319 break;
4320 case KEYED_PROPERTY:
4321 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4322 break;
4323 }
4324 }
4325 }
4327 // Record position before stub call.
4328 SetSourcePosition(expr->position());
4330 // Call stub for +1/-1.
4331 __ bind(&stub_call);
4332 __ movp(rdx, rax);
4333 __ Move(rax, Smi::FromInt(1));
4334 BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
4335 CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
4336 patch_site.EmitPatchInfo();
4337 __ bind(&done);
4339 // Store the value returned in rax.
4340 switch (assign_type) {
4341 case VARIABLE:
4342 if (expr->is_postfix()) {
4343 // Perform the assignment as if via '='.
4344 { EffectContext context(this);
4345 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4346 Token::ASSIGN);
4347 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4348 }
4350 // For all contexts except kEffect: We have the result on
4351 // top of the stack.
4352 if (!context()->IsEffect()) {
4353 context()->PlugTOS();
4354 }
4355 } else {
4356 // Perform the assignment as if via '='.
4357 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4358 Token::ASSIGN);
4359 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4360 context()->Plug(rax);
4361 }
4362 break;
4363 case NAMED_PROPERTY: {
4364 __ Move(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
4365 __ Pop(StoreIC::ReceiverRegister());
4366 CallStoreIC(expr->CountStoreFeedbackId());
4367 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4368 if (expr->is_postfix()) {
4369 if (!context()->IsEffect()) {
4370 context()->PlugTOS();
4371 }
4372 } else {
4373 context()->Plug(rax);
4374 }
4375 break;
4376 }
4377 case KEYED_PROPERTY: {
4378 __ Pop(KeyedStoreIC::NameRegister());
4379 __ Pop(KeyedStoreIC::ReceiverRegister());
4380 Handle<Code> ic = strict_mode() == SLOPPY
4381 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4382 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4383 CallIC(ic, expr->CountStoreFeedbackId());
4384 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4385 if (expr->is_postfix()) {
4386 if (!context()->IsEffect()) {
4387 context()->PlugTOS();
4388 }
4389 } else {
4390 context()->Plug(rax);
4391 }
4392 break;
4393 }
4394 }
4398 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4399 VariableProxy* proxy = expr->AsVariableProxy();
4400 DCHECK(!context()->IsEffect());
4401 DCHECK(!context()->IsTest());
4403 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4404 Comment cmnt(masm_, "[ Global variable");
4405 __ Move(LoadIC::NameRegister(), proxy->name());
4406 __ movp(LoadIC::ReceiverRegister(), GlobalObjectOperand());
4407 if (FLAG_vector_ics) {
4408 __ Move(LoadIC::SlotRegister(),
4409 Smi::FromInt(proxy->VariableFeedbackSlot()));
4410 }
4411 // Use a regular load, not a contextual load, to avoid a reference
4412 // error.
4413 CallLoadIC(NOT_CONTEXTUAL);
4414 PrepareForBailout(expr, TOS_REG);
4415 context()->Plug(rax);
4416 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4417 Comment cmnt(masm_, "[ Lookup slot");
4418 Label done, slow;
4420 // Generate code for loading from variables potentially shadowed
4421 // by eval-introduced variables.
4422 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4424 __ bind(&slow);
4426 __ Push(proxy->name());
4427 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4428 PrepareForBailout(expr, TOS_REG);
4430 __ bind(&done);
4431 context()->Plug(rax);
4432 } else {
4433 // This expression cannot throw a reference error at the top level.
4434 VisitInDuplicateContext(expr);
4439 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4440 Expression* sub_expr,
4441 Handle<String> check) {
4442 Label materialize_true, materialize_false;
4443 Label* if_true = NULL;
4444 Label* if_false = NULL;
4445 Label* fall_through = NULL;
4446 context()->PrepareTest(&materialize_true, &materialize_false,
4447 &if_true, &if_false, &fall_through);
4449 { AccumulatorValueContext context(this);
4450 VisitForTypeofValue(sub_expr);
4451 }
4452 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4454 Factory* factory = isolate()->factory();
4455 if (String::Equals(check, factory->number_string())) {
4456 __ JumpIfSmi(rax, if_true);
4457 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4458 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4459 Split(equal, if_true, if_false, fall_through);
4460 } else if (String::Equals(check, factory->string_string())) {
4461 __ JumpIfSmi(rax, if_false);
4462 // Check for undetectable objects => false.
4463 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4464 __ j(above_equal, if_false);
4465 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4466 Immediate(1 << Map::kIsUndetectable));
4467 Split(zero, if_true, if_false, fall_through);
4468 } else if (String::Equals(check, factory->symbol_string())) {
4469 __ JumpIfSmi(rax, if_false);
4470 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4471 Split(equal, if_true, if_false, fall_through);
4472 } else if (String::Equals(check, factory->boolean_string())) {
4473 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4474 __ j(equal, if_true);
4475 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4476 Split(equal, if_true, if_false, fall_through);
4477 } else if (String::Equals(check, factory->undefined_string())) {
4478 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4479 __ j(equal, if_true);
4480 __ JumpIfSmi(rax, if_false);
4481 // Check for undetectable objects => true.
4482 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4483 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4484 Immediate(1 << Map::kIsUndetectable));
4485 Split(not_zero, if_true, if_false, fall_through);
4486 } else if (String::Equals(check, factory->function_string())) {
4487 __ JumpIfSmi(rax, if_false);
4488 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
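// That is, the only callable spec object types are JS_FUNCTION_TYPE and
// JS_FUNCTION_PROXY_TYPE, which are exactly the two cases tested below.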
4489 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4490 __ j(equal, if_true);
4491 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4492 Split(equal, if_true, if_false, fall_through);
4493 } else if (String::Equals(check, factory->object_string())) {
4494 __ JumpIfSmi(rax, if_false);
4495 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4496 __ j(equal, if_true);
4497 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4498 __ j(below, if_false);
4499 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4500 __ j(above, if_false);
4501 // Check for undetectable objects => false.
4502 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4503 Immediate(1 << Map::kIsUndetectable));
4504 Split(zero, if_true, if_false, fall_through);
4505 } else {
4506 if (if_false != fall_through) __ jmp(if_false);
4508 context()->Plug(if_true, if_false);
4512 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4513 Comment cmnt(masm_, "[ CompareOperation");
4514 SetSourcePosition(expr->position());
4516 // First we try a fast inlined version of the compare when one of
4517 // the operands is a literal.
4518 if (TryLiteralCompare(expr)) return;
4520 // Always perform the comparison for its control flow. Pack the result
4521 // into the expression's context after the comparison is performed.
4522 Label materialize_true, materialize_false;
4523 Label* if_true = NULL;
4524 Label* if_false = NULL;
4525 Label* fall_through = NULL;
4526 context()->PrepareTest(&materialize_true, &materialize_false,
4527 &if_true, &if_false, &fall_through);
4529 Token::Value op = expr->op();
4530 VisitForStackValue(expr->left());
4531 switch (op) {
4532 case Token::IN:
4533 VisitForStackValue(expr->right());
4534 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4535 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4536 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4537 Split(equal, if_true, if_false, fall_through);
4538 break;
4540 case Token::INSTANCEOF: {
4541 VisitForStackValue(expr->right());
4542 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4543 __ CallStub(&stub);
4544 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4545 __ testp(rax, rax);
4546 // The stub returns 0 for true.
4547 Split(zero, if_true, if_false, fall_through);
4548 break;
4549 }
4551 default: {
4552 VisitForAccumulatorValue(expr->right());
4553 Condition cc = CompareIC::ComputeCondition(op);
4554 __ Pop(rdx);
4556 bool inline_smi_code = ShouldInlineSmiCase(op);
4557 JumpPatchSite patch_site(masm_);
4558 if (inline_smi_code) {
4559 Label slow_case;
4560 __ movp(rcx, rdx);
4561 __ orp(rcx, rax);
4562 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4563 __ cmpp(rdx, rax);
4564 Split(cc, if_true, if_false, NULL);
4565 __ bind(&slow_case);
4566 }
4568 // Record position and call the compare IC.
4569 SetSourcePosition(expr->position());
4570 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4571 CallIC(ic, expr->CompareOperationFeedbackId());
4572 patch_site.EmitPatchInfo();
4574 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4575 __ testp(rax, rax);
4576 Split(cc, if_true, if_false, fall_through);
4577 }
4578 }
4580 // Convert the result of the comparison into one expected for this
4581 // expression's context.
4582 context()->Plug(if_true, if_false);
4586 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4587 Expression* sub_expr,
4588 NilValue nil) {
4589 Label materialize_true, materialize_false;
4590 Label* if_true = NULL;
4591 Label* if_false = NULL;
4592 Label* fall_through = NULL;
4593 context()->PrepareTest(&materialize_true, &materialize_false,
4594 &if_true, &if_false, &fall_through);
4596 VisitForAccumulatorValue(sub_expr);
4597 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4598 if (expr->op() == Token::EQ_STRICT) {
4599 Heap::RootListIndex nil_value = nil == kNullValue ?
4600 Heap::kNullValueRootIndex :
4601 Heap::kUndefinedValueRootIndex;
4602 __ CompareRoot(rax, nil_value);
4603 Split(equal, if_true, if_false, fall_through);
4604 } else {
4605 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4606 CallIC(ic, expr->CompareOperationFeedbackId());
4607 __ testp(rax, rax);
4608 Split(not_zero, if_true, if_false, fall_through);
4609 }
4610 context()->Plug(if_true, if_false);
4614 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4615 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4616 context()->Plug(rax);
4620 Register FullCodeGenerator::result_register() {
4621 return rax;
4622 }
4625 Register FullCodeGenerator::context_register() {
4626 return rsi;
4627 }
4630 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4631 DCHECK(IsAligned(frame_offset, kPointerSize));
4632 __ movp(Operand(rbp, frame_offset), value);
4636 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4637 __ movp(dst, ContextOperand(rsi, context_index));
4641 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4642 Scope* declaration_scope = scope()->DeclarationScope();
4643 if (declaration_scope->is_global_scope() ||
4644 declaration_scope->is_module_scope()) {
4645 // Contexts nested in the native context have a canonical empty function
4646 // as their closure, not the anonymous closure containing the global
4647 // code. Pass a smi sentinel and let the runtime look up the empty
4648 // function.
4649 __ Push(Smi::FromInt(0));
4650 } else if (declaration_scope->is_eval_scope()) {
4651 // Contexts created by a call to eval have the same closure as the
4652 // context calling eval, not the anonymous closure containing the eval
4653 // code. Fetch it from the context.
4654 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4655 } else {
4656 DCHECK(declaration_scope->is_function_scope());
4657 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4662 // ----------------------------------------------------------------------------
4663 // Non-local control flow support.
4666 void FullCodeGenerator::EnterFinallyBlock() {
4667 DCHECK(!result_register().is(rdx));
4668 DCHECK(!result_register().is(rcx));
4669 // Cook return address on top of stack (smi encoded Code* delta)
4670 __ PopReturnAddressTo(rdx);
4671 __ Move(rcx, masm_->CodeObject());
4672 __ subp(rdx, rcx);
4673 __ Integer32ToSmi(rdx, rdx);
4674 __ Push(rdx);
4676 // Store result register while executing finally block.
4677 __ Push(result_register());
4679 // Store pending message while executing finally block.
4680 ExternalReference pending_message_obj =
4681 ExternalReference::address_of_pending_message_obj(isolate());
4682 __ Load(rdx, pending_message_obj);
4683 __ Push(rdx);
4685 ExternalReference has_pending_message =
4686 ExternalReference::address_of_has_pending_message(isolate());
4687 __ Load(rdx, has_pending_message);
4688 __ Integer32ToSmi(rdx, rdx);
4689 __ Push(rdx);
4691 ExternalReference pending_message_script =
4692 ExternalReference::address_of_pending_message_script(isolate());
4693 __ Load(rdx, pending_message_script);
4694 __ Push(rdx);
4698 void FullCodeGenerator::ExitFinallyBlock() {
4699 DCHECK(!result_register().is(rdx));
4700 DCHECK(!result_register().is(rcx));
4701 // Restore pending message from stack.
4702 __ Pop(rdx);
4703 ExternalReference pending_message_script =
4704 ExternalReference::address_of_pending_message_script(isolate());
4705 __ Store(pending_message_script, rdx);
4707 __ Pop(rdx);
4708 __ SmiToInteger32(rdx, rdx);
4709 ExternalReference has_pending_message =
4710 ExternalReference::address_of_has_pending_message(isolate());
4711 __ Store(has_pending_message, rdx);
4713 __ Pop(rdx);
4714 ExternalReference pending_message_obj =
4715 ExternalReference::address_of_pending_message_obj(isolate());
4716 __ Store(pending_message_obj, rdx);
4718 // Restore result register from stack.
4719 __ Pop(result_register());
4721 // Uncook return address.
4722 __ Pop(rdx);
4723 __ SmiToInteger32(rdx, rdx);
4724 __ Move(rcx, masm_->CodeObject());
4725 __ addp(rdx, rcx);
4726 __ jmp(rdx);
4732 #define __ ACCESS_MASM(masm())
4734 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4735 int* stack_depth,
4736 int* context_length) {
4737 // The macros used here must preserve the result register.
4739 // Because the handler block contains the context of the finally
4740 // code, we can restore it directly from there for the finally code
4741 // rather than iteratively unwinding contexts via their previous
4742 // links.
4743 __ Drop(*stack_depth); // Down to the handler block.
4744 if (*context_length > 0) {
4745 // Restore the context to its dedicated register and the stack.
4746 __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4747 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4748 }
4749 __ PopTryHandler();
4750 __ call(finally_entry_);
4752 *stack_depth = 0;
4753 *context_length = 0;
4754 return previous_;
4761 static const byte kJnsInstruction = 0x79;
4762 static const byte kNopByteOne = 0x66;
4763 static const byte kNopByteTwo = 0x90;
4765 static const byte kCallInstruction = 0xe8;
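// Layout of a back edge site relative to the `pc` passed to PatchAt and
// GetBackEdgeState below (pc points just past the call):
//   pc - 7: jns opcode (kJnsInstruction) -> patched to kNopByteOne (0x66)
//   pc - 6: jns rel8 offset (kJnsOffset) -> patched to kNopByteTwo (0x90)
//   pc - 5: call opcode (kCallInstruction, 0xe8)
//   pc - 4: 32-bit call target, retargeted via set_target_address_at.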
4769 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4770 Address pc,
4771 BackEdgeState target_state,
4772 Code* replacement_code) {
4773 Address call_target_address = pc - kIntSize;
4774 Address jns_instr_address = call_target_address - 3;
4775 Address jns_offset_address = call_target_address - 2;
4777 switch (target_state) {
4778 case INTERRUPT:
4779 // sub <profiling_counter>, <delta> ;; Not changed
4780 // jns ok
4781 // call <interrupt stub>
4782 // ok:
4783 *jns_instr_address = kJnsInstruction;
4784 *jns_offset_address = kJnsOffset;
4785 break;
4786 case ON_STACK_REPLACEMENT:
4787 case OSR_AFTER_STACK_CHECK:
4788 // sub <profiling_counter>, <delta> ;; Not changed
4789 // nop
4790 // nop
4791 // call <on-stack replacement>
4792 // ok:
4793 *jns_instr_address = kNopByteOne;
4794 *jns_offset_address = kNopByteTwo;
4795 break;
4796 }
4798 Assembler::set_target_address_at(call_target_address,
4799 unoptimized_code,
4800 replacement_code->entry());
4801 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4802 unoptimized_code, call_target_address, replacement_code);
4806 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4807 Isolate* isolate,
4808 Code* unoptimized_code,
4809 Address pc) {
4810 Address call_target_address = pc - kIntSize;
4811 Address jns_instr_address = call_target_address - 3;
4812 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4814 if (*jns_instr_address == kJnsInstruction) {
4815 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4816 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4817 Assembler::target_address_at(call_target_address,
4818 unoptimized_code));
4819 return INTERRUPT;
4820 }
4822 DCHECK_EQ(kNopByteOne, *jns_instr_address);
4823 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4825 if (Assembler::target_address_at(call_target_address,
4826 unoptimized_code) ==
4827 isolate->builtins()->OnStackReplacement()->entry()) {
4828 return ON_STACK_REPLACEMENT;
4829 }
4831 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4832 Assembler::target_address_at(call_target_address,
4833 unoptimized_code));
4834 return OSR_AFTER_STACK_CHECK;
4838 } } // namespace v8::internal
4840 #endif // V8_TARGET_ARCH_X64