1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
22 #define __ ACCESS_MASM(masm_)
25 class JumpPatchSite BASE_EMBEDDED {
27 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
29 info_emitted_ = false;
34 DCHECK(patch_site_.is_bound() == info_emitted_);
37 void EmitJumpIfNotSmi(Register reg,
39 Label::Distance near_jump = Label::kFar) {
40 __ testb(reg, Immediate(kSmiTagMask));
41 EmitJump(not_carry, target, near_jump); // Always taken before patched.
44 void EmitJumpIfSmi(Register reg,
46 Label::Distance near_jump = Label::kFar) {
47 __ testb(reg, Immediate(kSmiTagMask));
48 EmitJump(carry, target, near_jump); // Never taken before patched.
51 void EmitPatchInfo() {
52 if (patch_site_.is_bound()) {
53 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
54 DCHECK(is_uint8(delta_to_patch_site));
55 __ testl(rax, Immediate(delta_to_patch_site));
60 __ nop(); // Signals no inlined code.
65 // jc will be patched with jz, jnc will become jnz.
66 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
67 DCHECK(!patch_site_.is_bound() && !info_emitted_);
68 DCHECK(cc == carry || cc == not_carry);
69 __ bind(&patch_site_);
70 __ j(cc, target, near_jump);
73 MacroAssembler* masm_;
81 // Generate code for a JS function. On entry to the function the receiver
82 // and arguments have been pushed on the stack left to right, with the
83 // return address on top of them. The actual argument count matches the
84 // formal parameter count expected by the function.
86 // The live registers are:
87 // o rdi: the JS function object being called (i.e. ourselves)
89 // o rbp: our caller's frame pointer
90 // o rsp: stack pointer (pointing to return address)
92 // The function builds a JS frame. Please see JavaScriptFrameConstants in
93 // frames-x64.h for its layout.
94 void FullCodeGenerator::Generate() {
95 CompilationInfo* info = info_;
96 profiling_counter_ = isolate()->factory()->NewCell(
97 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
98 SetFunctionPosition(function());
99 Comment cmnt(masm_, "[ function compiled by full code generator");
101 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
104 if (strlen(FLAG_stop_at) > 0 &&
105 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110 // Sloppy mode functions and builtins need to replace the receiver with the
111 // global proxy when called as functions (without an explicit receiver
113 if (is_sloppy(info->language_mode()) && !info->is_native() &&
114 info->MayUseThis() && info->scope()->has_this_declaration()) {
116 // +1 for return address.
117 StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
118 __ movp(rcx, args.GetReceiverOperand());
120 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
121 __ j(not_equal, &ok, Label::kNear);
123 __ movp(rcx, GlobalObjectOperand());
124 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));
126 __ movp(args.GetReceiverOperand(), rcx);
131 // Open a frame scope to indicate that there is a frame on the stack. The
132 // MANUAL indicates that the scope shouldn't actually generate code to set up
133 // the frame (that is done below).
134 FrameScope frame_scope(masm_, StackFrame::MANUAL);
136 info->set_prologue_offset(masm_->pc_offset());
137 __ Prologue(info->IsCodePreAgingActive());
138 info->AddNoFrameRange(0, masm_->pc_offset());
140 { Comment cmnt(masm_, "[ Allocate locals");
141 int locals_count = info->scope()->num_stack_slots();
142 // Generators allocate locals, if any, in context slots.
143 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
144 if (locals_count == 1) {
145 __ PushRoot(Heap::kUndefinedValueRootIndex);
146 } else if (locals_count > 1) {
147 if (locals_count >= 128) {
150 __ subp(rcx, Immediate(locals_count * kPointerSize));
151 __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
152 __ j(above_equal, &ok, Label::kNear);
153 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
156 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
157 const int kMaxPushes = 32;
158 if (locals_count >= kMaxPushes) {
159 int loop_iterations = locals_count / kMaxPushes;
160 __ movp(rcx, Immediate(loop_iterations));
162 __ bind(&loop_header);
164 for (int i = 0; i < kMaxPushes; i++) {
167 // Continue loop if not done.
169 __ j(not_zero, &loop_header, Label::kNear);
171 int remaining = locals_count % kMaxPushes;
172 // Emit the remaining pushes.
173 for (int i = 0; i < remaining; i++) {
179 bool function_in_register = true;
181 // Possibly allocate a local context.
182 if (info->scope()->num_heap_slots() > 0) {
183 Comment cmnt(masm_, "[ Allocate context");
184 bool need_write_barrier = true;
185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186 // Argument to NewContext is the function, which is still in rdi.
187 if (info->scope()->is_script_scope()) {
189 __ Push(info->scope()->GetScopeInfo(info->isolate()));
190 __ CallRuntime(Runtime::kNewScriptContext, 2);
191 } else if (slots <= FastNewContextStub::kMaximumSlots) {
192 FastNewContextStub stub(isolate(), slots);
194 // Result of FastNewContextStub is always in new space.
195 need_write_barrier = false;
198 __ CallRuntime(Runtime::kNewFunctionContext, 1);
200 function_in_register = false;
201 // Context is returned in rax. It replaces the context passed to us.
202 // It's saved in the stack and kept live in rsi.
204 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
206 // Copy any necessary parameters into the context.
207 int num_parameters = info->scope()->num_parameters();
208 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
209 for (int i = first_parameter; i < num_parameters; i++) {
210 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
211 if (var->IsContextSlot()) {
212 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
213 (num_parameters - 1 - i) * kPointerSize;
214 // Load parameter from stack.
215 __ movp(rax, Operand(rbp, parameter_offset));
216 // Store it in the context.
217 int context_offset = Context::SlotOffset(var->index());
218 __ movp(Operand(rsi, context_offset), rax);
219 // Update the write barrier. This clobbers rax and rbx.
220 if (need_write_barrier) {
221 __ RecordWriteContextSlot(
222 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
223 } else if (FLAG_debug_code) {
225 __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
226 __ Abort(kExpectedNewSpaceObject);
233 // Possibly set up a local binding to the this function which is used in
234 // derived constructors with super calls.
235 Variable* this_function_var = scope()->this_function_var();
236 if (this_function_var != nullptr) {
237 Comment cmnt(masm_, "[ This function");
238 if (!function_in_register) {
239 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
240 // The write barrier clobbers register again, keep is marked as such.
242 SetVar(this_function_var, rdi, rbx, rdx);
245 Variable* new_target_var = scope()->new_target_var();
246 if (new_target_var != nullptr) {
247 Comment cmnt(masm_, "[ new.target");
249 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
250 Label non_adaptor_frame;
251 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
252 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
253 __ j(not_equal, &non_adaptor_frame);
254 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
256 __ bind(&non_adaptor_frame);
257 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
258 Smi::FromInt(StackFrame::CONSTRUCT));
260 Label non_construct_frame, done;
261 __ j(not_equal, &non_construct_frame);
265 Operand(rax, ConstructFrameConstants::kOriginalConstructorOffset));
268 // Non-construct frame
269 __ bind(&non_construct_frame);
270 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
273 SetVar(new_target_var, rax, rbx, rdx);
276 // Possibly allocate RestParameters
278 Variable* rest_param = scope()->rest_parameter(&rest_index);
280 Comment cmnt(masm_, "[ Allocate rest parameter array");
282 int num_parameters = info->scope()->num_parameters();
283 int offset = num_parameters * kPointerSize;
286 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
288 __ Push(Smi::FromInt(num_parameters));
289 __ Push(Smi::FromInt(rest_index));
290 __ Push(Smi::FromInt(language_mode()));
292 RestParamAccessStub stub(isolate());
295 SetVar(rest_param, rax, rbx, rdx);
298 // Possibly allocate an arguments object.
299 Variable* arguments = scope()->arguments();
300 if (arguments != NULL) {
301 // Arguments object must be allocated after the context object, in
302 // case the "arguments" or ".arguments" variables are in the context.
303 Comment cmnt(masm_, "[ Allocate arguments object");
304 if (function_in_register) {
307 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
309 // The receiver is just before the parameters on the caller's stack.
310 int num_parameters = info->scope()->num_parameters();
311 int offset = num_parameters * kPointerSize;
313 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
315 __ Push(Smi::FromInt(num_parameters));
316 // Arguments to ArgumentsAccessStub:
317 // function, receiver address, parameter count.
318 // The stub will rewrite receiver and parameter count if the previous
319 // stack frame was an arguments adapter frame.
321 ArgumentsAccessStub::Type type;
322 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
323 type = ArgumentsAccessStub::NEW_STRICT;
324 } else if (function()->has_duplicate_parameters()) {
325 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
327 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
329 ArgumentsAccessStub stub(isolate(), type);
332 SetVar(arguments, rax, rbx, rdx);
336 __ CallRuntime(Runtime::kTraceEnter, 0);
339 // Visit the declarations and body unless there is an illegal
341 if (scope()->HasIllegalRedeclaration()) {
342 Comment cmnt(masm_, "[ Declarations");
343 scope()->VisitIllegalRedeclaration(this);
346 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
347 { Comment cmnt(masm_, "[ Declarations");
348 VisitDeclarations(scope()->declarations());
351 { Comment cmnt(masm_, "[ Stack check");
352 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
354 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
355 __ j(above_equal, &ok, Label::kNear);
356 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
360 { Comment cmnt(masm_, "[ Body");
361 DCHECK(loop_depth() == 0);
362 VisitStatements(function()->body());
363 DCHECK(loop_depth() == 0);
367 // Always emit a 'return undefined' in case control fell off the end of
369 { Comment cmnt(masm_, "[ return <undefined>;");
370 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
371 EmitReturnSequence();
376 void FullCodeGenerator::ClearAccumulator() {
381 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
382 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
383 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
384 Smi::FromInt(-delta));
388 void FullCodeGenerator::EmitProfilingCounterReset() {
389 int reset_value = FLAG_interrupt_budget;
390 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
391 __ Move(kScratchRegister, Smi::FromInt(reset_value));
392 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
396 static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
399 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
400 Label* back_edge_target) {
401 Comment cmnt(masm_, "[ Back edge bookkeeping");
404 DCHECK(back_edge_target->is_bound());
405 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
406 int weight = Min(kMaxBackEdgeWeight,
407 Max(1, distance / kCodeSizeMultiplier));
408 EmitProfilingCounterDecrement(weight);
410 __ j(positive, &ok, Label::kNear);
412 PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
413 DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
414 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
416 // Record a mapping of this PC offset to the OSR id. This is used to find
417 // the AST id from the unoptimized code in order to use it as a key into
418 // the deoptimization input data found in the optimized code.
419 RecordBackEdge(stmt->OsrEntryId());
421 EmitProfilingCounterReset();
425 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
426 // Record a mapping of the OSR id to this PC. This is used if the OSR
427 // entry becomes the target of a bailout. We don't expect it to be, but
428 // we want it to work if it is.
429 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
433 void FullCodeGenerator::EmitReturnSequence() {
434 Comment cmnt(masm_, "[ Return sequence");
435 if (return_label_.is_bound()) {
436 __ jmp(&return_label_);
438 __ bind(&return_label_);
441 __ CallRuntime(Runtime::kTraceExit, 1);
443 // Pretend that the exit is a backwards jump to the entry.
445 if (info_->ShouldSelfOptimize()) {
446 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
448 int distance = masm_->pc_offset();
449 weight = Min(kMaxBackEdgeWeight,
450 Max(1, distance / kCodeSizeMultiplier));
452 EmitProfilingCounterDecrement(weight);
454 __ j(positive, &ok, Label::kNear);
456 __ call(isolate()->builtins()->InterruptCheck(),
457 RelocInfo::CODE_TARGET);
459 EmitProfilingCounterReset();
462 SetReturnPosition(function());
463 int no_frame_start = masm_->pc_offset();
466 int arg_count = info_->scope()->num_parameters() + 1;
467 int arguments_bytes = arg_count * kPointerSize;
468 __ Ret(arguments_bytes, rcx);
470 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
475 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
476 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
477 MemOperand operand = codegen()->VarOperand(var, result_register());
482 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
486 void FullCodeGenerator::AccumulatorValueContext::Plug(
487 Heap::RootListIndex index) const {
488 __ LoadRoot(result_register(), index);
492 void FullCodeGenerator::StackValueContext::Plug(
493 Heap::RootListIndex index) const {
498 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
499 codegen()->PrepareForBailoutBeforeSplit(condition(),
503 if (index == Heap::kUndefinedValueRootIndex ||
504 index == Heap::kNullValueRootIndex ||
505 index == Heap::kFalseValueRootIndex) {
506 if (false_label_ != fall_through_) __ jmp(false_label_);
507 } else if (index == Heap::kTrueValueRootIndex) {
508 if (true_label_ != fall_through_) __ jmp(true_label_);
510 __ LoadRoot(result_register(), index);
511 codegen()->DoTest(this);
516 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
520 void FullCodeGenerator::AccumulatorValueContext::Plug(
521 Handle<Object> lit) const {
523 __ SafeMove(result_register(), Smi::cast(*lit));
525 __ Move(result_register(), lit);
530 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
532 __ SafePush(Smi::cast(*lit));
539 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
540 codegen()->PrepareForBailoutBeforeSplit(condition(),
544 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
545 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
546 if (false_label_ != fall_through_) __ jmp(false_label_);
547 } else if (lit->IsTrue() || lit->IsJSObject()) {
548 if (true_label_ != fall_through_) __ jmp(true_label_);
549 } else if (lit->IsString()) {
550 if (String::cast(*lit)->length() == 0) {
551 if (false_label_ != fall_through_) __ jmp(false_label_);
553 if (true_label_ != fall_through_) __ jmp(true_label_);
555 } else if (lit->IsSmi()) {
556 if (Smi::cast(*lit)->value() == 0) {
557 if (false_label_ != fall_through_) __ jmp(false_label_);
559 if (true_label_ != fall_through_) __ jmp(true_label_);
562 // For simplicity we always test the accumulator register.
563 __ Move(result_register(), lit);
564 codegen()->DoTest(this);
569 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
570 Register reg) const {
576 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
578 Register reg) const {
581 __ Move(result_register(), reg);
585 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
586 Register reg) const {
588 if (count > 1) __ Drop(count - 1);
589 __ movp(Operand(rsp, 0), reg);
593 void FullCodeGenerator::TestContext::DropAndPlug(int count,
594 Register reg) const {
596 // For simplicity we always test the accumulator register.
598 __ Move(result_register(), reg);
599 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
600 codegen()->DoTest(this);
604 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
605 Label* materialize_false) const {
606 DCHECK(materialize_true == materialize_false);
607 __ bind(materialize_true);
611 void FullCodeGenerator::AccumulatorValueContext::Plug(
612 Label* materialize_true,
613 Label* materialize_false) const {
615 __ bind(materialize_true);
616 __ Move(result_register(), isolate()->factory()->true_value());
617 __ jmp(&done, Label::kNear);
618 __ bind(materialize_false);
619 __ Move(result_register(), isolate()->factory()->false_value());
624 void FullCodeGenerator::StackValueContext::Plug(
625 Label* materialize_true,
626 Label* materialize_false) const {
628 __ bind(materialize_true);
629 __ Push(isolate()->factory()->true_value());
630 __ jmp(&done, Label::kNear);
631 __ bind(materialize_false);
632 __ Push(isolate()->factory()->false_value());
637 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
638 Label* materialize_false) const {
639 DCHECK(materialize_true == true_label_);
640 DCHECK(materialize_false == false_label_);
644 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
645 Heap::RootListIndex value_root_index =
646 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
647 __ LoadRoot(result_register(), value_root_index);
651 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
652 Heap::RootListIndex value_root_index =
653 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
654 __ PushRoot(value_root_index);
658 void FullCodeGenerator::TestContext::Plug(bool flag) const {
659 codegen()->PrepareForBailoutBeforeSplit(condition(),
664 if (true_label_ != fall_through_) __ jmp(true_label_);
666 if (false_label_ != fall_through_) __ jmp(false_label_);
671 void FullCodeGenerator::DoTest(Expression* condition,
674 Label* fall_through) {
675 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
676 CallIC(ic, condition->test_id());
677 __ testp(result_register(), result_register());
678 // The stub returns nonzero for true.
679 Split(not_zero, if_true, if_false, fall_through);
683 void FullCodeGenerator::Split(Condition cc,
686 Label* fall_through) {
687 if (if_false == fall_through) {
689 } else if (if_true == fall_through) {
690 __ j(NegateCondition(cc), if_false);
698 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
699 DCHECK(var->IsStackAllocated());
700 // Offset is negative because higher indexes are at lower addresses.
701 int offset = -var->index() * kPointerSize;
702 // Adjust by a (parameter or local) base offset.
703 if (var->IsParameter()) {
704 offset += kFPOnStackSize + kPCOnStackSize +
705 (info_->scope()->num_parameters() - 1) * kPointerSize;
707 offset += JavaScriptFrameConstants::kLocal0Offset;
709 return Operand(rbp, offset);
713 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
714 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
715 if (var->IsContextSlot()) {
716 int context_chain_length = scope()->ContextChainLength(var->scope());
717 __ LoadContext(scratch, context_chain_length);
718 return ContextOperand(scratch, var->index());
720 return StackOperand(var);
725 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
726 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
727 MemOperand location = VarOperand(var, dest);
728 __ movp(dest, location);
732 void FullCodeGenerator::SetVar(Variable* var,
736 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
737 DCHECK(!scratch0.is(src));
738 DCHECK(!scratch0.is(scratch1));
739 DCHECK(!scratch1.is(src));
740 MemOperand location = VarOperand(var, scratch0);
741 __ movp(location, src);
743 // Emit the write barrier code if the location is in the heap.
744 if (var->IsContextSlot()) {
745 int offset = Context::SlotOffset(var->index());
746 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
751 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
752 bool should_normalize,
755 // Only prepare for bailouts before splits if we're in a test
756 // context. Otherwise, we let the Visit function deal with the
757 // preparation to avoid preparing with the same AST id twice.
758 if (!context()->IsTest() || !info_->IsOptimizable()) return;
761 if (should_normalize) __ jmp(&skip, Label::kNear);
762 PrepareForBailout(expr, TOS_REG);
763 if (should_normalize) {
764 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
765 Split(equal, if_true, if_false, NULL);
771 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
772 // The variable in the declaration always resides in the current context.
773 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
774 if (generate_debug_code_) {
775 // Check that we're not inside a with or catch context.
776 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
777 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
778 __ Check(not_equal, kDeclarationInWithContext);
779 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
780 __ Check(not_equal, kDeclarationInCatchContext);
785 void FullCodeGenerator::VisitVariableDeclaration(
786 VariableDeclaration* declaration) {
787 // If it was not possible to allocate the variable at compile time, we
788 // need to "declare" it at runtime to make sure it actually exists in the
790 VariableProxy* proxy = declaration->proxy();
791 VariableMode mode = declaration->mode();
792 Variable* variable = proxy->var();
793 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
794 switch (variable->location()) {
795 case VariableLocation::GLOBAL:
796 case VariableLocation::UNALLOCATED:
797 globals_->Add(variable->name(), zone());
798 globals_->Add(variable->binding_needs_init()
799 ? isolate()->factory()->the_hole_value()
800 : isolate()->factory()->undefined_value(),
804 case VariableLocation::PARAMETER:
805 case VariableLocation::LOCAL:
807 Comment cmnt(masm_, "[ VariableDeclaration");
808 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
809 __ movp(StackOperand(variable), kScratchRegister);
813 case VariableLocation::CONTEXT:
815 Comment cmnt(masm_, "[ VariableDeclaration");
816 EmitDebugCheckDeclarationContext(variable);
817 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
818 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
819 // No write barrier since the hole value is in old space.
820 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
824 case VariableLocation::LOOKUP: {
825 Comment cmnt(masm_, "[ VariableDeclaration");
827 __ Push(variable->name());
828 // Declaration nodes are always introduced in one of four modes.
829 DCHECK(IsDeclaredVariableMode(mode));
830 PropertyAttributes attr =
831 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
832 __ Push(Smi::FromInt(attr));
833 // Push initial value, if any.
834 // Note: For variables we must not push an initial value (such as
835 // 'undefined') because we may have a (legal) redeclaration and we
836 // must not destroy the current value.
838 __ PushRoot(Heap::kTheHoleValueRootIndex);
840 __ Push(Smi::FromInt(0)); // Indicates no initial value.
842 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
849 void FullCodeGenerator::VisitFunctionDeclaration(
850 FunctionDeclaration* declaration) {
851 VariableProxy* proxy = declaration->proxy();
852 Variable* variable = proxy->var();
853 switch (variable->location()) {
854 case VariableLocation::GLOBAL:
855 case VariableLocation::UNALLOCATED: {
856 globals_->Add(variable->name(), zone());
857 Handle<SharedFunctionInfo> function =
858 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
859 // Check for stack-overflow exception.
860 if (function.is_null()) return SetStackOverflow();
861 globals_->Add(function, zone());
865 case VariableLocation::PARAMETER:
866 case VariableLocation::LOCAL: {
867 Comment cmnt(masm_, "[ FunctionDeclaration");
868 VisitForAccumulatorValue(declaration->fun());
869 __ movp(StackOperand(variable), result_register());
873 case VariableLocation::CONTEXT: {
874 Comment cmnt(masm_, "[ FunctionDeclaration");
875 EmitDebugCheckDeclarationContext(variable);
876 VisitForAccumulatorValue(declaration->fun());
877 __ movp(ContextOperand(rsi, variable->index()), result_register());
878 int offset = Context::SlotOffset(variable->index());
879 // We know that we have written a function, which is not a smi.
880 __ RecordWriteContextSlot(rsi,
887 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
891 case VariableLocation::LOOKUP: {
892 Comment cmnt(masm_, "[ FunctionDeclaration");
894 __ Push(variable->name());
895 __ Push(Smi::FromInt(NONE));
896 VisitForStackValue(declaration->fun());
897 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
904 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
905 // Call the runtime to declare the globals.
906 __ Push(rsi); // The context is the first argument.
908 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
909 __ CallRuntime(Runtime::kDeclareGlobals, 3);
910 // Return value is ignored.
914 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
915 // Call the runtime to declare the modules.
916 __ Push(descriptions);
917 __ CallRuntime(Runtime::kDeclareModules, 1);
918 // Return value is ignored.
922 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
923 Comment cmnt(masm_, "[ SwitchStatement");
924 Breakable nested_statement(this, stmt);
925 SetStatementPosition(stmt);
927 // Keep the switch value on the stack until a case matches.
928 VisitForStackValue(stmt->tag());
929 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
931 ZoneList<CaseClause*>* clauses = stmt->cases();
932 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
934 Label next_test; // Recycled for each test.
935 // Compile all the tests with branches to their bodies.
936 for (int i = 0; i < clauses->length(); i++) {
937 CaseClause* clause = clauses->at(i);
938 clause->body_target()->Unuse();
940 // The default is not a test, but remember it as final fall through.
941 if (clause->is_default()) {
942 default_clause = clause;
946 Comment cmnt(masm_, "[ Case comparison");
950 // Compile the label expression.
951 VisitForAccumulatorValue(clause->label());
953 // Perform the comparison as if via '==='.
954 __ movp(rdx, Operand(rsp, 0)); // Switch value.
955 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
956 JumpPatchSite patch_site(masm_);
957 if (inline_smi_code) {
961 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
964 __ j(not_equal, &next_test);
965 __ Drop(1); // Switch value is no longer needed.
966 __ jmp(clause->body_target());
970 // Record position before stub call for type feedback.
971 SetExpressionPosition(clause);
972 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
973 strength(language_mode())).code();
974 CallIC(ic, clause->CompareId());
975 patch_site.EmitPatchInfo();
978 __ jmp(&skip, Label::kNear);
979 PrepareForBailout(clause, TOS_REG);
980 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
981 __ j(not_equal, &next_test);
983 __ jmp(clause->body_target());
987 __ j(not_equal, &next_test);
988 __ Drop(1); // Switch value is no longer needed.
989 __ jmp(clause->body_target());
992 // Discard the test value and jump to the default if present, otherwise to
993 // the end of the statement.
995 __ Drop(1); // Switch value is no longer needed.
996 if (default_clause == NULL) {
997 __ jmp(nested_statement.break_label());
999 __ jmp(default_clause->body_target());
1002 // Compile all the case bodies.
1003 for (int i = 0; i < clauses->length(); i++) {
1004 Comment cmnt(masm_, "[ Case body");
1005 CaseClause* clause = clauses->at(i);
1006 __ bind(clause->body_target());
1007 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1008 VisitStatements(clause->statements());
1011 __ bind(nested_statement.break_label());
1012 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1016 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1017 Comment cmnt(masm_, "[ ForInStatement");
1018 SetStatementPosition(stmt, SKIP_BREAK);
1020 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1023 ForIn loop_statement(this, stmt);
1024 increment_loop_depth();
1026 // Get the object to enumerate over. If the object is null or undefined, skip
1027 // over the loop. See ECMA-262 version 5, section 12.6.4.
1028 SetExpressionAsStatementPosition(stmt->enumerable());
1029 VisitForAccumulatorValue(stmt->enumerable());
1030 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1032 Register null_value = rdi;
1033 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1034 __ cmpp(rax, null_value);
1037 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1039 // Convert the object to a JS object.
1040 Label convert, done_convert;
1041 __ JumpIfSmi(rax, &convert);
1042 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1043 __ j(above_equal, &done_convert);
1046 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1047 __ bind(&done_convert);
1048 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1051 // Check for proxies.
1053 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1054 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1055 __ j(below_equal, &call_runtime);
1057 // Check cache validity in generated code. This is a fast case for
1058 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1059 // guarantee cache validity, call the runtime system to check cache
1060 // validity or get the property names in a fixed array.
1061 __ CheckEnumCache(null_value, &call_runtime);
1063 // The enum cache is valid. Load the map of the object being
1064 // iterated over and use the cache for the iteration.
1066 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1067 __ jmp(&use_cache, Label::kNear);
1069 // Get the set of properties to enumerate.
1070 __ bind(&call_runtime);
1071 __ Push(rax); // Duplicate the enumerable object on the stack.
1072 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1073 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1075 // If we got a map from the runtime call, we can do a fast
1076 // modification check. Otherwise, we got a fixed array, and we have
1077 // to do a slow check.
1079 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1080 Heap::kMetaMapRootIndex);
1081 __ j(not_equal, &fixed_array);
1083 // We got a map in register rax. Get the enumeration cache from it.
1084 __ bind(&use_cache);
1086 Label no_descriptors;
1088 __ EnumLength(rdx, rax);
1089 __ Cmp(rdx, Smi::FromInt(0));
1090 __ j(equal, &no_descriptors);
1092 __ LoadInstanceDescriptors(rax, rcx);
1093 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1094 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1096 // Set up the four remaining stack slots.
1097 __ Push(rax); // Map.
1098 __ Push(rcx); // Enumeration cache.
1099 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1100 __ Push(Smi::FromInt(0)); // Initial index.
1103 __ bind(&no_descriptors);
1104 __ addp(rsp, Immediate(kPointerSize));
1107 // We got a fixed array in register rax. Iterate through that.
1109 __ bind(&fixed_array);
1111 // No need for a write barrier, we are storing a Smi in the feedback vector.
1112 __ Move(rbx, FeedbackVector());
1113 int vector_index = FeedbackVector()->GetIndex(slot);
1114 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(vector_index)),
1115 TypeFeedbackVector::MegamorphicSentinel(isolate()));
1116 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1117 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1118 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1119 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1120 __ j(above, &non_proxy);
1121 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1122 __ bind(&non_proxy);
1123 __ Push(rbx); // Smi
1124 __ Push(rax); // Array
1125 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1126 __ Push(rax); // Fixed array length (as smi).
1127 __ Push(Smi::FromInt(0)); // Initial index.
1129 // Generate code for doing the condition check.
1130 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1132 SetExpressionAsStatementPosition(stmt->each());
1134 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1135 __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1136 __ j(above_equal, loop_statement.break_label());
1138 // Get the current entry of the array into register rbx.
1139 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1140 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1141 __ movp(rbx, FieldOperand(rbx,
1144 FixedArray::kHeaderSize));
1146 // Get the expected map from the stack or a smi in the
1147 // permanent slow case into register rdx.
1148 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1150 // Check if the expected map still matches that of the enumerable.
1151 // If not, we may have to filter the key.
1153 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1154 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1155 __ j(equal, &update_each, Label::kNear);
1157 // For proxies, no filtering is done.
1158 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1159 __ Cmp(rdx, Smi::FromInt(0));
1160 __ j(equal, &update_each, Label::kNear);
1162 // Convert the entry to a string or null if it isn't a property
1163 // anymore. If the property has been removed while iterating, we
1165 __ Push(rcx); // Enumerable.
1166 __ Push(rbx); // Current entry.
1167 __ CallRuntime(Runtime::kForInFilter, 2);
1168 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1169 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1170 __ j(equal, loop_statement.continue_label());
1173 // Update the 'each' property or variable from the possibly filtered
1174 // entry in register rbx.
1175 __ bind(&update_each);
1176 __ movp(result_register(), rbx);
1177 // Perform the assignment as if via '='.
1178 { EffectContext context(this);
1179 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1180 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1183 // Generate code for the body of the loop.
1184 Visit(stmt->body());
1186 // Generate code for going to the next element by incrementing the
1187 // index (smi) stored on top of the stack.
1188 __ bind(loop_statement.continue_label());
1189 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1191 EmitBackEdgeBookkeeping(stmt, &loop);
1194 // Remove the pointers stored on the stack.
1195 __ bind(loop_statement.break_label());
1196 __ addp(rsp, Immediate(5 * kPointerSize));
1198 // Exit and decrement the loop depth.
1199 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1201 decrement_loop_depth();
1205 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1207 // Use the fast case closure allocation code that allocates in new
1208 // space for nested functions that don't need literals cloning. If
1209 // we're running with the --always-opt or the --prepare-always-opt
1210 // flag, we need to use the runtime function so that the new function
1211 // we are creating here gets a chance to have its code optimized and
1212 // doesn't just get a copy of the existing unoptimized code.
1213 if (!FLAG_always_opt &&
1214 !FLAG_prepare_always_opt &&
1216 scope()->is_function_scope() &&
1217 info->num_literals() == 0) {
1218 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1225 ? isolate()->factory()->true_value()
1226 : isolate()->factory()->false_value());
1227 __ CallRuntime(Runtime::kNewClosure, 3);
1229 context()->Plug(rax);
1233 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1235 FeedbackVectorICSlot slot) {
1236 if (NeedsHomeObject(initializer)) {
1237 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1238 __ Move(StoreDescriptor::NameRegister(),
1239 isolate()->factory()->home_object_symbol());
1240 __ movp(StoreDescriptor::ValueRegister(),
1241 Operand(rsp, offset * kPointerSize));
1242 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1248 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1249 TypeofMode typeof_mode,
1251 Register context = rsi;
1252 Register temp = rdx;
1256 if (s->num_heap_slots() > 0) {
1257 if (s->calls_sloppy_eval()) {
1258 // Check that extension is NULL.
1259 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1261 __ j(not_equal, slow);
1263 // Load next context in chain.
1264 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1265 // Walk the rest of the chain without clobbering rsi.
1268 // If no outer scope calls eval, we do not need to check more
1269 // context extensions. If we have reached an eval scope, we check
1270 // all extensions from this point.
1271 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1272 s = s->outer_scope();
1275 if (s != NULL && s->is_eval_scope()) {
1276 // Loop up the context chain. There is no frame effect so it is
1277 // safe to use raw labels here.
1279 if (!context.is(temp)) {
1280 __ movp(temp, context);
1282 // Load map for comparison into register, outside loop.
1283 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1285 // Terminate at native context.
1286 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1287 __ j(equal, &fast, Label::kNear);
1288 // Check that extension is NULL.
1289 __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1290 __ j(not_equal, slow);
1291 // Load next context in chain.
1292 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1297 // All extension objects were empty and it is safe to use a normal global
1299 EmitGlobalVariableLoad(proxy, typeof_mode);
1303 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1305 DCHECK(var->IsContextSlot());
1306 Register context = rsi;
1307 Register temp = rbx;
1309 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1310 if (s->num_heap_slots() > 0) {
1311 if (s->calls_sloppy_eval()) {
1312 // Check that extension is NULL.
1313 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1315 __ j(not_equal, slow);
1317 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1318 // Walk the rest of the chain without clobbering rsi.
1322 // Check that last extension is NULL.
1323 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1324 __ j(not_equal, slow);
1326 // This function is used only for loads, not stores, so it's safe to
1327 // return an rsi-based operand (the write barrier cannot be allowed to
1328 // destroy the rsi register).
1329 return ContextOperand(context, var->index());
1333 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1334 TypeofMode typeof_mode,
1335 Label* slow, Label* done) {
1336 // Generate fast-case code for variables that might be shadowed by
1337 // eval-introduced variables. Eval is used a lot without
1338 // introducing variables. In those cases, we do not want to
1339 // perform a runtime call for all variables in the scope
1340 // containing the eval.
1341 Variable* var = proxy->var();
1342 if (var->mode() == DYNAMIC_GLOBAL) {
1343 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1345 } else if (var->mode() == DYNAMIC_LOCAL) {
1346 Variable* local = var->local_if_not_shadowed();
1347 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1348 if (local->mode() == LET || local->mode() == CONST ||
1349 local->mode() == CONST_LEGACY) {
1350 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1351 __ j(not_equal, done);
1352 if (local->mode() == CONST_LEGACY) {
1353 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1354 } else { // LET || CONST
1355 __ Push(var->name());
1356 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1364 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1365 TypeofMode typeof_mode) {
1366 Variable* var = proxy->var();
1367 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1368 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1369 if (var->IsGlobalSlot()) {
1370 DCHECK(var->index() > 0);
1371 DCHECK(var->IsStaticGlobalObjectProperty());
1372 // Each var occupies two slots in the context: for reads and writes.
1373 int const slot = var->index();
1374 int const depth = scope()->ContextChainLength(var->scope());
1375 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1376 __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
1377 __ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
1378 LoadGlobalViaContextStub stub(isolate(), depth);
1381 __ Push(Smi::FromInt(slot));
1382 __ Push(var->name());
1383 __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
1387 __ Move(LoadDescriptor::NameRegister(), var->name());
1388 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1389 __ Move(LoadDescriptor::SlotRegister(),
1390 SmiFromSlot(proxy->VariableFeedbackSlot()));
1391 CallLoadIC(typeof_mode);
1396 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1397 TypeofMode typeof_mode) {
1398 // Record position before possible IC call.
1399 SetExpressionPosition(proxy);
1400 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1401 Variable* var = proxy->var();
1403 // Three cases: global variables, lookup variables, and all other types of
1405 switch (var->location()) {
1406 case VariableLocation::GLOBAL:
1407 case VariableLocation::UNALLOCATED: {
1408 Comment cmnt(masm_, "[ Global variable");
1409 EmitGlobalVariableLoad(proxy, typeof_mode);
1410 context()->Plug(rax);
1414 case VariableLocation::PARAMETER:
1415 case VariableLocation::LOCAL:
1416 case VariableLocation::CONTEXT: {
1417 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1418 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1420 if (var->binding_needs_init()) {
1421 // var->scope() may be NULL when the proxy is located in eval code and
1422 // refers to a potential outside binding. Currently those bindings are
1423 // always looked up dynamically, i.e. in that case
1424 // var->location() == LOOKUP.
1426 DCHECK(var->scope() != NULL);
1428 // Check if the binding really needs an initialization check. The check
1429 // can be skipped in the following situation: we have a LET or CONST
1430 // binding in harmony mode, both the Variable and the VariableProxy have
1431 // the same declaration scope (i.e. they are both in global code, in the
1432 // same function or in the same eval code) and the VariableProxy is in
1433 // the source physically located after the initializer of the variable.
1435 // We cannot skip any initialization checks for CONST in non-harmony
1436 // mode because const variables may be declared but never initialized:
1437 // if (false) { const x; }; var y = x;
1439 // The condition on the declaration scopes is a conservative check for
1440 // nested functions that access a binding and are called before the
1441 // binding is initialized:
1442 // function() { f(); let x = 1; function f() { x = 2; } }
1444 bool skip_init_check;
1445 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1446 skip_init_check = false;
1447 } else if (var->is_this()) {
1448 CHECK(info_->function() != nullptr &&
1449 (info_->function()->kind() & kSubclassConstructor) != 0);
1450 // TODO(dslomov): implement 'this' hole check elimination.
1451 skip_init_check = false;
1453 // Check that we always have valid source position.
1454 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1455 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1456 skip_init_check = var->mode() != CONST_LEGACY &&
1457 var->initializer_position() < proxy->position();
1460 if (!skip_init_check) {
1461 // Let and const need a read barrier.
1464 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1465 __ j(not_equal, &done, Label::kNear);
1466 if (var->mode() == LET || var->mode() == CONST) {
1467 // Throw a reference error when using an uninitialized let/const
1468 // binding in harmony mode.
1469 __ Push(var->name());
1470 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1472 // Uninitalized const bindings outside of harmony mode are unholed.
1473 DCHECK(var->mode() == CONST_LEGACY);
1474 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1477 context()->Plug(rax);
1481 context()->Plug(var);
1485 case VariableLocation::LOOKUP: {
1486 Comment cmnt(masm_, "[ Lookup slot");
1488 // Generate code for loading from variables potentially shadowed
1489 // by eval-introduced variables.
1490 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1492 __ Push(rsi); // Context.
1493 __ Push(var->name());
1494 Runtime::FunctionId function_id =
1495 typeof_mode == NOT_INSIDE_TYPEOF
1496 ? Runtime::kLoadLookupSlot
1497 : Runtime::kLoadLookupSlotNoReferenceError;
1498 __ CallRuntime(function_id, 2);
1500 context()->Plug(rax);
1507 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1508 Comment cmnt(masm_, "[ RegExpLiteral");
1510 // Registers will be used as follows:
1511 // rdi = JS function.
1512 // rcx = literals array.
1513 // rbx = regexp literal.
1514 // rax = regexp literal clone.
1515 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1516 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1517 int literal_offset =
1518 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1519 __ movp(rbx, FieldOperand(rcx, literal_offset));
1520 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1521 __ j(not_equal, &materialized, Label::kNear);
1523 // Create regexp literal using runtime function
1524 // Result will be in rax.
1526 __ Push(Smi::FromInt(expr->literal_index()));
1527 __ Push(expr->pattern());
1528 __ Push(expr->flags());
1529 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1532 __ bind(&materialized);
1533 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1534 Label allocated, runtime_allocate;
1535 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1538 __ bind(&runtime_allocate);
1540 __ Push(Smi::FromInt(size));
1541 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1544 __ bind(&allocated);
1545 // Copy the content into the newly allocated memory.
1546 // (Unroll copy loop once for better throughput).
1547 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1548 __ movp(rdx, FieldOperand(rbx, i));
1549 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1550 __ movp(FieldOperand(rax, i), rdx);
1551 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1553 if ((size % (2 * kPointerSize)) != 0) {
1554 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1555 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1557 context()->Plug(rax);
1561 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1562 if (expression == NULL) {
1563 __ PushRoot(Heap::kNullValueRootIndex);
1565 VisitForStackValue(expression);
1570 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1571 Comment cmnt(masm_, "[ ObjectLiteral");
1573 Handle<FixedArray> constant_properties = expr->constant_properties();
1574 int flags = expr->ComputeFlags();
1575 if (MustCreateObjectLiteralWithRuntime(expr)) {
1576 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1577 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1578 __ Push(Smi::FromInt(expr->literal_index()));
1579 __ Push(constant_properties);
1580 __ Push(Smi::FromInt(flags));
1581 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1583 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1584 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1585 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1586 __ Move(rcx, constant_properties);
1587 __ Move(rdx, Smi::FromInt(flags));
1588 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1591 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1593 // If result_saved is true the result is on top of the stack. If
1594 // result_saved is false the result is in rax.
1595 bool result_saved = false;
1597 AccessorTable accessor_table(zone());
1598 int property_index = 0;
1599 // store_slot_index points to the vector IC slot for the next store IC used.
1600 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1601 // and must be updated if the number of store ICs emitted here changes.
1602 int store_slot_index = 0;
1603 for (; property_index < expr->properties()->length(); property_index++) {
1604 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1605 if (property->is_computed_name()) break;
1606 if (property->IsCompileTimeValue()) continue;
1608 Literal* key = property->key()->AsLiteral();
1609 Expression* value = property->value();
1610 if (!result_saved) {
1611 __ Push(rax); // Save result on the stack
1612 result_saved = true;
1614 switch (property->kind()) {
1615 case ObjectLiteral::Property::CONSTANT:
1617 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1618 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1620 case ObjectLiteral::Property::COMPUTED:
1621 // It is safe to use [[Put]] here because the boilerplate already
1622 // contains computed properties with an uninitialized value.
1623 if (key->value()->IsInternalizedString()) {
1624 if (property->emit_store()) {
1625 VisitForAccumulatorValue(value);
1626 DCHECK(StoreDescriptor::ValueRegister().is(rax));
1627 __ Move(StoreDescriptor::NameRegister(), key->value());
1628 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1629 if (FLAG_vector_stores) {
1630 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1633 CallStoreIC(key->LiteralFeedbackId());
1635 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1637 if (NeedsHomeObject(value)) {
1638 __ movp(StoreDescriptor::ReceiverRegister(), rax);
1639 __ Move(StoreDescriptor::NameRegister(),
1640 isolate()->factory()->home_object_symbol());
1641 __ movp(StoreDescriptor::ValueRegister(), Operand(rsp, 0));
1642 if (FLAG_vector_stores) {
1643 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1648 VisitForEffect(value);
1652 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1653 VisitForStackValue(key);
1654 VisitForStackValue(value);
1655 if (property->emit_store()) {
1656 EmitSetHomeObjectIfNeeded(
1657 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1658 __ Push(Smi::FromInt(SLOPPY)); // Language mode
1659 __ CallRuntime(Runtime::kSetProperty, 4);
1664 case ObjectLiteral::Property::PROTOTYPE:
1665 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1666 VisitForStackValue(value);
1667 DCHECK(property->emit_store());
1668 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1670 case ObjectLiteral::Property::GETTER:
1671 if (property->emit_store()) {
1672 accessor_table.lookup(key)->second->getter = value;
1675 case ObjectLiteral::Property::SETTER:
1676 if (property->emit_store()) {
1677 accessor_table.lookup(key)->second->setter = value;
1683 // Emit code to define accessors, using only a single call to the runtime for
1684 // each pair of corresponding getters and setters.
1685 for (AccessorTable::Iterator it = accessor_table.begin();
1686 it != accessor_table.end();
1688 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1689 VisitForStackValue(it->first);
1690 EmitAccessor(it->second->getter);
1691 EmitSetHomeObjectIfNeeded(
1692 it->second->getter, 2,
1693 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1694 EmitAccessor(it->second->setter);
1695 EmitSetHomeObjectIfNeeded(
1696 it->second->setter, 3,
1697 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1698 __ Push(Smi::FromInt(NONE));
1699 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1702 // Object literals have two parts. The "static" part on the left contains no
1703 // computed property names, and so we can compute its map ahead of time; see
1704 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1705 // starts with the first computed property name, and continues with all
1706 // properties to its right. All the code from above initializes the static
1707 // component of the object literal, and arranges for the map of the result to
1708 // reflect the static order in which the keys appear. For the dynamic
1709 // properties, we compile them into a series of "SetOwnProperty" runtime
1710 // calls. This will preserve insertion order.
1711 for (; property_index < expr->properties()->length(); property_index++) {
1712 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1714 Expression* value = property->value();
1715 if (!result_saved) {
1716 __ Push(rax); // Save result on the stack
1717 result_saved = true;
1720 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1722 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1723 DCHECK(!property->is_computed_name());
1724 VisitForStackValue(value);
1725 DCHECK(property->emit_store());
1726 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1728 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1729 VisitForStackValue(value);
1730 EmitSetHomeObjectIfNeeded(
1731 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1733 switch (property->kind()) {
1734 case ObjectLiteral::Property::CONSTANT:
1735 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1736 case ObjectLiteral::Property::COMPUTED:
1737 if (property->emit_store()) {
1738 __ Push(Smi::FromInt(NONE));
1739 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1745 case ObjectLiteral::Property::PROTOTYPE:
1749 case ObjectLiteral::Property::GETTER:
1750 __ Push(Smi::FromInt(NONE));
1751 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1754 case ObjectLiteral::Property::SETTER:
1755 __ Push(Smi::FromInt(NONE));
1756 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1762 if (expr->has_function()) {
1763 DCHECK(result_saved);
1764 __ Push(Operand(rsp, 0));
1765 __ CallRuntime(Runtime::kToFastProperties, 1);
1769 context()->PlugTOS();
1771 context()->Plug(rax);
1774 // Verify that compilation exactly consumed the number of store ic slots that
1775 // the ObjectLiteral node had to offer.
1776 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1780 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1781 Comment cmnt(masm_, "[ ArrayLiteral");
1783 expr->BuildConstantElements(isolate());
1784 Handle<FixedArray> constant_elements = expr->constant_elements();
1785 bool has_constant_fast_elements =
1786 IsFastObjectElementsKind(expr->constant_elements_kind());
1788 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1789 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1790 // If the only customer of allocation sites is transitioning, then
1791 // we can turn it off if we don't have anywhere else to transition to.
1792 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1795 if (MustCreateArrayLiteralWithRuntime(expr)) {
1796 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1797 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1798 __ Push(Smi::FromInt(expr->literal_index()));
1799 __ Push(constant_elements);
1800 __ Push(Smi::FromInt(expr->ComputeFlags()));
1801 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1803 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1804 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1805 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1806 __ Move(rcx, constant_elements);
1807 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1810 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1812 bool result_saved = false; // Is the result saved to the stack?
1813 ZoneList<Expression*>* subexprs = expr->values();
1814 int length = subexprs->length();
1816 // Emit code to evaluate all the non-constant subexpressions and to store
1817 // them into the newly cloned array.
1818 int array_index = 0;
1819 for (; array_index < length; array_index++) {
1820 Expression* subexpr = subexprs->at(array_index);
1821 if (subexpr->IsSpread()) break;
1823 // If the subexpression is a literal or a simple materialized literal it
1824 // is already set in the cloned array.
1825 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1827 if (!result_saved) {
1828 __ Push(rax); // array literal
1829 __ Push(Smi::FromInt(expr->literal_index()));
1830 result_saved = true;
1832 VisitForAccumulatorValue(subexpr);
1834 if (has_constant_fast_elements) {
1835 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1836 // cannot transition and don't need to call the runtime stub.
1837 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1838 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1839 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1840 // Store the subexpression value in the array's elements.
1841 __ movp(FieldOperand(rbx, offset), result_register());
1842 // Update the write barrier for the array store.
1843 __ RecordWriteField(rbx, offset, result_register(), rcx,
1845 EMIT_REMEMBERED_SET,
1848 // Store the subexpression value in the array's elements.
1849 __ Move(rcx, Smi::FromInt(array_index));
1850 StoreArrayLiteralElementStub stub(isolate());
1854 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1857 // In case the array literal contains spread expressions it has two parts. The
1858 // first part is the "static" array which has a literal index is handled
1859 // above. The second part is the part after the first spread expression
1860 // (inclusive) and these elements gets appended to the array. Note that the
1861 // number elements an iterable produces is unknown ahead of time.
1862 if (array_index < length && result_saved) {
1863 __ Drop(1); // literal index
1865 result_saved = false;
1867 for (; array_index < length; array_index++) {
1868 Expression* subexpr = subexprs->at(array_index);
1871 if (subexpr->IsSpread()) {
1872 VisitForStackValue(subexpr->AsSpread()->expression());
1873 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1875 VisitForStackValue(subexpr);
1876 __ CallRuntime(Runtime::kAppendElement, 2);
1879 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1883 __ Drop(1); // literal index
1884 context()->PlugTOS();
1886 context()->Plug(rax);
1891 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1892 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1894 Comment cmnt(masm_, "[ Assignment");
1895 SetExpressionPosition(expr, INSERT_BREAK);
1897 Property* property = expr->target()->AsProperty();
1898 LhsKind assign_type = Property::GetAssignType(property);
1900 // Evaluate LHS expression.
1901 switch (assign_type) {
1903 // Nothing to do here.
1905 case NAMED_PROPERTY:
1906 if (expr->is_compound()) {
1907 // We need the receiver both on the stack and in the register.
1908 VisitForStackValue(property->obj());
1909 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1911 VisitForStackValue(property->obj());
1914 case NAMED_SUPER_PROPERTY:
1916 property->obj()->AsSuperPropertyReference()->this_var());
1917 VisitForAccumulatorValue(
1918 property->obj()->AsSuperPropertyReference()->home_object());
1919 __ Push(result_register());
1920 if (expr->is_compound()) {
1921 __ Push(MemOperand(rsp, kPointerSize));
1922 __ Push(result_register());
1925 case KEYED_SUPER_PROPERTY:
1927 property->obj()->AsSuperPropertyReference()->this_var());
1929 property->obj()->AsSuperPropertyReference()->home_object());
1930 VisitForAccumulatorValue(property->key());
1931 __ Push(result_register());
1932 if (expr->is_compound()) {
1933 __ Push(MemOperand(rsp, 2 * kPointerSize));
1934 __ Push(MemOperand(rsp, 2 * kPointerSize));
1935 __ Push(result_register());
1938 case KEYED_PROPERTY: {
1939 if (expr->is_compound()) {
1940 VisitForStackValue(property->obj());
1941 VisitForStackValue(property->key());
1942 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1943 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1945 VisitForStackValue(property->obj());
1946 VisitForStackValue(property->key());
1952 // For compound assignments we need another deoptimization point after the
1953 // variable/property load.
1954 if (expr->is_compound()) {
1955 { AccumulatorValueContext context(this);
1956 switch (assign_type) {
1958 EmitVariableLoad(expr->target()->AsVariableProxy());
1959 PrepareForBailout(expr->target(), TOS_REG);
1961 case NAMED_PROPERTY:
1962 EmitNamedPropertyLoad(property);
1963 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1965 case NAMED_SUPER_PROPERTY:
1966 EmitNamedSuperPropertyLoad(property);
1967 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1969 case KEYED_SUPER_PROPERTY:
1970 EmitKeyedSuperPropertyLoad(property);
1971 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1973 case KEYED_PROPERTY:
1974 EmitKeyedPropertyLoad(property);
1975 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1980 Token::Value op = expr->binary_op();
1981 __ Push(rax); // Left operand goes on the stack.
1982 VisitForAccumulatorValue(expr->value());
1984 AccumulatorValueContext context(this);
1985 if (ShouldInlineSmiCase(op)) {
1986 EmitInlineSmiBinaryOp(expr->binary_operation(),
1991 EmitBinaryOp(expr->binary_operation(), op);
1993 // Deoptimization point in case the binary operation may have side effects.
1994 PrepareForBailout(expr->binary_operation(), TOS_REG);
1996 VisitForAccumulatorValue(expr->value());
1999 SetExpressionPosition(expr);
2002 switch (assign_type) {
2004 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2005 expr->op(), expr->AssignmentSlot());
2006 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2007 context()->Plug(rax);
2009 case NAMED_PROPERTY:
2010 EmitNamedPropertyAssignment(expr);
2012 case NAMED_SUPER_PROPERTY:
2013 EmitNamedSuperPropertyStore(property);
2014 context()->Plug(rax);
2016 case KEYED_SUPER_PROPERTY:
2017 EmitKeyedSuperPropertyStore(property);
2018 context()->Plug(rax);
2020 case KEYED_PROPERTY:
2021 EmitKeyedPropertyAssignment(expr);
2027 void FullCodeGenerator::VisitYield(Yield* expr) {
2028 Comment cmnt(masm_, "[ Yield");
2029 SetExpressionPosition(expr);
2031 // Evaluate yielded value first; the initial iterator definition depends on
2032 // this. It stays on the stack while we update the iterator.
2033 VisitForStackValue(expr->expression());
2035 switch (expr->yield_kind()) {
2036 case Yield::kSuspend:
2037 // Pop value from top-of-stack slot; box result into result register.
2038 EmitCreateIteratorResult(false);
2039 __ Push(result_register());
2041 case Yield::kInitial: {
2042 Label suspend, continuation, post_runtime, resume;
2045 __ bind(&continuation);
2046 __ RecordGeneratorContinuation();
2050 VisitForAccumulatorValue(expr->generator_object());
2051 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2052 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2053 Smi::FromInt(continuation.pos()));
2054 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2056 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2058 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
2060 __ j(equal, &post_runtime);
2061 __ Push(rax); // generator object
2062 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2063 __ movp(context_register(),
2064 Operand(rbp, StandardFrameConstants::kContextOffset));
2065 __ bind(&post_runtime);
2067 __ Pop(result_register());
2068 EmitReturnSequence();
2071 context()->Plug(result_register());
2075 case Yield::kFinal: {
2076 VisitForAccumulatorValue(expr->generator_object());
2077 __ Move(FieldOperand(result_register(),
2078 JSGeneratorObject::kContinuationOffset),
2079 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2080 // Pop value from top-of-stack slot, box result into result register.
2081 EmitCreateIteratorResult(true);
2082 EmitUnwindBeforeReturn();
2083 EmitReturnSequence();
2087 case Yield::kDelegating: {
2088 VisitForStackValue(expr->generator_object());
2090 // Initial stack layout is as follows:
2091 // [sp + 1 * kPointerSize] iter
2092 // [sp + 0 * kPointerSize] g
2094 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2095 Label l_next, l_call, l_loop;
2096 Register load_receiver = LoadDescriptor::ReceiverRegister();
2097 Register load_name = LoadDescriptor::NameRegister();
2099 // Initial send value is undefined.
2100 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2103 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2105 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2107 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2108 __ Push(rax); // exception
2111 // try { received = %yield result }
2112 // Shuffle the received result above a try handler and yield it without
2115 __ Pop(rax); // result
2116 int handler_index = NewHandlerTableEntry();
2117 EnterTryBlock(handler_index, &l_catch);
2118 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2119 __ Push(rax); // result
2122 __ bind(&l_continuation);
2123 __ RecordGeneratorContinuation();
2126 __ bind(&l_suspend);
2127 const int generator_object_depth = kPointerSize + try_block_size;
2128 __ movp(rax, Operand(rsp, generator_object_depth));
2130 __ Push(Smi::FromInt(handler_index)); // handler-index
2131 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2132 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2133 Smi::FromInt(l_continuation.pos()));
2134 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2136 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2138 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2139 __ movp(context_register(),
2140 Operand(rbp, StandardFrameConstants::kContextOffset));
2141 __ Pop(rax); // result
2142 EmitReturnSequence();
2143 __ bind(&l_resume); // received in rax
2144 ExitTryBlock(handler_index);
2146 // receiver = iter; f = 'next'; arg = received;
2149 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2150 __ Push(load_name); // "next"
2151 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2152 __ Push(rax); // received
2154 // result = receiver[f](arg);
2156 __ movp(load_receiver, Operand(rsp, kPointerSize));
2157 __ Move(LoadDescriptor::SlotRegister(),
2158 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
2159 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2160 CallIC(ic, TypeFeedbackId::None());
2162 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2164 SetCallPosition(expr, 1);
2165 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2168 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2169 __ Drop(1); // The function is still on the stack; drop it.
2171 // if (!result.done) goto l_try;
2173 __ Move(load_receiver, rax);
2174 __ Push(load_receiver); // save result
2175 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2176 __ Move(LoadDescriptor::SlotRegister(),
2177 SmiFromSlot(expr->DoneFeedbackSlot()));
2178 CallLoadIC(NOT_INSIDE_TYPEOF); // rax=result.done
2179 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2181 __ testp(result_register(), result_register());
2185 __ Pop(load_receiver); // result
2186 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2187 __ Move(LoadDescriptor::SlotRegister(),
2188 SmiFromSlot(expr->ValueFeedbackSlot()));
2189 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in rax
2190 context()->DropAndPlug(2, rax); // drop iter and g
2197 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2199 JSGeneratorObject::ResumeMode resume_mode) {
2200 // The value stays in rax, and is ultimately read by the resumed generator, as
2201 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2202 // is read to throw the value when the resumed generator is already closed.
2203 // rbx will hold the generator object until the activation has been resumed.
2204 VisitForStackValue(generator);
2205 VisitForAccumulatorValue(value);
2208 // Load suspended function and context.
2209 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2210 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2213 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2215 // Push holes for arguments to generator function.
2216 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2217 __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2218 SharedFunctionInfo::kFormalParameterCountOffset);
2219 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2220 Label push_argument_holes, push_frame;
2221 __ bind(&push_argument_holes);
2222 __ subp(rdx, Immediate(1));
2223 __ j(carry, &push_frame);
2225 __ jmp(&push_argument_holes);
2227 // Enter a new JavaScript frame, and initialize its slots as they were when
2228 // the generator was suspended.
2229 Label resume_frame, done;
2230 __ bind(&push_frame);
2231 __ call(&resume_frame);
2233 __ bind(&resume_frame);
2234 __ pushq(rbp); // Caller's frame pointer.
2236 __ Push(rsi); // Callee's context.
2237 __ Push(rdi); // Callee's JS Function.
2239 // Load the operand stack size.
2240 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2241 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2242 __ SmiToInteger32(rdx, rdx);
2244 // If we are sending a value and there is no operand stack, we can jump back
2246 if (resume_mode == JSGeneratorObject::NEXT) {
2248 __ cmpp(rdx, Immediate(0));
2249 __ j(not_zero, &slow_resume);
2250 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2251 __ SmiToInteger64(rcx,
2252 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2254 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2255 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2257 __ bind(&slow_resume);
2260 // Otherwise, we push holes for the operand stack and call the runtime to fix
2261 // up the stack and the handlers.
2262 Label push_operand_holes, call_resume;
2263 __ bind(&push_operand_holes);
2264 __ subp(rdx, Immediate(1));
2265 __ j(carry, &call_resume);
2267 __ jmp(&push_operand_holes);
2268 __ bind(&call_resume);
2270 __ Push(result_register());
2271 __ Push(Smi::FromInt(resume_mode));
2272 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2273 // Not reached: the runtime call returns elsewhere.
2274 __ Abort(kGeneratorFailedToResume);
2277 context()->Plug(result_register());
2281 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2285 const int instance_size = 5 * kPointerSize;
2286 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2289 __ Allocate(instance_size, rax, rcx, rdx, &gc_required, TAG_OBJECT);
2292 __ bind(&gc_required);
2293 __ Push(Smi::FromInt(instance_size));
2294 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2295 __ movp(context_register(),
2296 Operand(rbp, StandardFrameConstants::kContextOffset));
2298 __ bind(&allocated);
2299 __ movp(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2300 __ movp(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
2301 __ movp(rbx, ContextOperand(rbx, Context::ITERATOR_RESULT_MAP_INDEX));
2303 __ Move(rdx, isolate()->factory()->ToBoolean(done));
2304 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2305 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2306 isolate()->factory()->empty_fixed_array());
2307 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2308 isolate()->factory()->empty_fixed_array());
2309 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2311 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2314 // Only the value field needs a write barrier, as the other values are in the
2316 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2317 rcx, rdx, kDontSaveFPRegs);
2321 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2322 SetExpressionPosition(prop);
2323 Literal* key = prop->key()->AsLiteral();
2324 DCHECK(!prop->IsSuperAccess());
2326 __ Move(LoadDescriptor::NameRegister(), key->value());
2327 __ Move(LoadDescriptor::SlotRegister(),
2328 SmiFromSlot(prop->PropertyFeedbackSlot()));
2329 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2333 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2334 // Stack: receiver, home_object
2335 SetExpressionPosition(prop);
2336 Literal* key = prop->key()->AsLiteral();
2337 DCHECK(!key->value()->IsSmi());
2338 DCHECK(prop->IsSuperAccess());
2340 __ Push(key->value());
2341 __ Push(Smi::FromInt(language_mode()));
2342 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2346 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2347 SetExpressionPosition(prop);
2348 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2349 __ Move(LoadDescriptor::SlotRegister(),
2350 SmiFromSlot(prop->PropertyFeedbackSlot()));
2355 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2356 // Stack: receiver, home_object, key.
2357 SetExpressionPosition(prop);
2358 __ Push(Smi::FromInt(language_mode()));
2359 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2363 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2366 Expression* right) {
2367 // Do combined smi check of the operands. Left operand is on the
2368 // stack (popped into rdx). Right operand is in rax but moved into
2369 // rcx to make the shifts easier.
2370 Label done, stub_call, smi_case;
2374 JumpPatchSite patch_site(masm_);
2375 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2377 __ bind(&stub_call);
2380 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2381 CallIC(code, expr->BinaryOperationFeedbackId());
2382 patch_site.EmitPatchInfo();
2383 __ jmp(&done, Label::kNear);
2388 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2391 __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2394 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2397 __ SmiAdd(rax, rdx, rcx, &stub_call);
2400 __ SmiSub(rax, rdx, rcx, &stub_call);
2403 __ SmiMul(rax, rdx, rcx, &stub_call);
2406 __ SmiOr(rax, rdx, rcx);
2408 case Token::BIT_AND:
2409 __ SmiAnd(rax, rdx, rcx);
2411 case Token::BIT_XOR:
2412 __ SmiXor(rax, rdx, rcx);
2420 context()->Plug(rax);
2424 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2425 int* used_store_slots) {
2426 // Constructor is in rax.
2427 DCHECK(lit != NULL);
2430 // No access check is needed here since the constructor is created by the
2432 Register scratch = rbx;
2433 __ movp(scratch, FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset));
2436 for (int i = 0; i < lit->properties()->length(); i++) {
2437 ObjectLiteral::Property* property = lit->properties()->at(i);
2438 Expression* value = property->value();
2440 if (property->is_static()) {
2441 __ Push(Operand(rsp, kPointerSize)); // constructor
2443 __ Push(Operand(rsp, 0)); // prototype
2445 EmitPropertyKey(property, lit->GetIdForProperty(i));
2447 // The static prototype property is read only. We handle the non computed
2448 // property name case in the parser. Since this is the only case where we
2449 // need to check for an own read only property we special case this so we do
2450 // not need to do this for every property.
2451 if (property->is_static() && property->is_computed_name()) {
2452 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2456 VisitForStackValue(value);
2457 EmitSetHomeObjectIfNeeded(value, 2,
2458 lit->SlotForHomeObject(value, used_store_slots));
2460 switch (property->kind()) {
2461 case ObjectLiteral::Property::CONSTANT:
2462 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2463 case ObjectLiteral::Property::PROTOTYPE:
2465 case ObjectLiteral::Property::COMPUTED:
2466 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2469 case ObjectLiteral::Property::GETTER:
2470 __ Push(Smi::FromInt(DONT_ENUM));
2471 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2474 case ObjectLiteral::Property::SETTER:
2475 __ Push(Smi::FromInt(DONT_ENUM));
2476 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2485 __ CallRuntime(Runtime::kToFastProperties, 1);
2488 __ CallRuntime(Runtime::kToFastProperties, 1);
2490 if (is_strong(language_mode())) {
2492 FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset));
2495 // TODO(conradw): It would be more efficient to define the properties with
2496 // the right attributes the first time round.
2497 // Freeze the prototype.
2498 __ CallRuntime(Runtime::kObjectFreeze, 1);
2499 // Freeze the constructor.
2500 __ CallRuntime(Runtime::kObjectFreeze, 1);
2505 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2508 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2509 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2510 CallIC(code, expr->BinaryOperationFeedbackId());
2511 patch_site.EmitPatchInfo();
2512 context()->Plug(rax);
2516 void FullCodeGenerator::EmitAssignment(Expression* expr,
2517 FeedbackVectorICSlot slot) {
2518 DCHECK(expr->IsValidReferenceExpressionOrThis());
2520 Property* prop = expr->AsProperty();
2521 LhsKind assign_type = Property::GetAssignType(prop);
2523 switch (assign_type) {
2525 Variable* var = expr->AsVariableProxy()->var();
2526 EffectContext context(this);
2527 EmitVariableAssignment(var, Token::ASSIGN, slot);
2530 case NAMED_PROPERTY: {
2531 __ Push(rax); // Preserve value.
2532 VisitForAccumulatorValue(prop->obj());
2533 __ Move(StoreDescriptor::ReceiverRegister(), rax);
2534 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2535 __ Move(StoreDescriptor::NameRegister(),
2536 prop->key()->AsLiteral()->value());
2537 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2541 case NAMED_SUPER_PROPERTY: {
2543 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2544 VisitForAccumulatorValue(
2545 prop->obj()->AsSuperPropertyReference()->home_object());
2546 // stack: value, this; rax: home_object
2547 Register scratch = rcx;
2548 Register scratch2 = rdx;
2549 __ Move(scratch, result_register()); // home_object
2550 __ movp(rax, MemOperand(rsp, kPointerSize)); // value
2551 __ movp(scratch2, MemOperand(rsp, 0)); // this
2552 __ movp(MemOperand(rsp, kPointerSize), scratch2); // this
2553 __ movp(MemOperand(rsp, 0), scratch); // home_object
2554 // stack: this, home_object; rax: value
2555 EmitNamedSuperPropertyStore(prop);
2558 case KEYED_SUPER_PROPERTY: {
2560 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2562 prop->obj()->AsSuperPropertyReference()->home_object());
2563 VisitForAccumulatorValue(prop->key());
2564 Register scratch = rcx;
2565 Register scratch2 = rdx;
2566 __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize)); // value
2567 // stack: value, this, home_object; rax: key, rdx: value
2568 __ movp(scratch, MemOperand(rsp, kPointerSize)); // this
2569 __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
2570 __ movp(scratch, MemOperand(rsp, 0)); // home_object
2571 __ movp(MemOperand(rsp, kPointerSize), scratch);
2572 __ movp(MemOperand(rsp, 0), rax);
2573 __ Move(rax, scratch2);
2574 // stack: this, home_object, key; rax: value.
2575 EmitKeyedSuperPropertyStore(prop);
2578 case KEYED_PROPERTY: {
2579 __ Push(rax); // Preserve value.
2580 VisitForStackValue(prop->obj());
2581 VisitForAccumulatorValue(prop->key());
2582 __ Move(StoreDescriptor::NameRegister(), rax);
2583 __ Pop(StoreDescriptor::ReceiverRegister());
2584 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2585 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2587 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2592 context()->Plug(rax);
2596 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2597 Variable* var, MemOperand location) {
2598 __ movp(location, rax);
2599 if (var->IsContextSlot()) {
2601 __ RecordWriteContextSlot(
2602 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2607 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2608 FeedbackVectorICSlot slot) {
2609 if (var->IsUnallocated()) {
2610 // Global var, const, or let.
2611 __ Move(StoreDescriptor::NameRegister(), var->name());
2612 __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2613 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2616 } else if (var->IsGlobalSlot()) {
2617 // Global var, const, or let.
2618 DCHECK(var->index() > 0);
2619 DCHECK(var->IsStaticGlobalObjectProperty());
2620 // Each var occupies two slots in the context: for reads and writes.
2621 int const slot = var->index() + 1;
2622 int const depth = scope()->ContextChainLength(var->scope());
2623 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2624 __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
2625 __ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
2626 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
2627 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2630 __ Push(Smi::FromInt(slot));
2631 __ Push(var->name());
2633 __ CallRuntime(is_strict(language_mode())
2634 ? Runtime::kStoreGlobalViaContext_Strict
2635 : Runtime::kStoreGlobalViaContext_Sloppy,
2639 } else if (var->mode() == LET && op != Token::INIT_LET) {
2640 // Non-initializing assignment to let variable needs a write barrier.
2641 DCHECK(!var->IsLookupSlot());
2642 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2644 MemOperand location = VarOperand(var, rcx);
2645 __ movp(rdx, location);
2646 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2647 __ j(not_equal, &assign, Label::kNear);
2648 __ Push(var->name());
2649 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2651 EmitStoreToStackLocalOrContextSlot(var, location);
2653 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2654 // Assignment to const variable needs a write barrier.
2655 DCHECK(!var->IsLookupSlot());
2656 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2658 MemOperand location = VarOperand(var, rcx);
2659 __ movp(rdx, location);
2660 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2661 __ j(not_equal, &const_error, Label::kNear);
2662 __ Push(var->name());
2663 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2664 __ bind(&const_error);
2665 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2667 } else if (var->is_this() && op == Token::INIT_CONST) {
2668 // Initializing assignment to const {this} needs a write barrier.
2669 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2670 Label uninitialized_this;
2671 MemOperand location = VarOperand(var, rcx);
2672 __ movp(rdx, location);
2673 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2674 __ j(equal, &uninitialized_this);
2675 __ Push(var->name());
2676 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2677 __ bind(&uninitialized_this);
2678 EmitStoreToStackLocalOrContextSlot(var, location);
2680 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2681 if (var->IsLookupSlot()) {
2682 // Assignment to var.
2683 __ Push(rax); // Value.
2684 __ Push(rsi); // Context.
2685 __ Push(var->name());
2686 __ Push(Smi::FromInt(language_mode()));
2687 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2689 // Assignment to var or initializing assignment to let/const in harmony
2691 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2692 MemOperand location = VarOperand(var, rcx);
2693 if (generate_debug_code_ && op == Token::INIT_LET) {
2694 // Check for an uninitialized let binding.
2695 __ movp(rdx, location);
2696 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2697 __ Check(equal, kLetBindingReInitialization);
2699 EmitStoreToStackLocalOrContextSlot(var, location);
2702 } else if (op == Token::INIT_CONST_LEGACY) {
2703 // Const initializers need a write barrier.
2704 DCHECK(var->mode() == CONST_LEGACY);
2705 DCHECK(!var->IsParameter()); // No const parameters.
2706 if (var->IsLookupSlot()) {
2709 __ Push(var->name());
2710 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2712 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2714 MemOperand location = VarOperand(var, rcx);
2715 __ movp(rdx, location);
2716 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2717 __ j(not_equal, &skip);
2718 EmitStoreToStackLocalOrContextSlot(var, location);
2723 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2724 if (is_strict(language_mode())) {
2725 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2727 // Silently ignore store in sloppy mode.
2732 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2733 // Assignment to a property, using a named store IC.
2734 Property* prop = expr->target()->AsProperty();
2735 DCHECK(prop != NULL);
2736 DCHECK(prop->key()->IsLiteral());
2738 __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2739 __ Pop(StoreDescriptor::ReceiverRegister());
2740 if (FLAG_vector_stores) {
2741 EmitLoadStoreICSlot(expr->AssignmentSlot());
2744 CallStoreIC(expr->AssignmentFeedbackId());
2747 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2748 context()->Plug(rax);
2752 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2753 // Assignment to named property of super.
2755 // stack : receiver ('this'), home_object
2756 DCHECK(prop != NULL);
2757 Literal* key = prop->key()->AsLiteral();
2758 DCHECK(key != NULL);
2760 __ Push(key->value());
2762 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2763 : Runtime::kStoreToSuper_Sloppy),
2768 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2769 // Assignment to named property of super.
2771 // stack : receiver ('this'), home_object, key
2772 DCHECK(prop != NULL);
2776 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2777 : Runtime::kStoreKeyedToSuper_Sloppy),
2782 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2783 // Assignment to a property, using a keyed store IC.
2784 __ Pop(StoreDescriptor::NameRegister()); // Key.
2785 __ Pop(StoreDescriptor::ReceiverRegister());
2786 DCHECK(StoreDescriptor::ValueRegister().is(rax));
2788 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2789 if (FLAG_vector_stores) {
2790 EmitLoadStoreICSlot(expr->AssignmentSlot());
2793 CallIC(ic, expr->AssignmentFeedbackId());
2796 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2797 context()->Plug(rax);
2801 void FullCodeGenerator::VisitProperty(Property* expr) {
2802 Comment cmnt(masm_, "[ Property");
2803 SetExpressionPosition(expr);
2805 Expression* key = expr->key();
2807 if (key->IsPropertyName()) {
2808 if (!expr->IsSuperAccess()) {
2809 VisitForAccumulatorValue(expr->obj());
2810 DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2811 __ movp(LoadDescriptor::ReceiverRegister(), rax);
2812 EmitNamedPropertyLoad(expr);
2814 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2816 expr->obj()->AsSuperPropertyReference()->home_object());
2817 EmitNamedSuperPropertyLoad(expr);
2820 if (!expr->IsSuperAccess()) {
2821 VisitForStackValue(expr->obj());
2822 VisitForAccumulatorValue(expr->key());
2823 __ Move(LoadDescriptor::NameRegister(), rax);
2824 __ Pop(LoadDescriptor::ReceiverRegister());
2825 EmitKeyedPropertyLoad(expr);
2827 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2829 expr->obj()->AsSuperPropertyReference()->home_object());
2830 VisitForStackValue(expr->key());
2831 EmitKeyedSuperPropertyLoad(expr);
2834 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2835 context()->Plug(rax);
2839 void FullCodeGenerator::CallIC(Handle<Code> code,
2840 TypeFeedbackId ast_id) {
2842 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2846 // Code common for calls using the IC.
2847 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2848 Expression* callee = expr->expression();
2850 CallICState::CallType call_type =
2851 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2852 // Get the target function.
2853 if (call_type == CallICState::FUNCTION) {
2854 { StackValueContext context(this);
2855 EmitVariableLoad(callee->AsVariableProxy());
2856 PrepareForBailout(callee, NO_REGISTERS);
2858 // Push undefined as receiver. This is patched in the method prologue if it
2859 // is a sloppy mode method.
2860 __ Push(isolate()->factory()->undefined_value());
2862 // Load the function from the receiver.
2863 DCHECK(callee->IsProperty());
2864 DCHECK(!callee->AsProperty()->IsSuperAccess());
2865 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2866 EmitNamedPropertyLoad(callee->AsProperty());
2867 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2868 // Push the target function under the receiver.
2869 __ Push(Operand(rsp, 0));
2870 __ movp(Operand(rsp, kPointerSize), rax);
2873 EmitCall(expr, call_type);
2877 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2878 Expression* callee = expr->expression();
2879 DCHECK(callee->IsProperty());
2880 Property* prop = callee->AsProperty();
2881 DCHECK(prop->IsSuperAccess());
2882 SetExpressionPosition(prop);
2884 Literal* key = prop->key()->AsLiteral();
2885 DCHECK(!key->value()->IsSmi());
2886 // Load the function from the receiver.
2887 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2888 VisitForStackValue(super_ref->home_object());
2889 VisitForAccumulatorValue(super_ref->this_var());
2892 __ Push(Operand(rsp, kPointerSize * 2));
2893 __ Push(key->value());
2894 __ Push(Smi::FromInt(language_mode()));
2898 // - this (receiver)
2899 // - this (receiver) <-- LoadFromSuper will pop here and below.
2903 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2905 // Replace home_object with target function.
2906 __ movp(Operand(rsp, kPointerSize), rax);
2909 // - target function
2910 // - this (receiver)
2911 EmitCall(expr, CallICState::METHOD);
2915 // Common code for calls using the IC.
2916 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2919 VisitForAccumulatorValue(key);
2921 Expression* callee = expr->expression();
2923 // Load the function from the receiver.
2924 DCHECK(callee->IsProperty());
2925 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2926 __ Move(LoadDescriptor::NameRegister(), rax);
2927 EmitKeyedPropertyLoad(callee->AsProperty());
2928 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2930 // Push the target function under the receiver.
2931 __ Push(Operand(rsp, 0));
2932 __ movp(Operand(rsp, kPointerSize), rax);
2934 EmitCall(expr, CallICState::METHOD);
2938 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2939 Expression* callee = expr->expression();
2940 DCHECK(callee->IsProperty());
2941 Property* prop = callee->AsProperty();
2942 DCHECK(prop->IsSuperAccess());
2944 SetExpressionPosition(prop);
2945 // Load the function from the receiver.
2946 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2947 VisitForStackValue(super_ref->home_object());
2948 VisitForAccumulatorValue(super_ref->this_var());
2951 __ Push(Operand(rsp, kPointerSize * 2));
2952 VisitForStackValue(prop->key());
2953 __ Push(Smi::FromInt(language_mode()));
2957 // - this (receiver)
2958 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2962 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2964 // Replace home_object with target function.
2965 __ movp(Operand(rsp, kPointerSize), rax);
2968 // - target function
2969 // - this (receiver)
2970 EmitCall(expr, CallICState::METHOD);
2974 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2975 // Load the arguments.
2976 ZoneList<Expression*>* args = expr->arguments();
2977 int arg_count = args->length();
2978 for (int i = 0; i < arg_count; i++) {
2979 VisitForStackValue(args->at(i));
2982 SetCallPosition(expr, arg_count);
2983 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2984 __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
2985 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2986 // Don't assign a type feedback id to the IC, since type feedback is provided
2987 // by the vector above.
2990 RecordJSReturnSite(expr);
2992 // Restore context register.
2993 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2994 // Discard the function left on TOS.
2995 context()->DropAndPlug(1, rax);
2999 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3000 // Push copy of the first argument or undefined if it doesn't exist.
3001 if (arg_count > 0) {
3002 __ Push(Operand(rsp, arg_count * kPointerSize));
3004 __ PushRoot(Heap::kUndefinedValueRootIndex);
3007 // Push the enclosing function.
3008 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3010 // Push the language mode.
3011 __ Push(Smi::FromInt(language_mode()));
3013 // Push the start position of the scope the calls resides in.
3014 __ Push(Smi::FromInt(scope()->start_position()));
3016 // Do the runtime call.
3017 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3021 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3022 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3023 VariableProxy* callee = expr->expression()->AsVariableProxy();
3024 if (callee->var()->IsLookupSlot()) {
3026 SetExpressionPosition(callee);
3027 // Generate code for loading from variables potentially shadowed by
3028 // eval-introduced variables.
3029 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3031 // Call the runtime to find the function to call (returned in rax) and
3032 // the object holding it (returned in rdx).
3033 __ Push(context_register());
3034 __ Push(callee->name());
3035 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3036 __ Push(rax); // Function.
3037 __ Push(rdx); // Receiver.
3038 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3040 // If fast case code has been generated, emit code to push the function
3041 // and receiver and have the slow path jump around this code.
3042 if (done.is_linked()) {
3044 __ jmp(&call, Label::kNear);
3048 // Pass undefined as the receiver, which is the WithBaseObject of a
3049 // non-object environment record. If the callee is sloppy, it will patch
3050 // it up to be the global receiver.
3051 __ PushRoot(Heap::kUndefinedValueRootIndex);
3055 VisitForStackValue(callee);
3056 // refEnv.WithBaseObject()
3057 __ PushRoot(Heap::kUndefinedValueRootIndex);
3062 void FullCodeGenerator::VisitCall(Call* expr) {
3064 // We want to verify that RecordJSReturnSite gets called on all paths
3065 // through this function. Avoid early returns.
3066 expr->return_is_recorded_ = false;
3069 Comment cmnt(masm_, "[ Call");
3070 Expression* callee = expr->expression();
3071 Call::CallType call_type = expr->GetCallType(isolate());
3073 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3074 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3075 // to resolve the function we need to call. Then we call the resolved
3076 // function using the given arguments.
3077 ZoneList<Expression*>* args = expr->arguments();
3078 int arg_count = args->length();
3079 PushCalleeAndWithBaseObject(expr);
3081 // Push the arguments.
3082 for (int i = 0; i < arg_count; i++) {
3083 VisitForStackValue(args->at(i));
3086 // Push a copy of the function (found below the arguments) and resolve
3088 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
3089 EmitResolvePossiblyDirectEval(arg_count);
3091 // Touch up the callee.
3092 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
3094 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3096 SetCallPosition(expr, arg_count);
3097 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3098 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
3100 RecordJSReturnSite(expr);
3101 // Restore context register.
3102 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3103 context()->DropAndPlug(1, rax);
3104 } else if (call_type == Call::GLOBAL_CALL) {
3105 EmitCallWithLoadIC(expr);
3107 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3108 // Call to a lookup slot (dynamically introduced variable).
3109 PushCalleeAndWithBaseObject(expr);
3111 } else if (call_type == Call::PROPERTY_CALL) {
3112 Property* property = callee->AsProperty();
3113 bool is_named_call = property->key()->IsPropertyName();
3114 if (property->IsSuperAccess()) {
3115 if (is_named_call) {
3116 EmitSuperCallWithLoadIC(expr);
3118 EmitKeyedSuperCallWithLoadIC(expr);
3121 VisitForStackValue(property->obj());
3122 if (is_named_call) {
3123 EmitCallWithLoadIC(expr);
3125 EmitKeyedCallWithLoadIC(expr, property->key());
3128 } else if (call_type == Call::SUPER_CALL) {
3129 EmitSuperConstructorCall(expr);
3131 DCHECK(call_type == Call::OTHER_CALL);
3132 // Call to an arbitrary expression not handled specially above.
3133 VisitForStackValue(callee);
3134 __ PushRoot(Heap::kUndefinedValueRootIndex);
3135 // Emit function call.
3140 // RecordJSReturnSite should have been called.
3141 DCHECK(expr->return_is_recorded_);
3146 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3147 Comment cmnt(masm_, "[ CallNew");
3148 // According to ECMA-262, section 11.2.2, page 44, the function
3149 // expression in new calls must be evaluated before the
3152 // Push constructor on the stack. If it's not a function it's used as
3153 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3155 DCHECK(!expr->expression()->IsSuperPropertyReference());
3156 VisitForStackValue(expr->expression());
3158 // Push the arguments ("left-to-right") on the stack.
3159 ZoneList<Expression*>* args = expr->arguments();
3160 int arg_count = args->length();
3161 for (int i = 0; i < arg_count; i++) {
3162 VisitForStackValue(args->at(i));
3165 // Call the construct call builtin that handles allocation and
3166 // constructor invocation.
3167 SetConstructCallPosition(expr);
3169 // Load function and argument count into rdi and rax.
3170 __ Set(rax, arg_count);
3171 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3173 // Record call targets in unoptimized code, but not in the snapshot.
3174 if (FLAG_pretenuring_call_new) {
3175 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3176 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3177 expr->CallNewFeedbackSlot().ToInt() + 1);
3180 __ Move(rbx, FeedbackVector());
3181 __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
3183 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3184 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3185 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3186 context()->Plug(rax);
3190 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3191 SuperCallReference* super_call_ref =
3192 expr->expression()->AsSuperCallReference();
3193 DCHECK_NOT_NULL(super_call_ref);
3195 EmitLoadSuperConstructor(super_call_ref);
3196 __ Push(result_register());
3198 // Push the arguments ("left-to-right") on the stack.
3199 ZoneList<Expression*>* args = expr->arguments();
3200 int arg_count = args->length();
3201 for (int i = 0; i < arg_count; i++) {
3202 VisitForStackValue(args->at(i));
3205 // Call the construct call builtin that handles allocation and
3206 // constructor invocation.
3207 SetConstructCallPosition(expr);
3209 // Load original constructor into rcx.
3210 VisitForAccumulatorValue(super_call_ref->new_target_var());
3211 __ movp(rcx, result_register());
3213 // Load function and argument count into rdi and rax.
3214 __ Set(rax, arg_count);
3215 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3217 // Record call targets in unoptimized code.
3218 if (FLAG_pretenuring_call_new) {
3220 /* TODO(dslomov): support pretenuring.
3221 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3222 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3223 expr->CallNewFeedbackSlot().ToInt() + 1);
3227 __ Move(rbx, FeedbackVector());
3228 __ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
3230 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3231 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3233 RecordJSReturnSite(expr);
3235 context()->Plug(rax);
3239 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3240 ZoneList<Expression*>* args = expr->arguments();
3241 DCHECK(args->length() == 1);
3243 VisitForAccumulatorValue(args->at(0));
3245 Label materialize_true, materialize_false;
3246 Label* if_true = NULL;
3247 Label* if_false = NULL;
3248 Label* fall_through = NULL;
3249 context()->PrepareTest(&materialize_true, &materialize_false,
3250 &if_true, &if_false, &fall_through);
3252 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3253 __ JumpIfSmi(rax, if_true);
3256 context()->Plug(if_true, if_false);
3260 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3261 ZoneList<Expression*>* args = expr->arguments();
3262 DCHECK(args->length() == 1);
3264 VisitForAccumulatorValue(args->at(0));
3266 Label materialize_true, materialize_false;
3267 Label* if_true = NULL;
3268 Label* if_false = NULL;
3269 Label* fall_through = NULL;
3270 context()->PrepareTest(&materialize_true, &materialize_false,
3271 &if_true, &if_false, &fall_through);
3273 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3274 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
3275 Split(non_negative_smi, if_true, if_false, fall_through);
3277 context()->Plug(if_true, if_false);
3281 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3282 ZoneList<Expression*>* args = expr->arguments();
3283 DCHECK(args->length() == 1);
3285 VisitForAccumulatorValue(args->at(0));
3287 Label materialize_true, materialize_false;
3288 Label* if_true = NULL;
3289 Label* if_false = NULL;
3290 Label* fall_through = NULL;
3291 context()->PrepareTest(&materialize_true, &materialize_false,
3292 &if_true, &if_false, &fall_through);
3294 __ JumpIfSmi(rax, if_false);
3295 __ CompareRoot(rax, Heap::kNullValueRootIndex);
3296 __ j(equal, if_true);
3297 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3298 // Undetectable objects behave like undefined when tested with typeof.
3299 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3300 Immediate(1 << Map::kIsUndetectable));
3301 __ j(not_zero, if_false);
3302 __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3303 __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3304 __ j(below, if_false);
3305 __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3306 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3307 Split(below_equal, if_true, if_false, fall_through);
3309 context()->Plug(if_true, if_false);
3313 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3314 ZoneList<Expression*>* args = expr->arguments();
3315 DCHECK(args->length() == 1);
3317 VisitForAccumulatorValue(args->at(0));
3319 Label materialize_true, materialize_false;
3320 Label* if_true = NULL;
3321 Label* if_false = NULL;
3322 Label* fall_through = NULL;
3323 context()->PrepareTest(&materialize_true, &materialize_false,
3324 &if_true, &if_false, &fall_through);
3326 __ JumpIfSmi(rax, if_false);
3327 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
3328 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3329 Split(above_equal, if_true, if_false, fall_through);
3331 context()->Plug(if_true, if_false);
3335 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3336 ZoneList<Expression*>* args = expr->arguments();
3337 DCHECK(args->length() == 1);
3339 VisitForAccumulatorValue(args->at(0));
3341 Label materialize_true, materialize_false;
3342 Label* if_true = NULL;
3343 Label* if_false = NULL;
3344 Label* fall_through = NULL;
3345 context()->PrepareTest(&materialize_true, &materialize_false,
3346 &if_true, &if_false, &fall_through);
3348 __ JumpIfSmi(rax, if_false);
3349 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3350 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3351 Immediate(1 << Map::kIsUndetectable));
3352 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3353 Split(not_zero, if_true, if_false, fall_through);
3355 context()->Plug(if_true, if_false);
3359 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3360 CallRuntime* expr) {
3361 ZoneList<Expression*>* args = expr->arguments();
3362 DCHECK(args->length() == 1);
3364 VisitForAccumulatorValue(args->at(0));
3366 Label materialize_true, materialize_false, skip_lookup;
3367 Label* if_true = NULL;
3368 Label* if_false = NULL;
3369 Label* fall_through = NULL;
3370 context()->PrepareTest(&materialize_true, &materialize_false,
3371 &if_true, &if_false, &fall_through);
3373 __ AssertNotSmi(rax);
3375 // Check whether this map has already been checked to be safe for default
3377 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3378 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3379 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3380 __ j(not_zero, &skip_lookup);
3382 // Check for fast case object. Generate false result for slow case object.
3383 __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3384 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3385 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3386 __ j(equal, if_false);
3388 // Look for valueOf string in the descriptor array, and indicate false if
3389 // found. Since we omit an enumeration index check, if it is added via a
3390 // transition that shares its descriptor array, this is a false positive.
3391 Label entry, loop, done;
3393 // Skip loop if no descriptors are valid.
3394 __ NumberOfOwnDescriptors(rcx, rbx);
3395 __ cmpp(rcx, Immediate(0));
3398 __ LoadInstanceDescriptors(rbx, r8);
3399 // rbx: descriptor array.
3400 // rcx: valid entries in the descriptor array.
3401 // Calculate the end of the descriptor array.
3402 __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3404 Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3405 // Calculate location of the first key name.
3406 __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3407 // Loop through all the keys in the descriptor array. If one of these is the
3408 // internalized string "valueOf" the result is false.
3411 __ movp(rdx, FieldOperand(r8, 0));
3412 __ Cmp(rdx, isolate()->factory()->value_of_string());
3413 __ j(equal, if_false);
3414 __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3417 __ j(not_equal, &loop);
3421 // Set the bit in the map to indicate that there is no local valueOf field.
3422 __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3423 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3425 __ bind(&skip_lookup);
3427 // If a valueOf property is not found on the object check that its
3428 // prototype is the un-modified String prototype. If not result is false.
3429 __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3430 __ testp(rcx, Immediate(kSmiTagMask));
3431 __ j(zero, if_false);
3432 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3433 __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3434 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3436 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3437 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3438 Split(equal, if_true, if_false, fall_through);
3440 context()->Plug(if_true, if_false);
3444 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3445 ZoneList<Expression*>* args = expr->arguments();
3446 DCHECK(args->length() == 1);
3448 VisitForAccumulatorValue(args->at(0));
3450 Label materialize_true, materialize_false;
3451 Label* if_true = NULL;
3452 Label* if_false = NULL;
3453 Label* fall_through = NULL;
3454 context()->PrepareTest(&materialize_true, &materialize_false,
3455 &if_true, &if_false, &fall_through);
3457 __ JumpIfSmi(rax, if_false);
3458 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3459 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3460 Split(equal, if_true, if_false, fall_through);
3462 context()->Plug(if_true, if_false);
3466 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3467 ZoneList<Expression*>* args = expr->arguments();
3468 DCHECK(args->length() == 1);
3470 VisitForAccumulatorValue(args->at(0));
3472 Label materialize_true, materialize_false;
3473 Label* if_true = NULL;
3474 Label* if_false = NULL;
3475 Label* fall_through = NULL;
3476 context()->PrepareTest(&materialize_true, &materialize_false,
3477 &if_true, &if_false, &fall_through);
3479 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3480 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3481 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3483 __ j(no_overflow, if_false);
3484 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3485 Immediate(0x00000000));
3486 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3487 Split(equal, if_true, if_false, fall_through);
3489 context()->Plug(if_true, if_false);
3493 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3494 ZoneList<Expression*>* args = expr->arguments();
3495 DCHECK(args->length() == 1);
3497 VisitForAccumulatorValue(args->at(0));
3499 Label materialize_true, materialize_false;
3500 Label* if_true = NULL;
3501 Label* if_false = NULL;
3502 Label* fall_through = NULL;
3503 context()->PrepareTest(&materialize_true, &materialize_false,
3504 &if_true, &if_false, &fall_through);
3506 __ JumpIfSmi(rax, if_false);
3507 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3508 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3509 Split(equal, if_true, if_false, fall_through);
3511 context()->Plug(if_true, if_false);
3515 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3516 ZoneList<Expression*>* args = expr->arguments();
3517 DCHECK(args->length() == 1);
3519 VisitForAccumulatorValue(args->at(0));
3521 Label materialize_true, materialize_false;
3522 Label* if_true = NULL;
3523 Label* if_false = NULL;
3524 Label* fall_through = NULL;
3525 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3526 &if_false, &fall_through);
3528 __ JumpIfSmi(rax, if_false);
3529 __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
3530 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3531 Split(equal, if_true, if_false, fall_through);
3533 context()->Plug(if_true, if_false);
3537 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3538 ZoneList<Expression*>* args = expr->arguments();
3539 DCHECK(args->length() == 1);
3541 VisitForAccumulatorValue(args->at(0));
3543 Label materialize_true, materialize_false;
3544 Label* if_true = NULL;
3545 Label* if_false = NULL;
3546 Label* fall_through = NULL;
3547 context()->PrepareTest(&materialize_true, &materialize_false,
3548 &if_true, &if_false, &fall_through);
3550 __ JumpIfSmi(rax, if_false);
3551 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3552 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3553 Split(equal, if_true, if_false, fall_through);
3555 context()->Plug(if_true, if_false);
3559 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3560 ZoneList<Expression*>* args = expr->arguments();
3561 DCHECK(args->length() == 1);
3563 VisitForAccumulatorValue(args->at(0));
3565 Label materialize_true, materialize_false;
3566 Label* if_true = NULL;
3567 Label* if_false = NULL;
3568 Label* fall_through = NULL;
3569 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3570 &if_false, &fall_through);
3572 __ JumpIfSmi(rax, if_false);
3574 __ movp(map, FieldOperand(rax, HeapObject::kMapOffset));
3575 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3576 __ j(less, if_false);
3577 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3578 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3579 Split(less_equal, if_true, if_false, fall_through);
3581 context()->Plug(if_true, if_false);
3585 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3586 DCHECK(expr->arguments()->length() == 0);
3588 Label materialize_true, materialize_false;
3589 Label* if_true = NULL;
3590 Label* if_false = NULL;
3591 Label* fall_through = NULL;
3592 context()->PrepareTest(&materialize_true, &materialize_false,
3593 &if_true, &if_false, &fall_through);
3595 // Get the frame pointer for the calling frame.
3596 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3598 // Skip the arguments adaptor frame if it exists.
3599 Label check_frame_marker;
3600 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3601 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3602 __ j(not_equal, &check_frame_marker);
3603 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3605 // Check the marker in the calling frame.
3606 __ bind(&check_frame_marker);
3607 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3608 Smi::FromInt(StackFrame::CONSTRUCT));
3609 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3610 Split(equal, if_true, if_false, fall_through);
3612 context()->Plug(if_true, if_false);
3616 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3617 ZoneList<Expression*>* args = expr->arguments();
3618 DCHECK(args->length() == 2);
3620 // Load the two objects into registers and perform the comparison.
3621 VisitForStackValue(args->at(0));
3622 VisitForAccumulatorValue(args->at(1));
3624 Label materialize_true, materialize_false;
3625 Label* if_true = NULL;
3626 Label* if_false = NULL;
3627 Label* fall_through = NULL;
3628 context()->PrepareTest(&materialize_true, &materialize_false,
3629 &if_true, &if_false, &fall_through);
3633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3634 Split(equal, if_true, if_false, fall_through);
3636 context()->Plug(if_true, if_false);
3640 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3641 ZoneList<Expression*>* args = expr->arguments();
3642 DCHECK(args->length() == 1);
3644 // ArgumentsAccessStub expects the key in rdx and the formal
3645 // parameter count in rax.
3646 VisitForAccumulatorValue(args->at(0));
3648 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3649 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3651 context()->Plug(rax);
3655 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3656 DCHECK(expr->arguments()->length() == 0);
3659 // Get the number of formal parameters.
3660 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3662 // Check if the calling frame is an arguments adaptor frame.
3663 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3664 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3665 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3666 __ j(not_equal, &exit, Label::kNear);
3668 // Arguments adaptor case: Read the arguments length from the
3670 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3674 context()->Plug(rax);
3678 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3679 ZoneList<Expression*>* args = expr->arguments();
3680 DCHECK(args->length() == 1);
3681 Label done, null, function, non_function_constructor;
3683 VisitForAccumulatorValue(args->at(0));
3685 // If the object is a smi, we return null.
3686 __ JumpIfSmi(rax, &null);
3688 // Check that the object is a JS object but take special care of JS
3689 // functions to make sure they have 'Function' as their class.
3690 // Assume that there are only two callable types, and one of them is at
3691 // either end of the type range for JS object types. Saves extra comparisons.
3692 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3693 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3694 // Map is now in rax.
3696 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3697 FIRST_SPEC_OBJECT_TYPE + 1);
3698 __ j(equal, &function);
3700 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3701 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3702 LAST_SPEC_OBJECT_TYPE - 1);
3703 __ j(equal, &function);
3704 // Assume that there is no larger type.
3705 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3707 // Check if the constructor in the map is a JS function.
3708 __ GetMapConstructor(rax, rax, rbx);
3709 __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3710 __ j(not_equal, &non_function_constructor);
3712 // rax now contains the constructor function. Grab the
3713 // instance class name from there.
3714 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3715 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3718 // Functions have class 'Function'.
3720 __ Move(rax, isolate()->factory()->Function_string());
3723 // Objects with a non-function constructor have class 'Object'.
3724 __ bind(&non_function_constructor);
3725 __ Move(rax, isolate()->factory()->Object_string());
3728 // Non-JS objects have class null.
3730 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3735 context()->Plug(rax);
3739 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3740 ZoneList<Expression*>* args = expr->arguments();
3741 DCHECK(args->length() == 1);
3743 VisitForAccumulatorValue(args->at(0)); // Load the object.
3746 // If the object is a smi return the object.
3747 __ JumpIfSmi(rax, &done);
3748 // If the object is not a value type, return the object.
3749 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3750 __ j(not_equal, &done);
3751 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3754 context()->Plug(rax);
3758 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3759 ZoneList<Expression*>* args = expr->arguments();
3760 DCHECK_EQ(1, args->length());
3762 VisitForAccumulatorValue(args->at(0));
3764 Label materialize_true, materialize_false;
3765 Label* if_true = nullptr;
3766 Label* if_false = nullptr;
3767 Label* fall_through = nullptr;
3768 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3769 &if_false, &fall_through);
3771 __ JumpIfSmi(rax, if_false);
3772 __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
3773 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3774 Split(equal, if_true, if_false, fall_through);
3776 context()->Plug(if_true, if_false);
3780 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3781 ZoneList<Expression*>* args = expr->arguments();
3782 DCHECK(args->length() == 2);
3783 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3784 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3786 VisitForAccumulatorValue(args->at(0)); // Load the object.
3788 Register object = rax;
3789 Register result = rax;
3790 Register scratch = rcx;
3792 if (FLAG_debug_code) {
3793 __ AssertNotSmi(object);
3794 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3795 __ Check(equal, kOperandIsNotADate);
3798 if (index->value() == 0) {
3799 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3801 Label runtime, done;
3802 if (index->value() < JSDate::kFirstUncachedField) {
3803 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3804 Operand stamp_operand = __ ExternalOperand(stamp);
3805 __ movp(scratch, stamp_operand);
3806 __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3807 __ j(not_equal, &runtime, Label::kNear);
3808 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3809 kPointerSize * index->value()));
3810 __ jmp(&done, Label::kNear);
3813 __ PrepareCallCFunction(2);
3814 __ movp(arg_reg_1, object);
3815 __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3816 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3817 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3821 context()->Plug(rax);
3825 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3826 ZoneList<Expression*>* args = expr->arguments();
3827 DCHECK_EQ(3, args->length());
3829 Register string = rax;
3830 Register index = rbx;
3831 Register value = rcx;
3833 VisitForStackValue(args->at(0)); // index
3834 VisitForStackValue(args->at(1)); // value
3835 VisitForAccumulatorValue(args->at(2)); // string
3839 if (FLAG_debug_code) {
3840 __ Check(__ CheckSmi(value), kNonSmiValue);
3841 __ Check(__ CheckSmi(index), kNonSmiValue);
3844 __ SmiToInteger32(value, value);
3845 __ SmiToInteger32(index, index);
3847 if (FLAG_debug_code) {
3848 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3849 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3852 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3854 context()->Plug(string);
3858 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3859 ZoneList<Expression*>* args = expr->arguments();
3860 DCHECK_EQ(3, args->length());
3862 Register string = rax;
3863 Register index = rbx;
3864 Register value = rcx;
3866 VisitForStackValue(args->at(0)); // index
3867 VisitForStackValue(args->at(1)); // value
3868 VisitForAccumulatorValue(args->at(2)); // string
3872 if (FLAG_debug_code) {
3873 __ Check(__ CheckSmi(value), kNonSmiValue);
3874 __ Check(__ CheckSmi(index), kNonSmiValue);
3877 __ SmiToInteger32(value, value);
3878 __ SmiToInteger32(index, index);
3880 if (FLAG_debug_code) {
3881 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3882 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3885 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3887 context()->Plug(rax);
3891 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3892 ZoneList<Expression*>* args = expr->arguments();
3893 DCHECK(args->length() == 2);
3895 VisitForStackValue(args->at(0)); // Load the object.
3896 VisitForAccumulatorValue(args->at(1)); // Load the value.
3897 __ Pop(rbx); // rax = value. rbx = object.
3900 // If the object is a smi, return the value.
3901 __ JumpIfSmi(rbx, &done);
3903 // If the object is not a value type, return the value.
3904 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3905 __ j(not_equal, &done);
3908 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3909 // Update the write barrier. Save the value as it will be
3910 // overwritten by the write barrier code and is needed afterward.
3912 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3915 context()->Plug(rax);
3919 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3920 ZoneList<Expression*>* args = expr->arguments();
3921 DCHECK_EQ(args->length(), 1);
3923 // Load the argument into rax and call the stub.
3924 VisitForAccumulatorValue(args->at(0));
3926 NumberToStringStub stub(isolate());
3928 context()->Plug(rax);
3932 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3933 ZoneList<Expression*>* args = expr->arguments();
3934 DCHECK(args->length() == 1);
3936 VisitForAccumulatorValue(args->at(0));
3939 StringCharFromCodeGenerator generator(rax, rbx);
3940 generator.GenerateFast(masm_);
3943 NopRuntimeCallHelper call_helper;
3944 generator.GenerateSlow(masm_, call_helper);
3947 context()->Plug(rbx);
3951 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3952 ZoneList<Expression*>* args = expr->arguments();
3953 DCHECK(args->length() == 2);
3955 VisitForStackValue(args->at(0));
3956 VisitForAccumulatorValue(args->at(1));
3958 Register object = rbx;
3959 Register index = rax;
3960 Register result = rdx;
3964 Label need_conversion;
3965 Label index_out_of_range;
3967 StringCharCodeAtGenerator generator(object,
3972 &index_out_of_range,
3973 STRING_INDEX_IS_NUMBER);
3974 generator.GenerateFast(masm_);
3977 __ bind(&index_out_of_range);
3978 // When the index is out of range, the spec requires us to return
3980 __ LoadRoot(result, Heap::kNanValueRootIndex);
3983 __ bind(&need_conversion);
3984 // Move the undefined value into the result register, which will
3985 // trigger conversion.
3986 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3989 NopRuntimeCallHelper call_helper;
3990 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3993 context()->Plug(result);
3997 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3998 ZoneList<Expression*>* args = expr->arguments();
3999 DCHECK(args->length() == 2);
4001 VisitForStackValue(args->at(0));
4002 VisitForAccumulatorValue(args->at(1));
4004 Register object = rbx;
4005 Register index = rax;
4006 Register scratch = rdx;
4007 Register result = rax;
4011 Label need_conversion;
4012 Label index_out_of_range;
4014 StringCharAtGenerator generator(object,
4020 &index_out_of_range,
4021 STRING_INDEX_IS_NUMBER);
4022 generator.GenerateFast(masm_);
4025 __ bind(&index_out_of_range);
4026 // When the index is out of range, the spec requires us to return
4027 // the empty string.
4028 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4031 __ bind(&need_conversion);
4032 // Move smi zero into the result register, which will trigger
4034 __ Move(result, Smi::FromInt(0));
4037 NopRuntimeCallHelper call_helper;
4038 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4041 context()->Plug(result);
4045 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4046 ZoneList<Expression*>* args = expr->arguments();
4047 DCHECK_EQ(2, args->length());
4048 VisitForStackValue(args->at(0));
4049 VisitForAccumulatorValue(args->at(1));
4052 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4054 context()->Plug(rax);
4058 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4059 ZoneList<Expression*>* args = expr->arguments();
4060 DCHECK(args->length() >= 2);
4062 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4063 for (int i = 0; i < arg_count + 1; i++) {
4064 VisitForStackValue(args->at(i));
4066 VisitForAccumulatorValue(args->last()); // Function.
4068 Label runtime, done;
4069 // Check for non-function argument (including proxy).
4070 __ JumpIfSmi(rax, &runtime);
4071 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
4072 __ j(not_equal, &runtime);
4074 // InvokeFunction requires the function in rdi. Move it in there.
4075 __ movp(rdi, result_register());
4076 ParameterCount count(arg_count);
4077 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
4078 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4083 __ CallRuntime(Runtime::kCall, args->length());
4086 context()->Plug(rax);
4090 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4091 ZoneList<Expression*>* args = expr->arguments();
4092 DCHECK(args->length() == 2);
4095 VisitForStackValue(args->at(0));
4098 VisitForStackValue(args->at(1));
4099 __ CallRuntime(Runtime::kGetPrototype, 1);
4100 __ Push(result_register());
4102 // Load original constructor into rcx.
4103 __ movp(rcx, Operand(rsp, 1 * kPointerSize));
4105 // Check if the calling frame is an arguments adaptor frame.
4106 Label adaptor_frame, args_set_up, runtime;
4107 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
4108 __ movp(rbx, Operand(rdx, StandardFrameConstants::kContextOffset));
4109 __ Cmp(rbx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4110 __ j(equal, &adaptor_frame);
4111 // default constructor has no arguments, so no adaptor frame means no args.
4112 __ movp(rax, Immediate(0));
4113 __ jmp(&args_set_up);
4115 // Copy arguments from adaptor frame.
4117 __ bind(&adaptor_frame);
4118 __ movp(rbx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4119 __ SmiToInteger64(rbx, rbx);
4122 __ leap(rdx, Operand(rdx, rbx, times_pointer_size,
4123 StandardFrameConstants::kCallerSPOffset));
4126 __ Push(Operand(rdx, -1 * kPointerSize));
4127 __ subp(rdx, Immediate(kPointerSize));
4129 __ j(not_zero, &loop);
4132 __ bind(&args_set_up);
4133 __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0));
4134 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
4136 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4137 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4141 context()->Plug(result_register());
4145 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4146 RegExpConstructResultStub stub(isolate());
4147 ZoneList<Expression*>* args = expr->arguments();
4148 DCHECK(args->length() == 3);
4149 VisitForStackValue(args->at(0));
4150 VisitForStackValue(args->at(1));
4151 VisitForAccumulatorValue(args->at(2));
4155 context()->Plug(rax);
4159 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4160 ZoneList<Expression*>* args = expr->arguments();
4161 DCHECK_EQ(2, args->length());
4163 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4164 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4166 Handle<FixedArray> jsfunction_result_caches(
4167 isolate()->native_context()->jsfunction_result_caches());
4168 if (jsfunction_result_caches->length() <= cache_id) {
4169 __ Abort(kAttemptToUseUndefinedCache);
4170 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4171 context()->Plug(rax);
4175 VisitForAccumulatorValue(args->at(1));
4178 Register cache = rbx;
4180 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
4182 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4184 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4186 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4188 Label done, not_found;
4189 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4190 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4191 // tmp now holds finger offset as a smi.
4193 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
4194 __ cmpp(key, FieldOperand(cache,
4197 FixedArray::kHeaderSize));
4198 __ j(not_equal, ¬_found, Label::kNear);
4199 __ movp(rax, FieldOperand(cache,
4202 FixedArray::kHeaderSize + kPointerSize));
4203 __ jmp(&done, Label::kNear);
4205 __ bind(¬_found);
4206 // Call runtime to perform the lookup.
4209 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4212 context()->Plug(rax);
4216 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4217 ZoneList<Expression*>* args = expr->arguments();
4218 DCHECK(args->length() == 1);
4220 VisitForAccumulatorValue(args->at(0));
4222 Label materialize_true, materialize_false;
4223 Label* if_true = NULL;
4224 Label* if_false = NULL;
4225 Label* fall_through = NULL;
4226 context()->PrepareTest(&materialize_true, &materialize_false,
4227 &if_true, &if_false, &fall_through);
4229 __ testl(FieldOperand(rax, String::kHashFieldOffset),
4230 Immediate(String::kContainsCachedArrayIndexMask));
4231 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4232 __ j(zero, if_true);
4235 context()->Plug(if_true, if_false);
4239 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4240 ZoneList<Expression*>* args = expr->arguments();
4241 DCHECK(args->length() == 1);
4242 VisitForAccumulatorValue(args->at(0));
4244 __ AssertString(rax);
4246 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
4247 DCHECK(String::kHashShift >= kSmiTagSize);
4248 __ IndexFromHash(rax, rax);
4250 context()->Plug(rax);
4254 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4255 Label bailout, return_result, done, one_char_separator, long_separator,
4256 non_trivial_array, not_size_one_array, loop,
4257 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4258 ZoneList<Expression*>* args = expr->arguments();
4259 DCHECK(args->length() == 2);
4260 // We will leave the separator on the stack until the end of the function.
4261 VisitForStackValue(args->at(1));
4262 // Load this to rax (= array)
4263 VisitForAccumulatorValue(args->at(0));
4264 // All aliases of the same register have disjoint lifetimes.
4265 Register array = rax;
4266 Register elements = no_reg; // Will be rax.
4268 Register index = rdx;
4270 Register string_length = rcx;
4272 Register string = rsi;
4274 Register scratch = rbx;
4276 Register array_length = rdi;
4277 Register result_pos = no_reg; // Will be rdi.
4279 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
4280 Operand result_operand = Operand(rsp, 1 * kPointerSize);
4281 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
4282 // Separator operand is already pushed. Make room for the two
4283 // other stack fields, and clear the direction flag in anticipation
4284 // of calling CopyBytes.
4285 __ subp(rsp, Immediate(2 * kPointerSize));
4287 // Check that the array is a JSArray
4288 __ JumpIfSmi(array, &bailout);
4289 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4290 __ j(not_equal, &bailout);
4292 // Check that the array has fast elements.
4293 __ CheckFastElements(scratch, &bailout);
4295 // Array has fast elements, so its length must be a smi.
4296 // If the array has length zero, return the empty string.
4297 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
4298 __ SmiCompare(array_length, Smi::FromInt(0));
4299 __ j(not_zero, &non_trivial_array);
4300 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
4301 __ jmp(&return_result);
4303 // Save the array length on the stack.
4304 __ bind(&non_trivial_array);
4305 __ SmiToInteger32(array_length, array_length);
4306 __ movl(array_length_operand, array_length);
4308 // Save the FixedArray containing array's elements.
4309 // End of array's live range.
4311 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
4315 // Check that all array elements are sequential one-byte strings, and
4316 // accumulate the sum of their lengths, as a smi-encoded value.
4318 __ Set(string_length, 0);
4319 // Loop condition: while (index < array_length).
4320 // Live loop registers: index(int32), array_length(int32), string(String*),
4321 // scratch, string_length(int32), elements(FixedArray*).
4322 if (generate_debug_code_) {
4323 __ cmpp(index, array_length);
4324 __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4327 __ movp(string, FieldOperand(elements,
4330 FixedArray::kHeaderSize));
4331 __ JumpIfSmi(string, &bailout);
4332 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4333 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4334 __ andb(scratch, Immediate(
4335 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4336 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4337 __ j(not_equal, &bailout);
4338 __ AddSmiField(string_length,
4339 FieldOperand(string, SeqOneByteString::kLengthOffset));
4340 __ j(overflow, &bailout);
4342 __ cmpl(index, array_length);
4346 // string_length: Sum of string lengths.
4347 // elements: FixedArray of strings.
4348 // index: Array length.
4349 // array_length: Array length.
4351 // If array_length is 1, return elements[0], a string.
4352 __ cmpl(array_length, Immediate(1));
4353 __ j(not_equal, ¬_size_one_array);
4354 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
4355 __ jmp(&return_result);
4357 __ bind(¬_size_one_array);
4359 // End of array_length live range.
4360 result_pos = array_length;
4361 array_length = no_reg;
4364 // string_length: Sum of string lengths.
4365 // elements: FixedArray of strings.
4366 // index: Array length.
4368 // Check that the separator is a sequential one-byte string.
4369 __ movp(string, separator_operand);
4370 __ JumpIfSmi(string, &bailout);
4371 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4372 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4373 __ andb(scratch, Immediate(
4374 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4375 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4376 __ j(not_equal, &bailout);
4379 // string_length: Sum of string lengths.
4380 // elements: FixedArray of strings.
4381 // index: Array length.
4382 // string: Separator string.
4384 // Add (separator length times (array_length - 1)) to string_length.
4385 __ SmiToInteger32(scratch,
4386 FieldOperand(string, SeqOneByteString::kLengthOffset));
4388 __ imull(scratch, index);
4389 __ j(overflow, &bailout);
4390 __ addl(string_length, scratch);
4391 __ j(overflow, &bailout);
4393 // Live registers and stack values:
4394 // string_length: Total length of result string.
4395 // elements: FixedArray of strings.
4396 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4398 __ movp(result_operand, result_pos);
4399 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4401 __ movp(string, separator_operand);
4402 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4404 __ j(equal, &one_char_separator);
4405 __ j(greater, &long_separator);
4408 // Empty separator case:
4410 __ movl(scratch, array_length_operand);
4411 __ jmp(&loop_1_condition);
4412 // Loop condition: while (index < array_length).
4414 // Each iteration of the loop concatenates one string to the result.
4415 // Live values in registers:
4416 // index: which element of the elements array we are adding to the result.
4417 // result_pos: the position to which we are currently copying characters.
4418 // elements: the FixedArray of strings we are joining.
4419 // scratch: array length.
4421 // Get string = array[index].
4422 __ movp(string, FieldOperand(elements, index,
4424 FixedArray::kHeaderSize));
4425 __ SmiToInteger32(string_length,
4426 FieldOperand(string, String::kLengthOffset));
4428 FieldOperand(string, SeqOneByteString::kHeaderSize));
4429 __ CopyBytes(result_pos, string, string_length);
4431 __ bind(&loop_1_condition);
4432 __ cmpl(index, scratch);
4433 __ j(less, &loop_1); // Loop while (index < array_length).
4436 // Generic bailout code used from several places.
4438 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4439 __ jmp(&return_result);
4442 // One-character separator case
4443 __ bind(&one_char_separator);
4444 // Get the separator one-byte character value.
4445 // Register "string" holds the separator.
4446 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4448 // Jump into the loop after the code that copies the separator, so the first
4449 // element is not preceded by a separator
4450 __ jmp(&loop_2_entry);
4451 // Loop condition: while (index < length).
4453 // Each iteration of the loop concatenates one string to the result.
4454 // Live values in registers:
4455 // elements: The FixedArray of strings we are joining.
4456 // index: which element of the elements array we are adding to the result.
4457 // result_pos: the position to which we are currently copying characters.
4458 // scratch: Separator character.
4460 // Copy the separator character to the result.
4461 __ movb(Operand(result_pos, 0), scratch);
4462 __ incp(result_pos);
4464 __ bind(&loop_2_entry);
4465 // Get string = array[index].
4466 __ movp(string, FieldOperand(elements, index,
4468 FixedArray::kHeaderSize));
4469 __ SmiToInteger32(string_length,
4470 FieldOperand(string, String::kLengthOffset));
4472 FieldOperand(string, SeqOneByteString::kHeaderSize));
4473 __ CopyBytes(result_pos, string, string_length);
4475 __ cmpl(index, array_length_operand);
4476 __ j(less, &loop_2); // End while (index < length).
4480 // Long separator case (separator is more than one character).
4481 __ bind(&long_separator);
4483 // Make elements point to end of elements array, and index
4484 // count from -array_length to zero, so we don't need to maintain
4486 __ movl(index, array_length_operand);
4487 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4488 FixedArray::kHeaderSize));
4491 // Replace separator string with pointer to its first character, and
4492 // make scratch be its length.
4493 __ movp(string, separator_operand);
4494 __ SmiToInteger32(scratch,
4495 FieldOperand(string, String::kLengthOffset));
4497 FieldOperand(string, SeqOneByteString::kHeaderSize));
4498 __ movp(separator_operand, string);
4500 // Jump into the loop after the code that copies the separator, so the first
4501 // element is not preceded by a separator
4502 __ jmp(&loop_3_entry);
4503 // Loop condition: while (index < length).
4505 // Each iteration of the loop concatenates one string to the result.
4506 // Live values in registers:
4507 // index: which element of the elements array we are adding to the result.
4508 // result_pos: the position to which we are currently copying characters.
4509 // scratch: Separator length.
4510 // separator_operand (rsp[0x10]): Address of first char of separator.
4512 // Copy the separator to the result.
4513 __ movp(string, separator_operand);
4514 __ movl(string_length, scratch);
4515 __ CopyBytes(result_pos, string, string_length, 2);
4517 __ bind(&loop_3_entry);
4518 // Get string = array[index].
4519 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4520 __ SmiToInteger32(string_length,
4521 FieldOperand(string, String::kLengthOffset));
4523 FieldOperand(string, SeqOneByteString::kHeaderSize));
4524 __ CopyBytes(result_pos, string, string_length);
4526 __ j(not_equal, &loop_3); // Loop while (index < 0).
4529 __ movp(rax, result_operand);
4531 __ bind(&return_result);
4532 // Drop temp values from the stack, and restore context register.
4533 __ addp(rsp, Immediate(3 * kPointerSize));
4534 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4535 context()->Plug(rax);
4539 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4540 DCHECK(expr->arguments()->length() == 0);
4541 ExternalReference debug_is_active =
4542 ExternalReference::debug_is_active_address(isolate());
4543 __ Move(kScratchRegister, debug_is_active);
4544 __ movzxbp(rax, Operand(kScratchRegister, 0));
4545 __ Integer32ToSmi(rax, rax);
4546 context()->Plug(rax);
4550 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4551 // Push the builtins object as receiver.
4552 __ movp(rax, GlobalObjectOperand());
4553 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4555 // Load the function from the receiver.
4556 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4557 __ Move(LoadDescriptor::NameRegister(), expr->name());
4558 __ Move(LoadDescriptor::SlotRegister(),
4559 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4560 CallLoadIC(NOT_INSIDE_TYPEOF);
4564 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4565 ZoneList<Expression*>* args = expr->arguments();
4566 int arg_count = args->length();
4568 SetCallPosition(expr, arg_count);
4569 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4570 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4575 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4576 ZoneList<Expression*>* args = expr->arguments();
4577 int arg_count = args->length();
4579 if (expr->is_jsruntime()) {
4580 Comment cmnt(masm_, "[ CallRuntime");
4582 EmitLoadJSRuntimeFunction(expr);
4584 // Push the target function under the receiver.
4585 __ Push(Operand(rsp, 0));
4586 __ movp(Operand(rsp, kPointerSize), rax);
4588 // Push the arguments ("left-to-right").
4589 for (int i = 0; i < arg_count; i++) {
4590 VisitForStackValue(args->at(i));
4593 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4594 EmitCallJSRuntimeFunction(expr);
4596 // Restore context register.
4597 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4598 context()->DropAndPlug(1, rax);
4601 const Runtime::Function* function = expr->function();
4602 switch (function->function_id) {
4603 #define CALL_INTRINSIC_GENERATOR(Name) \
4604 case Runtime::kInline##Name: { \
4605 Comment cmnt(masm_, "[ Inline" #Name); \
4606 return Emit##Name(expr); \
4608 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4609 #undef CALL_INTRINSIC_GENERATOR
4611 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4612 // Push the arguments ("left-to-right").
4613 for (int i = 0; i < arg_count; i++) {
4614 VisitForStackValue(args->at(i));
4617 // Call the C runtime.
4618 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4619 __ CallRuntime(function, arg_count);
4620 context()->Plug(rax);
4627 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4628 switch (expr->op()) {
4629 case Token::DELETE: {
4630 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4631 Property* property = expr->expression()->AsProperty();
4632 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4634 if (property != NULL) {
4635 VisitForStackValue(property->obj());
4636 VisitForStackValue(property->key());
4637 __ Push(Smi::FromInt(language_mode()));
4638 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4639 context()->Plug(rax);
4640 } else if (proxy != NULL) {
4641 Variable* var = proxy->var();
4642 // Delete of an unqualified identifier is disallowed in strict mode but
4643 // "delete this" is allowed.
4644 bool is_this = var->HasThisName(isolate());
4645 DCHECK(is_sloppy(language_mode()) || is_this);
4646 if (var->IsUnallocatedOrGlobalSlot()) {
4647 __ Push(GlobalObjectOperand());
4648 __ Push(var->name());
4649 __ Push(Smi::FromInt(SLOPPY));
4650 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4651 context()->Plug(rax);
4652 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4653 // Result of deleting non-global variables is false. 'this' is
4654 // not really a variable, though we implement it as one. The
4655 // subexpression does not have side effects.
4656 context()->Plug(is_this);
4658 // Non-global variable. Call the runtime to try to delete from the
4659 // context where the variable was introduced.
4660 __ Push(context_register());
4661 __ Push(var->name());
4662 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4663 context()->Plug(rax);
4666 // Result of deleting non-property, non-variable reference is true.
4667 // The subexpression may have side effects.
4668 VisitForEffect(expr->expression());
4669 context()->Plug(true);
4675 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4676 VisitForEffect(expr->expression());
4677 context()->Plug(Heap::kUndefinedValueRootIndex);
4682 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4683 if (context()->IsEffect()) {
4684 // Unary NOT has no side effects so it's only necessary to visit the
4685 // subexpression. Match the optimizing compiler by not branching.
4686 VisitForEffect(expr->expression());
4687 } else if (context()->IsTest()) {
4688 const TestContext* test = TestContext::cast(context());
4689 // The labels are swapped for the recursive call.
4690 VisitForControl(expr->expression(),
4691 test->false_label(),
4693 test->fall_through());
4694 context()->Plug(test->true_label(), test->false_label());
4696 // We handle value contexts explicitly rather than simply visiting
4697 // for control and plugging the control flow into the context,
4698 // because we need to prepare a pair of extra administrative AST ids
4699 // for the optimizing compiler.
4700 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4701 Label materialize_true, materialize_false, done;
4702 VisitForControl(expr->expression(),
4706 __ bind(&materialize_true);
4707 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4708 if (context()->IsAccumulatorValue()) {
4709 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4711 __ PushRoot(Heap::kTrueValueRootIndex);
4713 __ jmp(&done, Label::kNear);
4714 __ bind(&materialize_false);
4715 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4716 if (context()->IsAccumulatorValue()) {
4717 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4719 __ PushRoot(Heap::kFalseValueRootIndex);
4726 case Token::TYPEOF: {
4727 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4729 AccumulatorValueContext context(this);
4730 VisitForTypeofValue(expr->expression());
4733 TypeofStub typeof_stub(isolate());
4734 __ CallStub(&typeof_stub);
4735 context()->Plug(rax);
4745 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4746 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4748 Comment cmnt(masm_, "[ CountOperation");
4750 Property* prop = expr->expression()->AsProperty();
4751 LhsKind assign_type = Property::GetAssignType(prop);
4753 // Evaluate expression and get value.
4754 if (assign_type == VARIABLE) {
4755 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4756 AccumulatorValueContext context(this);
4757 EmitVariableLoad(expr->expression()->AsVariableProxy());
4759 // Reserve space for result of postfix operation.
4760 if (expr->is_postfix() && !context()->IsEffect()) {
4761 __ Push(Smi::FromInt(0));
4763 switch (assign_type) {
4764 case NAMED_PROPERTY: {
4765 VisitForStackValue(prop->obj());
4766 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4767 EmitNamedPropertyLoad(prop);
4771 case NAMED_SUPER_PROPERTY: {
4772 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4773 VisitForAccumulatorValue(
4774 prop->obj()->AsSuperPropertyReference()->home_object());
4775 __ Push(result_register());
4776 __ Push(MemOperand(rsp, kPointerSize));
4777 __ Push(result_register());
4778 EmitNamedSuperPropertyLoad(prop);
4782 case KEYED_SUPER_PROPERTY: {
4783 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4785 prop->obj()->AsSuperPropertyReference()->home_object());
4786 VisitForAccumulatorValue(prop->key());
4787 __ Push(result_register());
4788 __ Push(MemOperand(rsp, 2 * kPointerSize));
4789 __ Push(MemOperand(rsp, 2 * kPointerSize));
4790 __ Push(result_register());
4791 EmitKeyedSuperPropertyLoad(prop);
4795 case KEYED_PROPERTY: {
4796 VisitForStackValue(prop->obj());
4797 VisitForStackValue(prop->key());
4798 // Leave receiver on stack
4799 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4800 // Copy of key, needed for later store.
4801 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4802 EmitKeyedPropertyLoad(prop);
4811 // We need a second deoptimization point after loading the value
4812 // in case evaluating the property load my have a side effect.
4813 if (assign_type == VARIABLE) {
4814 PrepareForBailout(expr->expression(), TOS_REG);
4816 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4819 // Inline smi case if we are in a loop.
4820 Label done, stub_call;
4821 JumpPatchSite patch_site(masm_);
4822 if (ShouldInlineSmiCase(expr->op())) {
4824 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4826 // Save result for postfix expressions.
4827 if (expr->is_postfix()) {
4828 if (!context()->IsEffect()) {
4829 // Save the result on the stack. If we have a named or keyed property
4830 // we store the result under the receiver that is currently on top
4832 switch (assign_type) {
4836 case NAMED_PROPERTY:
4837 __ movp(Operand(rsp, kPointerSize), rax);
4839 case NAMED_SUPER_PROPERTY:
4840 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4842 case KEYED_PROPERTY:
4843 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4845 case KEYED_SUPER_PROPERTY:
4846 __ movp(Operand(rsp, 3 * kPointerSize), rax);
4852 SmiOperationConstraints constraints =
4853 SmiOperationConstraint::kPreserveSourceRegister |
4854 SmiOperationConstraint::kBailoutOnNoOverflow;
4855 if (expr->op() == Token::INC) {
4856 __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
4859 __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
4862 __ jmp(&stub_call, Label::kNear);
4865 if (!is_strong(language_mode())) {
4866 ToNumberStub convert_stub(isolate());
4867 __ CallStub(&convert_stub);
4868 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4871 // Save result for postfix expressions.
4872 if (expr->is_postfix()) {
4873 if (!context()->IsEffect()) {
4874 // Save the result on the stack. If we have a named or keyed property
4875 // we store the result under the receiver that is currently on top
4877 switch (assign_type) {
4881 case NAMED_PROPERTY:
4882 __ movp(Operand(rsp, kPointerSize), rax);
4884 case NAMED_SUPER_PROPERTY:
4885 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4887 case KEYED_PROPERTY:
4888 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4890 case KEYED_SUPER_PROPERTY:
4891 __ movp(Operand(rsp, 3 * kPointerSize), rax);
4897 SetExpressionPosition(expr);
4899 // Call stub for +1/-1.
4900 __ bind(&stub_call);
4902 __ Move(rax, Smi::FromInt(1));
4903 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4904 strength(language_mode())).code();
4905 CallIC(code, expr->CountBinOpFeedbackId());
4906 patch_site.EmitPatchInfo();
4909 if (is_strong(language_mode())) {
4910 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4912 // Store the value returned in rax.
4913 switch (assign_type) {
4915 if (expr->is_postfix()) {
4916 // Perform the assignment as if via '='.
4917 { EffectContext context(this);
4918 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4919 Token::ASSIGN, expr->CountSlot());
4920 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4923 // For all contexts except kEffect: We have the result on
4924 // top of the stack.
4925 if (!context()->IsEffect()) {
4926 context()->PlugTOS();
4929 // Perform the assignment as if via '='.
4930 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4931 Token::ASSIGN, expr->CountSlot());
4932 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4933 context()->Plug(rax);
4936 case NAMED_PROPERTY: {
4937 __ Move(StoreDescriptor::NameRegister(),
4938 prop->key()->AsLiteral()->value());
4939 __ Pop(StoreDescriptor::ReceiverRegister());
4940 if (FLAG_vector_stores) {
4941 EmitLoadStoreICSlot(expr->CountSlot());
4944 CallStoreIC(expr->CountStoreFeedbackId());
4946 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4947 if (expr->is_postfix()) {
4948 if (!context()->IsEffect()) {
4949 context()->PlugTOS();
4952 context()->Plug(rax);
4956 case NAMED_SUPER_PROPERTY: {
4957 EmitNamedSuperPropertyStore(prop);
4958 if (expr->is_postfix()) {
4959 if (!context()->IsEffect()) {
4960 context()->PlugTOS();
4963 context()->Plug(rax);
4967 case KEYED_SUPER_PROPERTY: {
4968 EmitKeyedSuperPropertyStore(prop);
4969 if (expr->is_postfix()) {
4970 if (!context()->IsEffect()) {
4971 context()->PlugTOS();
4974 context()->Plug(rax);
4978 case KEYED_PROPERTY: {
4979 __ Pop(StoreDescriptor::NameRegister());
4980 __ Pop(StoreDescriptor::ReceiverRegister());
4982 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4983 if (FLAG_vector_stores) {
4984 EmitLoadStoreICSlot(expr->CountSlot());
4987 CallIC(ic, expr->CountStoreFeedbackId());
4989 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4990 if (expr->is_postfix()) {
4991 if (!context()->IsEffect()) {
4992 context()->PlugTOS();
4995 context()->Plug(rax);
5003 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5004 Expression* sub_expr,
5005 Handle<String> check) {
5006 Label materialize_true, materialize_false;
5007 Label* if_true = NULL;
5008 Label* if_false = NULL;
5009 Label* fall_through = NULL;
5010 context()->PrepareTest(&materialize_true, &materialize_false,
5011 &if_true, &if_false, &fall_through);
5013 { AccumulatorValueContext context(this);
5014 VisitForTypeofValue(sub_expr);
5016 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5018 Factory* factory = isolate()->factory();
5019 if (String::Equals(check, factory->number_string())) {
5020 __ JumpIfSmi(rax, if_true);
5021 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
5022 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
5023 Split(equal, if_true, if_false, fall_through);
5024 } else if (String::Equals(check, factory->string_string())) {
5025 __ JumpIfSmi(rax, if_false);
5026 // Check for undetectable objects => false.
5027 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
5028 __ j(above_equal, if_false);
5029 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5030 Immediate(1 << Map::kIsUndetectable));
5031 Split(zero, if_true, if_false, fall_through);
5032 } else if (String::Equals(check, factory->symbol_string())) {
5033 __ JumpIfSmi(rax, if_false);
5034 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
5035 Split(equal, if_true, if_false, fall_through);
5036 } else if (String::Equals(check, factory->float32x4_string())) {
5037 __ JumpIfSmi(rax, if_false);
5038 __ CmpObjectType(rax, FLOAT32X4_TYPE, rdx);
5039 Split(equal, if_true, if_false, fall_through);
5040 } else if (String::Equals(check, factory->boolean_string())) {
5041 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
5042 __ j(equal, if_true);
5043 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
5044 Split(equal, if_true, if_false, fall_through);
5045 } else if (String::Equals(check, factory->undefined_string())) {
5046 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
5047 __ j(equal, if_true);
5048 __ JumpIfSmi(rax, if_false);
5049 // Check for undetectable objects => true.
5050 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
5051 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5052 Immediate(1 << Map::kIsUndetectable));
5053 Split(not_zero, if_true, if_false, fall_through);
5054 } else if (String::Equals(check, factory->function_string())) {
5055 __ JumpIfSmi(rax, if_false);
5056 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5057 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
5058 __ j(equal, if_true);
5059 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
5060 Split(equal, if_true, if_false, fall_through);
5061 } else if (String::Equals(check, factory->object_string())) {
5062 __ JumpIfSmi(rax, if_false);
5063 __ CompareRoot(rax, Heap::kNullValueRootIndex);
5064 __ j(equal, if_true);
5065 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
5066 __ j(below, if_false);
5067 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5068 __ j(above, if_false);
5069 // Check for undetectable objects => false.
5070 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5071 Immediate(1 << Map::kIsUndetectable));
5072 Split(zero, if_true, if_false, fall_through);
5074 if (if_false != fall_through) __ jmp(if_false);
5076 context()->Plug(if_true, if_false);
5080 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5081 Comment cmnt(masm_, "[ CompareOperation");
5082 SetExpressionPosition(expr);
5084 // First we try a fast inlined version of the compare when one of
5085 // the operands is a literal.
5086 if (TryLiteralCompare(expr)) return;
5088 // Always perform the comparison for its control flow. Pack the result
5089 // into the expression's context after the comparison is performed.
5090 Label materialize_true, materialize_false;
5091 Label* if_true = NULL;
5092 Label* if_false = NULL;
5093 Label* fall_through = NULL;
5094 context()->PrepareTest(&materialize_true, &materialize_false,
5095 &if_true, &if_false, &fall_through);
5097 Token::Value op = expr->op();
5098 VisitForStackValue(expr->left());
5101 VisitForStackValue(expr->right());
5102 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5103 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5104 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
5105 Split(equal, if_true, if_false, fall_through);
5108 case Token::INSTANCEOF: {
5109 VisitForStackValue(expr->right());
5110 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5112 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5114 // The stub returns 0 for true.
5115 Split(zero, if_true, if_false, fall_through);
5120 VisitForAccumulatorValue(expr->right());
5121 Condition cc = CompareIC::ComputeCondition(op);
5124 bool inline_smi_code = ShouldInlineSmiCase(op);
5125 JumpPatchSite patch_site(masm_);
5126 if (inline_smi_code) {
5130 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
5132 Split(cc, if_true, if_false, NULL);
5133 __ bind(&slow_case);
5136 Handle<Code> ic = CodeFactory::CompareIC(
5137 isolate(), op, strength(language_mode())).code();
5138 CallIC(ic, expr->CompareOperationFeedbackId());
5139 patch_site.EmitPatchInfo();
5141 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5143 Split(cc, if_true, if_false, fall_through);
5147 // Convert the result of the comparison into one expected for this
5148 // expression's context.
5149 context()->Plug(if_true, if_false);
5153 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5154 Expression* sub_expr,
5156 Label materialize_true, materialize_false;
5157 Label* if_true = NULL;
5158 Label* if_false = NULL;
5159 Label* fall_through = NULL;
5160 context()->PrepareTest(&materialize_true, &materialize_false,
5161 &if_true, &if_false, &fall_through);
5163 VisitForAccumulatorValue(sub_expr);
5164 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5165 if (expr->op() == Token::EQ_STRICT) {
5166 Heap::RootListIndex nil_value = nil == kNullValue ?
5167 Heap::kNullValueRootIndex :
5168 Heap::kUndefinedValueRootIndex;
5169 __ CompareRoot(rax, nil_value);
5170 Split(equal, if_true, if_false, fall_through);
5172 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5173 CallIC(ic, expr->CompareOperationFeedbackId());
5175 Split(not_zero, if_true, if_false, fall_through);
5177 context()->Plug(if_true, if_false);
5181 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5182 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5183 context()->Plug(rax);
5187 Register FullCodeGenerator::result_register() {
5192 Register FullCodeGenerator::context_register() {
5197 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5198 DCHECK(IsAligned(frame_offset, kPointerSize));
5199 __ movp(Operand(rbp, frame_offset), value);
5203 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5204 __ movp(dst, ContextOperand(rsi, context_index));
5208 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5209 Scope* declaration_scope = scope()->DeclarationScope();
5210 if (declaration_scope->is_script_scope() ||
5211 declaration_scope->is_module_scope()) {
5212 // Contexts nested in the native context have a canonical empty function
5213 // as their closure, not the anonymous closure containing the global
5214 // code. Pass a smi sentinel and let the runtime look up the empty
5216 __ Push(Smi::FromInt(0));
5217 } else if (declaration_scope->is_eval_scope()) {
5218 // Contexts created by a call to eval have the same closure as the
5219 // context calling eval, not the anonymous closure containing the eval
5220 // code. Fetch it from the context.
5221 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
5223 DCHECK(declaration_scope->is_function_scope());
5224 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5229 // ----------------------------------------------------------------------------
5230 // Non-local control flow support.
5233 void FullCodeGenerator::EnterFinallyBlock() {
5234 DCHECK(!result_register().is(rdx));
5235 DCHECK(!result_register().is(rcx));
5236 // Cook return address on top of stack (smi encoded Code* delta)
5237 __ PopReturnAddressTo(rdx);
5238 __ Move(rcx, masm_->CodeObject());
5240 __ Integer32ToSmi(rdx, rdx);
5243 // Store result register while executing finally block.
5244 __ Push(result_register());
5246 // Store pending message while executing finally block.
5247 ExternalReference pending_message_obj =
5248 ExternalReference::address_of_pending_message_obj(isolate());
5249 __ Load(rdx, pending_message_obj);
5252 ClearPendingMessage();
5256 void FullCodeGenerator::ExitFinallyBlock() {
5257 DCHECK(!result_register().is(rdx));
5258 DCHECK(!result_register().is(rcx));
5259 // Restore pending message from stack.
5261 ExternalReference pending_message_obj =
5262 ExternalReference::address_of_pending_message_obj(isolate());
5263 __ Store(pending_message_obj, rdx);
5265 // Restore result register from stack.
5266 __ Pop(result_register());
5268 // Uncook return address.
5270 __ SmiToInteger32(rdx, rdx);
5271 __ Move(rcx, masm_->CodeObject());
5277 void FullCodeGenerator::ClearPendingMessage() {
5278 DCHECK(!result_register().is(rdx));
5279 ExternalReference pending_message_obj =
5280 ExternalReference::address_of_pending_message_obj(isolate());
5281 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
5282 __ Store(pending_message_obj, rdx);
5286 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5287 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5288 __ Move(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
5295 static const byte kJnsInstruction = 0x79;
5296 static const byte kNopByteOne = 0x66;
5297 static const byte kNopByteTwo = 0x90;
5299 static const byte kCallInstruction = 0xe8;
5303 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5305 BackEdgeState target_state,
5306 Code* replacement_code) {
5307 Address call_target_address = pc - kIntSize;
5308 Address jns_instr_address = call_target_address - 3;
5309 Address jns_offset_address = call_target_address - 2;
5311 switch (target_state) {
5313 // sub <profiling_counter>, <delta> ;; Not changed
5315 // call <interrupt stub>
5317 *jns_instr_address = kJnsInstruction;
5318 *jns_offset_address = kJnsOffset;
5320 case ON_STACK_REPLACEMENT:
5321 case OSR_AFTER_STACK_CHECK:
5322 // sub <profiling_counter>, <delta> ;; Not changed
5325 // call <on-stack replacment>
5327 *jns_instr_address = kNopByteOne;
5328 *jns_offset_address = kNopByteTwo;
5332 Assembler::set_target_address_at(call_target_address,
5334 replacement_code->entry());
5335 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5336 unoptimized_code, call_target_address, replacement_code);
5340 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5342 Code* unoptimized_code,
5344 Address call_target_address = pc - kIntSize;
5345 Address jns_instr_address = call_target_address - 3;
5346 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5348 if (*jns_instr_address == kJnsInstruction) {
5349 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5350 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5351 Assembler::target_address_at(call_target_address,
5356 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5357 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5359 if (Assembler::target_address_at(call_target_address,
5360 unoptimized_code) ==
5361 isolate->builtins()->OnStackReplacement()->entry()) {
5362 return ON_STACK_REPLACEMENT;
5365 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5366 Assembler::target_address_at(call_target_address,
5368 return OSR_AFTER_STACK_CHECK;
5372 } // namespace internal
5375 #endif // V8_TARGET_ARCH_X64