// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
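
// Helper for the patchable jumps that guard inlined smi code. The jump is
// initially emitted as jc/jnc on a smi-tag test; once type feedback is
// available, the IC patching machinery rewrites it to jz/jnz, flipping which
// path the check takes (see the EmitJump comment below).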
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));
      info_emitted_ = true;
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    // +1 for return address.
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());

    __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &ok, Label::kNear);

    __ movp(rcx, GlobalObjectOperand());
    __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));

    __ movp(args.GetReceiverOperand(), rcx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rdx);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in rdi.
    if (info->scope()->is_script_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in rax. It replaces the context passed to us.
    // It's saved in the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    SetVar(this_function_var, rdi, rbx, rdx);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    // new.target is parameter -2.
    int offset = 2 * kPointerSize + kFPOnStackSize + kPCOnStackSize +
                 (info_->scope()->num_parameters() - 1) * kPointerSize;
    __ movp(rax, Operand(rbp, offset));
    SetVar(new_target_var, rax, rbx, rdx);
  }

  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ leap(rdx,
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ Push(rdx);
    __ Push(Smi::FromInt(num_parameters));
    __ Push(Smi::FromInt(rest_index));
    __ Push(Smi::FromInt(language_mode()));

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, rax, rbx, rdx);
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ Push(rdi);
    } else {
      __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ leap(rdx,
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ Push(rdx);
    __ Push(Smi::FromInt(num_parameters));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.

    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}
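

// The profiling counter is a heap cell holding the remaining interrupt
// budget. It is decremented by a weight at every back edge and return; when
// the result is no longer positive, the generated code calls the
// InterruptCheck builtin and the counter is reset (see
// EmitBackEdgeBookkeeping and EmitReturnSequence below).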
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}
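

// kJnsOffset is the fixed byte size of the interrupt-check sequence emitted
// at back edges (it differs between x64 and x32); the sequence is wrapped in
// a PredictableCodeSizeScope below so that the back-edge patching machinery
// can rely on this exact size.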
static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ Push(rax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(rax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);

    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movp(rsp, rbp);
    __ popq(rbp);
    int no_frame_start = masm_->pc_offset();

    int arg_count = info_->scope()->num_parameters() + 1;
    if (IsSubclassConstructor(info_->function()->kind())) {
      arg_count++;
    }
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, rcx);

    // Add padding that will be overwritten by a debugger breakpoint. We
    // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
    // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
    const int kPadding = Assembler::kJSReturnSequenceLength -
                         (kPointerSize == kInt64Size ? 7 : 6);
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));

    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
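

// The Plug/DropAndPlug methods below implement the expression-context
// protocol: each context kind (Effect, AccumulatorValue, StackValue, Test)
// consumes a freshly computed value by, respectively, discarding it, leaving
// it in rax, pushing it on the stack, or branching on its boolean value.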
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ Push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ testp(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}
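

// Emit a branch on cc to if_true and a jump to if_false, omitting whichever
// jump would target the fall-through position.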
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // lexical context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(rsi);  // Context.
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ Push(Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ Push(rsi);  // Context.
      __ Push(variable->name());
      __ Push(Smi::FromInt(NONE));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             language_mode()).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpp(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ Push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(rax);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ Move(rbx, FeedbackVector());
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(vector_index)),
          TypeFeedbackVector::MegamorphicSentinel(isolate()));
  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check.
  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
  __ j(above, &non_proxy);
  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(rbx);  // Smi
  __ Push(rax);  // Array
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.
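
  // At this point the stack holds the five for-in loop slots, from top to
  // bottom: the current index (smi), the length or entry count (smi), the
  // fixed array or enum cache, the expected map (or a smi 0/1 in the slow
  // case), and the enumerable object itself.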

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movp(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ Push(rcx);  // Enumerable.
  __ Push(rbx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.continue_label());
  __ movp(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movp(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ addp(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Move(rbx, info);
    __ CallStub(&stub);
  } else {
    __ Push(rsi);
    __ Push(info);
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
    __ Move(StoreDescriptor::NameRegister(),
            isolate()->factory()->home_object_symbol());
    __ movp(StoreDescriptor::ValueRegister(),
            Operand(rsp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movp(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
    __ bind(&next);
    // Terminate at native context.
    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ Move(LoadDescriptor::NameRegister(), proxy->var()->name());
  __ Move(LoadDescriptor::SlotRegister(),
          SmiFromSlot(proxy->VariableFeedbackSlot()));

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Push(var->name());
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ Move(LoadDescriptor::NameRegister(), var->name());
      __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ Move(LoadDescriptor::SlotRegister(),
              SmiFromSlot(proxy->VariableFeedbackSlot()));
      CallGlobalLoadIC(var->name());
      context()->Plug(rax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
                                               : "[ Stack slot");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(rax, var);
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Push(var->name());
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(rax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup slot");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ Push(rsi);  // Context.
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movp(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in rax.
  __ Push(rcx);
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movp(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ Push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movp(rdx, FieldOperand(rbx, i));
    __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movp(FieldOperand(rax, i), rdx);
    __ movp(FieldOperand(rax, i + kPointerSize), rcx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movp(FieldOperand(rax, size - kPointerSize), rdx);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(rax));
            __ Move(StoreDescriptor::NameRegister(), key->value());
            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ movp(StoreDescriptor::ReceiverRegister(), rax);
              __ Move(StoreDescriptor::NameRegister(),
                      isolate()->factory()->home_object_symbol());
              __ movp(StoreDescriptor::ValueRegister(), Operand(rsp, 0));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ Push(Smi::FromInt(SLOPPY));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Push(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ Push(Smi::FromInt(NONE));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(rax);  // Save result on the stack
      result_saved = true;
    }

    __ Push(Operand(rsp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Push(Smi::FromInt(NONE));
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Push(Smi::FromInt(NONE));
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Push(Smi::FromInt(NONE));
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}
1828 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1829 Comment cmnt(masm_, "[ ArrayLiteral");
1831 expr->BuildConstantElements(isolate());
1832 Handle<FixedArray> constant_elements = expr->constant_elements();
1833 bool has_constant_fast_elements =
1834 IsFastObjectElementsKind(expr->constant_elements_kind());
1836 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1837 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1838 // If the only customer of allocation sites is transitioning, then we can
1839 // turn tracking off, since there is nowhere else to transition to.
1840 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1843 if (MustCreateArrayLiteralWithRuntime(expr)) {
1844 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1845 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1846 __ Push(Smi::FromInt(expr->literal_index()));
1847 __ Push(constant_elements);
1848 __ Push(Smi::FromInt(expr->ComputeFlags()));
1849 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1851 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1852 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1853 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1854 __ Move(rcx, constant_elements);
1855 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1858 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1860 bool result_saved = false; // Is the result saved to the stack?
1861 ZoneList<Expression*>* subexprs = expr->values();
1862 int length = subexprs->length();
1864 // Emit code to evaluate all the non-constant subexpressions and to store
1865 // them into the newly cloned array.
1866 int array_index = 0;
1867 for (; array_index < length; array_index++) {
1868 Expression* subexpr = subexprs->at(array_index);
1869 if (subexpr->IsSpread()) break;
1871 // If the subexpression is a literal or a simple materialized literal, it
1872 // is already set in the cloned array.
1873 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
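// For illustration (hypothetical input): in [1, foo(), 3] the elements 1
// and 3 are compile-time values already present in the boilerplate, so only
// foo() reaches the store code below.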
1875 if (!result_saved) {
1876 __ Push(rax); // array literal
1877 __ Push(Smi::FromInt(expr->literal_index()));
1878 result_saved = true;
1880 VisitForAccumulatorValue(subexpr);
1882 if (has_constant_fast_elements) {
1883 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: these
1884 // cannot transition, so we don't need to call the runtime stub.
1885 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1886 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1887 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1888 // Store the subexpression value in the array's elements.
1889 __ movp(FieldOperand(rbx, offset), result_register());
1890 // Update the write barrier for the array store.
1891 __ RecordWriteField(rbx, offset, result_register(), rcx,
1893 EMIT_REMEMBERED_SET,
1896 // Store the subexpression value in the array's elements.
1897 __ Move(rcx, Smi::FromInt(array_index));
1898 StoreArrayLiteralElementStub stub(isolate());
1902 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1905 // In case the array literal contains spread expressions it has two parts. The
1906 // first part is the "static" array, which has a literal index and is handled
1907 // above. The second part is the part after the first spread expression
1908 // (inclusive), and these elements get appended to the array. Note that the
1909 // number of elements an iterable produces is unknown ahead of time.
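// For illustration (hypothetical input): in [a, b, ...iter, c] the elements
// a and b are stored by the loop above, while ...iter and c are appended
// below via CONCAT_ITERABLE_TO_ARRAY and kAppendElement respectively.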
1910 if (array_index < length && result_saved) {
1911 __ Drop(1); // literal index
1913 result_saved = false;
1915 for (; array_index < length; array_index++) {
1916 Expression* subexpr = subexprs->at(array_index);
1919 if (subexpr->IsSpread()) {
1920 VisitForStackValue(subexpr->AsSpread()->expression());
1921 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1923 VisitForStackValue(subexpr);
1924 __ CallRuntime(Runtime::kAppendElement, 2);
1927 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1931 __ Drop(1); // literal index
1932 context()->PlugTOS();
1934 context()->Plug(rax);
1939 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1940 DCHECK(expr->target()->IsValidReferenceExpression());
1942 Comment cmnt(masm_, "[ Assignment");
1944 Property* property = expr->target()->AsProperty();
1945 LhsKind assign_type = Property::GetAssignType(property);
1947 // Evaluate LHS expression.
1948 switch (assign_type) {
1950 // Nothing to do here.
1952 case NAMED_PROPERTY:
1953 if (expr->is_compound()) {
1954 // We need the receiver both on the stack and in the register.
1955 VisitForStackValue(property->obj());
1956 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1958 VisitForStackValue(property->obj());
1961 case NAMED_SUPER_PROPERTY:
1963 property->obj()->AsSuperPropertyReference()->this_var());
1964 VisitForAccumulatorValue(
1965 property->obj()->AsSuperPropertyReference()->home_object());
1966 __ Push(result_register());
1967 if (expr->is_compound()) {
1968 __ Push(MemOperand(rsp, kPointerSize));
1969 __ Push(result_register());
1972 case KEYED_SUPER_PROPERTY:
1974 property->obj()->AsSuperPropertyReference()->this_var());
1976 property->obj()->AsSuperPropertyReference()->home_object());
1977 VisitForAccumulatorValue(property->key());
1978 __ Push(result_register());
1979 if (expr->is_compound()) {
1980 __ Push(MemOperand(rsp, 2 * kPointerSize));
1981 __ Push(MemOperand(rsp, 2 * kPointerSize));
1982 __ Push(result_register());
1985 case KEYED_PROPERTY: {
1986 if (expr->is_compound()) {
1987 VisitForStackValue(property->obj());
1988 VisitForStackValue(property->key());
1989 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1990 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1992 VisitForStackValue(property->obj());
1993 VisitForStackValue(property->key());
1999 // For compound assignments we need another deoptimization point after the
2000 // variable/property load.
2001 if (expr->is_compound()) {
2002 { AccumulatorValueContext context(this);
2003 switch (assign_type) {
2005 EmitVariableLoad(expr->target()->AsVariableProxy());
2006 PrepareForBailout(expr->target(), TOS_REG);
2008 case NAMED_PROPERTY:
2009 EmitNamedPropertyLoad(property);
2010 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2012 case NAMED_SUPER_PROPERTY:
2013 EmitNamedSuperPropertyLoad(property);
2014 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2016 case KEYED_SUPER_PROPERTY:
2017 EmitKeyedSuperPropertyLoad(property);
2018 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2020 case KEYED_PROPERTY:
2021 EmitKeyedPropertyLoad(property);
2022 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2027 Token::Value op = expr->binary_op();
2028 __ Push(rax); // Left operand goes on the stack.
2029 VisitForAccumulatorValue(expr->value());
2031 SetSourcePosition(expr->position() + 1);
2032 AccumulatorValueContext context(this);
2033 if (ShouldInlineSmiCase(op)) {
2034 EmitInlineSmiBinaryOp(expr->binary_operation(),
2039 EmitBinaryOp(expr->binary_operation(), op);
2041 // Deoptimization point in case the binary operation may have side effects.
2042 PrepareForBailout(expr->binary_operation(), TOS_REG);
2044 VisitForAccumulatorValue(expr->value());
2047 // Record source position before possible IC call.
2048 SetSourcePosition(expr->position());
2051 switch (assign_type) {
2053 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2054 expr->op(), expr->AssignmentSlot());
2055 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2056 context()->Plug(rax);
2058 case NAMED_PROPERTY:
2059 EmitNamedPropertyAssignment(expr);
2061 case NAMED_SUPER_PROPERTY:
2062 EmitNamedSuperPropertyStore(property);
2063 context()->Plug(rax);
2065 case KEYED_SUPER_PROPERTY:
2066 EmitKeyedSuperPropertyStore(property);
2067 context()->Plug(rax);
2069 case KEYED_PROPERTY:
2070 EmitKeyedPropertyAssignment(expr);
2076 void FullCodeGenerator::VisitYield(Yield* expr) {
2077 Comment cmnt(masm_, "[ Yield");
2078 // Evaluate yielded value first; the initial iterator definition depends on
2079 // this. It stays on the stack while we update the iterator.
2080 VisitForStackValue(expr->expression());
2082 switch (expr->yield_kind()) {
2083 case Yield::kSuspend:
2084 // Pop value from top-of-stack slot; box result into result register.
2085 EmitCreateIteratorResult(false);
2086 __ Push(result_register());
2087 // Fall through.
2088 case Yield::kInitial: {
2089 Label suspend, continuation, post_runtime, resume;
2093 __ bind(&continuation);
2097 VisitForAccumulatorValue(expr->generator_object());
2098 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2099 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2100 Smi::FromInt(continuation.pos()));
2101 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2103 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2105 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
2107 __ j(equal, &post_runtime);
2108 __ Push(rax); // generator object
2109 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2110 __ movp(context_register(),
2111 Operand(rbp, StandardFrameConstants::kContextOffset));
2112 __ bind(&post_runtime);
2114 __ Pop(result_register());
2115 EmitReturnSequence();
2118 context()->Plug(result_register());
2122 case Yield::kFinal: {
2123 VisitForAccumulatorValue(expr->generator_object());
2124 __ Move(FieldOperand(result_register(),
2125 JSGeneratorObject::kContinuationOffset),
2126 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2127 // Pop value from top-of-stack slot, box result into result register.
2128 EmitCreateIteratorResult(true);
2129 EmitUnwindBeforeReturn();
2130 EmitReturnSequence();
2134 case Yield::kDelegating: {
2135 VisitForStackValue(expr->generator_object());
2137 // Initial stack layout is as follows:
2138 // [sp + 1 * kPointerSize] iter
2139 // [sp + 0 * kPointerSize] g
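// Pieced together, the labeled blocks below implement roughly this loop (a
// sketch in the pseudo-code style of the comments that follow):
//   l_next:  receiver = iter; f = 'next'; arg = received;
//   l_call:  result = receiver[f](arg);
//   l_loop:  if (!result.done) { received = yield result; goto l_next; }
// with l_catch rerouting a thrown e as: receiver = iter; f = 'throw';
// arg = e; goto l_call;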
2141 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2142 Label l_next, l_call, l_loop;
2143 Register load_receiver = LoadDescriptor::ReceiverRegister();
2144 Register load_name = LoadDescriptor::NameRegister();
2146 // Initial send value is undefined.
2147 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2150 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2152 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2154 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2155 __ Push(rax); // exception
2158 // try { received = %yield result }
2159 // Shuffle the received result above a try handler and yield it without
2160 // re-boxing.
2162 __ Pop(rax); // result
2163 EnterTryBlock(expr->index(), &l_catch);
2164 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2165 __ Push(rax); // result
2167 __ bind(&l_continuation);
2169 __ bind(&l_suspend);
2170 const int generator_object_depth = kPointerSize + try_block_size;
2171 __ movp(rax, Operand(rsp, generator_object_depth));
2173 __ Push(Smi::FromInt(expr->index())); // handler-index
2174 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2175 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2176 Smi::FromInt(l_continuation.pos()));
2177 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2179 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2181 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2182 __ movp(context_register(),
2183 Operand(rbp, StandardFrameConstants::kContextOffset));
2184 __ Pop(rax); // result
2185 EmitReturnSequence();
2186 __ bind(&l_resume); // received in rax
2187 ExitTryBlock(expr->index());
2189 // receiver = iter; f = 'next'; arg = received;
2192 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2193 __ Push(load_name); // "next"
2194 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2195 __ Push(rax); // received
2197 // result = receiver[f](arg);
2199 __ movp(load_receiver, Operand(rsp, kPointerSize));
2200 __ Move(LoadDescriptor::SlotRegister(),
2201 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
2202 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2203 CallIC(ic, TypeFeedbackId::None());
2205 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2206 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2209 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2210 __ Drop(1); // The function is still on the stack; drop it.
2212 // if (!result.done) goto l_try;
2214 __ Move(load_receiver, rax);
2215 __ Push(load_receiver); // save result
2216 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2217 __ Move(LoadDescriptor::SlotRegister(),
2218 SmiFromSlot(expr->DoneFeedbackSlot()));
2219 CallLoadIC(NOT_CONTEXTUAL); // rax=result.done
2220 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2222 __ testp(result_register(), result_register());
2226 __ Pop(load_receiver); // result
2227 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2228 __ Move(LoadDescriptor::SlotRegister(),
2229 SmiFromSlot(expr->ValueFeedbackSlot()));
2230 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2231 context()->DropAndPlug(2, rax); // drop iter and g
2238 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2240 JSGeneratorObject::ResumeMode resume_mode) {
2241 // The value stays in rax, and is ultimately read by the resumed generator,
2242 // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it;
2243 // alternatively, it is read to throw the value when the resumed generator is already closed.
2244 // rbx will hold the generator object until the activation has been resumed.
2245 VisitForStackValue(generator);
2246 VisitForAccumulatorValue(value);
2249 // Load suspended function and context.
2250 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2251 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2254 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2256 // Push holes for arguments to generator function.
2257 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2258 __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2259 SharedFunctionInfo::kFormalParameterCountOffset);
2260 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2261 Label push_argument_holes, push_frame;
2262 __ bind(&push_argument_holes);
2263 __ subp(rdx, Immediate(1));
2264 __ j(carry, &push_frame);
2266 __ jmp(&push_argument_holes);
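// For illustration: resuming 'function* f(a, b)' pushes two holes here, one
// per formal parameter, rebuilding the frame shape without the original
// argument values.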
2268 // Enter a new JavaScript frame, and initialize its slots as they were when
2269 // the generator was suspended.
2270 Label resume_frame, done;
2271 __ bind(&push_frame);
2272 __ call(&resume_frame);
2274 __ bind(&resume_frame);
2275 __ pushq(rbp); // Caller's frame pointer.
2277 __ Push(rsi); // Callee's context.
2278 __ Push(rdi); // Callee's JS Function.
2280 // Load the operand stack size.
2281 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2282 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2283 __ SmiToInteger32(rdx, rdx);
2285 // If we are sending a value and there is no operand stack, we can jump back
2286 // in directly.
2287 if (resume_mode == JSGeneratorObject::NEXT) {
2289 __ cmpp(rdx, Immediate(0));
2290 __ j(not_zero, &slow_resume);
2291 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2292 __ SmiToInteger64(rcx,
2293 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2295 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2296 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2298 __ bind(&slow_resume);
2301 // Otherwise, we push holes for the operand stack and call the runtime to fix
2302 // up the stack and the handlers.
2303 Label push_operand_holes, call_resume;
2304 __ bind(&push_operand_holes);
2305 __ subp(rdx, Immediate(1));
2306 __ j(carry, &call_resume);
2308 __ jmp(&push_operand_holes);
2309 __ bind(&call_resume);
2311 __ Push(result_register());
2312 __ Push(Smi::FromInt(resume_mode));
2313 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2314 // Not reached: the runtime call returns elsewhere.
2315 __ Abort(kGeneratorFailedToResume);
2318 context()->Plug(result_register());
2322 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2326 const int instance_size = 5 * kPointerSize;
2327 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2330 __ Allocate(instance_size, rax, rcx, rdx, &gc_required, TAG_OBJECT);
2333 __ bind(&gc_required);
2334 __ Push(Smi::FromInt(instance_size));
2335 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2336 __ movp(context_register(),
2337 Operand(rbp, StandardFrameConstants::kContextOffset));
2339 __ bind(&allocated);
2340 __ movp(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2341 __ movp(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
2342 __ movp(rbx, ContextOperand(rbx, Context::ITERATOR_RESULT_MAP_INDEX));
2344 __ Move(rdx, isolate()->factory()->ToBoolean(done));
2345 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2346 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2347 isolate()->factory()->empty_fixed_array());
2348 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2349 isolate()->factory()->empty_fixed_array());
2350 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2352 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2355 // Only the value field needs a write barrier, as the other values are in the
2356 // root set.
2357 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2358 rcx, rdx, kDontSaveFPRegs);
2362 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2363 SetSourcePosition(prop->position());
2364 Literal* key = prop->key()->AsLiteral();
2365 DCHECK(!prop->IsSuperAccess());
2367 __ Move(LoadDescriptor::NameRegister(), key->value());
2368 __ Move(LoadDescriptor::SlotRegister(),
2369 SmiFromSlot(prop->PropertyFeedbackSlot()));
2370 CallLoadIC(NOT_CONTEXTUAL);
2374 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2375 // Stack: receiver, home_object
2376 SetSourcePosition(prop->position());
2377 Literal* key = prop->key()->AsLiteral();
2378 DCHECK(!key->value()->IsSmi());
2379 DCHECK(prop->IsSuperAccess());
2381 __ Push(key->value());
2382 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2386 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2387 SetSourcePosition(prop->position());
2388 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2389 __ Move(LoadDescriptor::SlotRegister(),
2390 SmiFromSlot(prop->PropertyFeedbackSlot()));
2395 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2396 // Stack: receiver, home_object, key.
2397 SetSourcePosition(prop->position());
2399 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2403 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2406 Expression* right) {
2407 // Do a combined smi check of the operands. The left operand is on the
2408 // stack (popped into rdx). The right operand is in rax but moved into
2409 // rcx to make the shifts easier.
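// For illustration (hypothetical input): for 'a + b' with two smi operands
// the inline SmiAdd path below runs without calling out; a non-smi operand
// or a smi overflow falls through to the BinaryOpIC stub call instead.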
2410 Label done, stub_call, smi_case;
2414 JumpPatchSite patch_site(masm_);
2415 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2417 __ bind(&stub_call);
2419 Handle<Code> code = CodeFactory::BinaryOpIC(
2420 isolate(), op, language_mode()).code();
2421 CallIC(code, expr->BinaryOperationFeedbackId());
2422 patch_site.EmitPatchInfo();
2423 __ jmp(&done, Label::kNear);
2428 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2431 __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2434 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2437 __ SmiAdd(rax, rdx, rcx, &stub_call);
2440 __ SmiSub(rax, rdx, rcx, &stub_call);
2443 __ SmiMul(rax, rdx, rcx, &stub_call);
2446 __ SmiOr(rax, rdx, rcx);
2448 case Token::BIT_AND:
2449 __ SmiAnd(rax, rdx, rcx);
2451 case Token::BIT_XOR:
2452 __ SmiXor(rax, rdx, rcx);
2460 context()->Plug(rax);
2464 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2465 // Constructor is in rax.
2466 DCHECK(lit != NULL);
2469 // No access check is needed here since the constructor is created by the
2470 // class.
2471 Register scratch = rbx;
2472 __ movp(scratch, FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset));
2475 // store_slot_index points to the vector IC slot for the next store IC used.
2476 // ClassLiteral::ComputeFeedbackRequirements controls the allocation of slots
2477 // and must be updated if the number of store ICs emitted here changes.
2478 int store_slot_index = 0;
2479 for (int i = 0; i < lit->properties()->length(); i++) {
2480 ObjectLiteral::Property* property = lit->properties()->at(i);
2481 Expression* value = property->value();
2483 if (property->is_static()) {
2484 __ Push(Operand(rsp, kPointerSize)); // constructor
2486 __ Push(Operand(rsp, 0)); // prototype
2488 EmitPropertyKey(property, lit->GetIdForProperty(i));
2490 // The static prototype property is read-only. We handle the non-computed
2491 // property name case in the parser. Since this is the only case where we
2492 // need to check for an own read-only property, we special-case it here so
2493 // we do not need to do the check for every property.
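// For illustration (hypothetical input): 'class C { static ["prototype"]()
// {} }' must be rejected by the runtime check below, while the non-computed
// form 'static prototype() {}' is already rejected by the parser.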
2494 if (property->is_static() && property->is_computed_name()) {
2495 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2499 VisitForStackValue(value);
2500 EmitSetHomeObjectIfNeeded(value, 2,
2501 lit->SlotForHomeObject(value, &store_slot_index));
2503 switch (property->kind()) {
2504 case ObjectLiteral::Property::CONSTANT:
2505 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2506 case ObjectLiteral::Property::PROTOTYPE:
2508 case ObjectLiteral::Property::COMPUTED:
2509 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2512 case ObjectLiteral::Property::GETTER:
2513 __ Push(Smi::FromInt(DONT_ENUM));
2514 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2517 case ObjectLiteral::Property::SETTER:
2518 __ Push(Smi::FromInt(DONT_ENUM));
2519 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2528 __ CallRuntime(Runtime::kToFastProperties, 1);
2531 __ CallRuntime(Runtime::kToFastProperties, 1);
2533 // Verify that compilation exactly consumed the number of store IC slots that
2534 // the ClassLiteral node had to offer.
2535 DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
2539 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2541 Handle<Code> code = CodeFactory::BinaryOpIC(
2542 isolate(), op, language_mode()).code();
2543 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2544 CallIC(code, expr->BinaryOperationFeedbackId());
2545 patch_site.EmitPatchInfo();
2546 context()->Plug(rax);
2550 void FullCodeGenerator::EmitAssignment(Expression* expr,
2551 FeedbackVectorICSlot slot) {
2552 DCHECK(expr->IsValidReferenceExpression());
2554 Property* prop = expr->AsProperty();
2555 LhsKind assign_type = Property::GetAssignType(prop);
2557 switch (assign_type) {
2559 Variable* var = expr->AsVariableProxy()->var();
2560 EffectContext context(this);
2561 EmitVariableAssignment(var, Token::ASSIGN, slot);
2564 case NAMED_PROPERTY: {
2565 __ Push(rax); // Preserve value.
2566 VisitForAccumulatorValue(prop->obj());
2567 __ Move(StoreDescriptor::ReceiverRegister(), rax);
2568 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2569 __ Move(StoreDescriptor::NameRegister(),
2570 prop->key()->AsLiteral()->value());
2571 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2575 case NAMED_SUPER_PROPERTY: {
2577 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2578 VisitForAccumulatorValue(
2579 prop->obj()->AsSuperPropertyReference()->home_object());
2580 // stack: value, this; rax: home_object
2581 Register scratch = rcx;
2582 Register scratch2 = rdx;
2583 __ Move(scratch, result_register()); // home_object
2584 __ movp(rax, MemOperand(rsp, kPointerSize)); // value
2585 __ movp(scratch2, MemOperand(rsp, 0)); // this
2586 __ movp(MemOperand(rsp, kPointerSize), scratch2); // this
2587 __ movp(MemOperand(rsp, 0), scratch); // home_object
2588 // stack: this, home_object; rax: value
2589 EmitNamedSuperPropertyStore(prop);
2592 case KEYED_SUPER_PROPERTY: {
2594 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2596 prop->obj()->AsSuperPropertyReference()->home_object());
2597 VisitForAccumulatorValue(prop->key());
2598 Register scratch = rcx;
2599 Register scratch2 = rdx;
2600 __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize)); // value
2601 // stack: value, this, home_object; rax: key, rdx: value
2602 __ movp(scratch, MemOperand(rsp, kPointerSize)); // this
2603 __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
2604 __ movp(scratch, MemOperand(rsp, 0)); // home_object
2605 __ movp(MemOperand(rsp, kPointerSize), scratch);
2606 __ movp(MemOperand(rsp, 0), rax);
2607 __ Move(rax, scratch2);
2608 // stack: this, home_object, key; rax: value.
2609 EmitKeyedSuperPropertyStore(prop);
2612 case KEYED_PROPERTY: {
2613 __ Push(rax); // Preserve value.
2614 VisitForStackValue(prop->obj());
2615 VisitForAccumulatorValue(prop->key());
2616 __ Move(StoreDescriptor::NameRegister(), rax);
2617 __ Pop(StoreDescriptor::ReceiverRegister());
2618 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2619 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2621 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2626 context()->Plug(rax);
2630 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2631 Variable* var, MemOperand location) {
2632 __ movp(location, rax);
2633 if (var->IsContextSlot()) {
2635 __ RecordWriteContextSlot(
2636 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2641 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2642 FeedbackVectorICSlot slot) {
2643 if (var->IsUnallocated()) {
2644 // Global var, const, or let.
2645 __ Move(StoreDescriptor::NameRegister(), var->name());
2646 __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2647 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2650 } else if (var->mode() == LET && op != Token::INIT_LET) {
2651 // Non-initializing assignment to let variable needs a write barrier.
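// For illustration (hypothetical input): in '{ x = 1; let x; }' the slot
// still holds the hole when the assignment runs, so the code below throws a
// ReferenceError instead of storing.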
2652 DCHECK(!var->IsLookupSlot());
2653 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2655 MemOperand location = VarOperand(var, rcx);
2656 __ movp(rdx, location);
2657 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2658 __ j(not_equal, &assign, Label::kNear);
2659 __ Push(var->name());
2660 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2662 EmitStoreToStackLocalOrContextSlot(var, location);
2664 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2665 // Assignment to const variable needs a write barrier.
2666 DCHECK(!var->IsLookupSlot());
2667 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2669 MemOperand location = VarOperand(var, rcx);
2670 __ movp(rdx, location);
2671 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2672 __ j(not_equal, &const_error, Label::kNear);
2673 __ Push(var->name());
2674 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2675 __ bind(&const_error);
2676 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2678 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2679 if (var->IsLookupSlot()) {
2680 // Assignment to var.
2681 __ Push(rax); // Value.
2682 __ Push(rsi); // Context.
2683 __ Push(var->name());
2684 __ Push(Smi::FromInt(language_mode()));
2685 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2687 // Assignment to var or initializing assignment to let/const in harmony
2688 // mode.
2689 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2690 MemOperand location = VarOperand(var, rcx);
2691 if (generate_debug_code_ && op == Token::INIT_LET) {
2692 // Check for an uninitialized let binding.
2693 __ movp(rdx, location);
2694 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2695 __ Check(equal, kLetBindingReInitialization);
2697 EmitStoreToStackLocalOrContextSlot(var, location);
2700 } else if (op == Token::INIT_CONST_LEGACY) {
2701 // Const initializers need a write barrier.
2702 DCHECK(var->mode() == CONST_LEGACY);
2703 DCHECK(!var->IsParameter()); // No const parameters.
2704 if (var->IsLookupSlot()) {
2707 __ Push(var->name());
2708 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2710 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2712 MemOperand location = VarOperand(var, rcx);
2713 __ movp(rdx, location);
2714 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2715 __ j(not_equal, &skip);
2716 EmitStoreToStackLocalOrContextSlot(var, location);
2721 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2722 if (is_strict(language_mode())) {
2723 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2725 // Silently ignore store in sloppy mode.
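// For illustration (hypothetical input): in sloppy code 'const x = 1;
// x = 2;' takes this path and leaves x at 1 without throwing.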
2730 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2731 // Assignment to a property, using a named store IC.
2732 Property* prop = expr->target()->AsProperty();
2733 DCHECK(prop != NULL);
2734 DCHECK(prop->key()->IsLiteral());
2736 // Record source code position before IC call.
2737 SetSourcePosition(expr->position());
2738 __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2739 __ Pop(StoreDescriptor::ReceiverRegister());
2740 if (FLAG_vector_stores) {
2741 EmitLoadStoreICSlot(expr->AssignmentSlot());
2744 CallStoreIC(expr->AssignmentFeedbackId());
2747 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2748 context()->Plug(rax);
2752 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2753 // Assignment to named property of super.
2755 // stack : receiver ('this'), home_object
2756 DCHECK(prop != NULL);
2757 Literal* key = prop->key()->AsLiteral();
2758 DCHECK(key != NULL);
2760 __ Push(key->value());
2762 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2763 : Runtime::kStoreToSuper_Sloppy),
2768 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2769 // Assignment to keyed property of super.
2771 // stack : receiver ('this'), home_object, key
2772 DCHECK(prop != NULL);
2776 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2777 : Runtime::kStoreKeyedToSuper_Sloppy),
2782 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2783 // Assignment to a property, using a keyed store IC.
2785 __ Pop(StoreDescriptor::NameRegister()); // Key.
2786 __ Pop(StoreDescriptor::ReceiverRegister());
2787 DCHECK(StoreDescriptor::ValueRegister().is(rax));
2788 // Record source code position before IC call.
2789 SetSourcePosition(expr->position());
2791 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2792 if (FLAG_vector_stores) {
2793 EmitLoadStoreICSlot(expr->AssignmentSlot());
2796 CallIC(ic, expr->AssignmentFeedbackId());
2799 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2800 context()->Plug(rax);
2804 void FullCodeGenerator::VisitProperty(Property* expr) {
2805 Comment cmnt(masm_, "[ Property");
2806 Expression* key = expr->key();
2808 if (key->IsPropertyName()) {
2809 if (!expr->IsSuperAccess()) {
2810 VisitForAccumulatorValue(expr->obj());
2811 DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2812 __ movp(LoadDescriptor::ReceiverRegister(), rax);
2813 EmitNamedPropertyLoad(expr);
2815 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2817 expr->obj()->AsSuperPropertyReference()->home_object());
2818 EmitNamedSuperPropertyLoad(expr);
2821 if (!expr->IsSuperAccess()) {
2822 VisitForStackValue(expr->obj());
2823 VisitForAccumulatorValue(expr->key());
2824 __ Move(LoadDescriptor::NameRegister(), rax);
2825 __ Pop(LoadDescriptor::ReceiverRegister());
2826 EmitKeyedPropertyLoad(expr);
2828 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2830 expr->obj()->AsSuperPropertyReference()->home_object());
2831 VisitForStackValue(expr->key());
2832 EmitKeyedSuperPropertyLoad(expr);
2835 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2836 context()->Plug(rax);
2840 void FullCodeGenerator::CallIC(Handle<Code> code,
2841 TypeFeedbackId ast_id) {
2843 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2847 // Code common for calls using the IC.
2848 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2849 Expression* callee = expr->expression();
2851 CallICState::CallType call_type =
2852 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2853 // Get the target function.
2854 if (call_type == CallICState::FUNCTION) {
2855 { StackValueContext context(this);
2856 EmitVariableLoad(callee->AsVariableProxy());
2857 PrepareForBailout(callee, NO_REGISTERS);
2859 // Push undefined as receiver. This is patched in the method prologue if it
2860 // is a sloppy mode method.
2861 __ Push(isolate()->factory()->undefined_value());
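// For illustration (hypothetical input): for a global call 'foo(1)' the
// receiver slot pushed here starts out as undefined; a sloppy-mode foo then
// swaps in the global proxy in its own prologue.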
2863 // Load the function from the receiver.
2864 DCHECK(callee->IsProperty());
2865 DCHECK(!callee->AsProperty()->IsSuperAccess());
2866 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2867 EmitNamedPropertyLoad(callee->AsProperty());
2868 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2869 // Push the target function under the receiver.
2870 __ Push(Operand(rsp, 0));
2871 __ movp(Operand(rsp, kPointerSize), rax);
2874 EmitCall(expr, call_type);
2878 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2879 Expression* callee = expr->expression();
2880 DCHECK(callee->IsProperty());
2881 Property* prop = callee->AsProperty();
2882 DCHECK(prop->IsSuperAccess());
2884 SetSourcePosition(prop->position());
2885 Literal* key = prop->key()->AsLiteral();
2886 DCHECK(!key->value()->IsSmi());
2887 // Load the function from the receiver.
2888 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2889 VisitForStackValue(super_ref->home_object());
2890 VisitForAccumulatorValue(super_ref->this_var());
2893 __ Push(Operand(rsp, kPointerSize * 2));
2894 __ Push(key->value());
2898 // - this (receiver)
2899 // - this (receiver) <-- LoadFromSuper will pop here and below.
2900 // - home_object
2901 // - key
2902 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2904 // Replace home_object with target function.
2905 __ movp(Operand(rsp, kPointerSize), rax);
2908 // - target function
2909 // - this (receiver)
2910 EmitCall(expr, CallICState::METHOD);
2914 // Common code for calls using the IC.
2915 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2918 VisitForAccumulatorValue(key);
2920 Expression* callee = expr->expression();
2922 // Load the function from the receiver.
2923 DCHECK(callee->IsProperty());
2924 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2925 __ Move(LoadDescriptor::NameRegister(), rax);
2926 EmitKeyedPropertyLoad(callee->AsProperty());
2927 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2929 // Push the target function under the receiver.
2930 __ Push(Operand(rsp, 0));
2931 __ movp(Operand(rsp, kPointerSize), rax);
2933 EmitCall(expr, CallICState::METHOD);
2937 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2938 Expression* callee = expr->expression();
2939 DCHECK(callee->IsProperty());
2940 Property* prop = callee->AsProperty();
2941 DCHECK(prop->IsSuperAccess());
2943 SetSourcePosition(prop->position());
2944 // Load the function from the receiver.
2945 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2946 VisitForStackValue(super_ref->home_object());
2947 VisitForAccumulatorValue(super_ref->this_var());
2950 __ Push(Operand(rsp, kPointerSize * 2));
2951 VisitForStackValue(prop->key());
2955 // - this (receiver)
2956 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2957 // - home_object
2958 // - key
2959 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2961 // Replace home_object with target function.
2962 __ movp(Operand(rsp, kPointerSize), rax);
2965 // - target function
2966 // - this (receiver)
2967 EmitCall(expr, CallICState::METHOD);
2971 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2972 // Load the arguments.
2973 ZoneList<Expression*>* args = expr->arguments();
2974 int arg_count = args->length();
2975 { PreservePositionScope scope(masm()->positions_recorder());
2976 for (int i = 0; i < arg_count; i++) {
2977 VisitForStackValue(args->at(i));
2981 // Record source position of the IC call.
2982 SetSourcePosition(expr->position());
2983 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2984 __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
2985 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2986 // Don't assign a type feedback id to the IC, since type feedback is provided
2987 // by the vector above.
2990 RecordJSReturnSite(expr);
2992 // Restore context register.
2993 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2994 // Discard the function left on TOS.
2995 context()->DropAndPlug(1, rax);
2999 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3000 // Push copy of the first argument or undefined if it doesn't exist.
3001 if (arg_count > 0) {
3002 __ Push(Operand(rsp, arg_count * kPointerSize));
3004 __ PushRoot(Heap::kUndefinedValueRootIndex);
3007 // Push the enclosing function.
3008 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3010 // Push the receiver of the enclosing function and do runtime call.
3011 Variable* this_var = scope()->LookupThis();
3012 DCHECK_NOT_NULL(this_var);
3013 __ Push(VarOperand(this_var, rcx));
3015 // Push the language mode.
3016 __ Push(Smi::FromInt(language_mode()));
3018 // Push the start position of the scope the call resides in.
3019 __ Push(Smi::FromInt(scope()->start_position()));
3021 // Do the runtime call.
3022 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
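// Note: together with the copy of the function pushed by the caller, the
// five pushes above form the six arguments consumed by this runtime call:
// function, first argument, enclosing function, receiver, language mode,
// and scope start position.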
3026 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3027 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3028 Variable* this_var = super_ref->this_var()->var();
3029 GetVar(rcx, this_var);
3030 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
3031 Label uninitialized_this;
3032 __ j(equal, &uninitialized_this);
3033 __ Push(this_var->name());
3034 __ CallRuntime(Runtime::kThrowReferenceError, 1);
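// For illustration (hypothetical input): in 'class B extends A {
// constructor() { super(); super(); } }' the second super() finds 'this'
// already initialized (no longer the hole) and lands on the throw above.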
3035 __ bind(&uninitialized_this);
3037 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3041 void FullCodeGenerator::VisitCall(Call* expr) {
3043 // We want to verify that RecordJSReturnSite gets called on all paths
3044 // through this function. Avoid early returns.
3045 expr->return_is_recorded_ = false;
3048 Comment cmnt(masm_, "[ Call");
3049 Expression* callee = expr->expression();
3050 Call::CallType call_type = expr->GetCallType(isolate());
3052 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3053 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3054 // to resolve the function we need to call and the receiver of the call.
3055 // Then we call the resolved function using the given arguments.
3056 ZoneList<Expression*>* args = expr->arguments();
3057 int arg_count = args->length();
3058 { PreservePositionScope pos_scope(masm()->positions_recorder());
3059 VisitForStackValue(callee);
3060 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
3062 // Push the arguments.
3063 for (int i = 0; i < arg_count; i++) {
3064 VisitForStackValue(args->at(i));
3067 // Push a copy of the function (found below the arguments) and resolve
3068 // eval.
3069 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
3070 EmitResolvePossiblyDirectEval(arg_count);
3072 // The runtime call returns a pair of values in rax (function) and
3073 // rdx (receiver). Touch up the stack with the right values.
3074 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
3075 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
3077 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3079 // Record source position for debugger.
3080 SetSourcePosition(expr->position());
3081 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3082 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
3084 RecordJSReturnSite(expr);
3085 // Restore context register.
3086 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3087 context()->DropAndPlug(1, rax);
3088 } else if (call_type == Call::GLOBAL_CALL) {
3089 EmitCallWithLoadIC(expr);
3091 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3092 // Call to a lookup slot (dynamically introduced variable).
3093 VariableProxy* proxy = callee->AsVariableProxy();
3096 { PreservePositionScope scope(masm()->positions_recorder());
3097 // Generate code for loading from variables potentially shadowed by
3098 // eval-introduced variables.
3099 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3102 // Call the runtime to find the function to call (returned in rax) and
3103 // the object holding it (returned in rdx).
3104 __ Push(context_register());
3105 __ Push(proxy->name());
3106 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3107 __ Push(rax); // Function.
3108 __ Push(rdx); // Receiver.
3109 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3111 // If fast case code has been generated, emit code to push the function
3112 // and receiver and have the slow path jump around this code.
3113 if (done.is_linked()) {
3115 __ jmp(&call, Label::kNear);
3119 // The receiver is implicitly the global receiver. Indicate this by
3120 // passing undefined to the call function stub.
3121 __ PushRoot(Heap::kUndefinedValueRootIndex);
3125 // The receiver is either the global receiver or an object found by
3126 // LoadLookupSlot.
3128 } else if (call_type == Call::PROPERTY_CALL) {
3129 Property* property = callee->AsProperty();
3130 bool is_named_call = property->key()->IsPropertyName();
3131 if (property->IsSuperAccess()) {
3132 if (is_named_call) {
3133 EmitSuperCallWithLoadIC(expr);
3135 EmitKeyedSuperCallWithLoadIC(expr);
3139 PreservePositionScope scope(masm()->positions_recorder());
3140 VisitForStackValue(property->obj());
3142 if (is_named_call) {
3143 EmitCallWithLoadIC(expr);
3145 EmitKeyedCallWithLoadIC(expr, property->key());
3148 } else if (call_type == Call::SUPER_CALL) {
3149 EmitSuperConstructorCall(expr);
3151 DCHECK(call_type == Call::OTHER_CALL);
3152 // Call to an arbitrary expression not handled specially above.
3153 { PreservePositionScope scope(masm()->positions_recorder());
3154 VisitForStackValue(callee);
3156 __ PushRoot(Heap::kUndefinedValueRootIndex);
3157 // Emit function call.
3162 // RecordJSReturnSite should have been called.
3163 DCHECK(expr->return_is_recorded_);
3168 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3169 Comment cmnt(masm_, "[ CallNew");
3170 // According to ECMA-262, section 11.2.2, page 44, the function
3171 // expression in new calls must be evaluated before the
3172 // arguments.
3174 // Push constructor on the stack. If it's not a function it's used as
3175 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3176 // ignored.
3177 DCHECK(!expr->expression()->IsSuperPropertyReference());
3178 VisitForStackValue(expr->expression());
3180 // Push the arguments ("left-to-right") on the stack.
3181 ZoneList<Expression*>* args = expr->arguments();
3182 int arg_count = args->length();
3183 for (int i = 0; i < arg_count; i++) {
3184 VisitForStackValue(args->at(i));
3187 // Call the construct call builtin that handles allocation and
3188 // constructor invocation.
3189 SetSourcePosition(expr->position());
3191 // Load function and argument count into rdi and rax.
3192 __ Set(rax, arg_count);
3193 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3195 // Record call targets in unoptimized code, but not in the snapshot.
3196 if (FLAG_pretenuring_call_new) {
3197 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3198 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3199 expr->CallNewFeedbackSlot().ToInt() + 1);
3202 __ Move(rbx, FeedbackVector());
3203 __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
3205 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3206 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3207 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3208 context()->Plug(rax);
3212 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3213 SuperCallReference* super_call_ref =
3214 expr->expression()->AsSuperCallReference();
3215 DCHECK_NOT_NULL(super_call_ref);
3217 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3218 VisitForStackValue(new_target_proxy);
3220 EmitLoadSuperConstructor(super_call_ref);
3221 __ Push(result_register());
3223 // Push the arguments ("left-to-right") on the stack.
3224 ZoneList<Expression*>* args = expr->arguments();
3225 int arg_count = args->length();
3226 for (int i = 0; i < arg_count; i++) {
3227 VisitForStackValue(args->at(i));
3230 // Call the construct call builtin that handles allocation and
3231 // constructor invocation.
3232 SetSourcePosition(expr->position());
3234 // Load function and argument count into rdi and rax.
3235 __ Set(rax, arg_count);
3236 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3238 // Record call targets in unoptimized code.
3239 if (FLAG_pretenuring_call_new) {
3241 /* TODO(dslomov): support pretenuring.
3242 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3243 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3244 expr->CallNewFeedbackSlot().ToInt() + 1);
3248 __ Move(rbx, FeedbackVector());
3249 __ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
3251 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3252 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3256 RecordJSReturnSite(expr);
3258 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3259 context()->Plug(rax);
3263 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3264 ZoneList<Expression*>* args = expr->arguments();
3265 DCHECK(args->length() == 1);
3267 VisitForAccumulatorValue(args->at(0));
3269 Label materialize_true, materialize_false;
3270 Label* if_true = NULL;
3271 Label* if_false = NULL;
3272 Label* fall_through = NULL;
3273 context()->PrepareTest(&materialize_true, &materialize_false,
3274 &if_true, &if_false, &fall_through);
3276 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3277 __ JumpIfSmi(rax, if_true);
3280 context()->Plug(if_true, if_false);
3284 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3285 ZoneList<Expression*>* args = expr->arguments();
3286 DCHECK(args->length() == 1);
3288 VisitForAccumulatorValue(args->at(0));
3290 Label materialize_true, materialize_false;
3291 Label* if_true = NULL;
3292 Label* if_false = NULL;
3293 Label* fall_through = NULL;
3294 context()->PrepareTest(&materialize_true, &materialize_false,
3295 &if_true, &if_false, &fall_through);
3297 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3298 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
3299 Split(non_negative_smi, if_true, if_false, fall_through);
3301 context()->Plug(if_true, if_false);
3305 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3306 ZoneList<Expression*>* args = expr->arguments();
3307 DCHECK(args->length() == 1);
3309 VisitForAccumulatorValue(args->at(0));
3311 Label materialize_true, materialize_false;
3312 Label* if_true = NULL;
3313 Label* if_false = NULL;
3314 Label* fall_through = NULL;
3315 context()->PrepareTest(&materialize_true, &materialize_false,
3316 &if_true, &if_false, &fall_through);
3318 __ JumpIfSmi(rax, if_false);
3319 __ CompareRoot(rax, Heap::kNullValueRootIndex);
3320 __ j(equal, if_true);
3321 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3322 // Undetectable objects behave like undefined when tested with typeof.
3323 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3324 Immediate(1 << Map::kIsUndetectable));
3325 __ j(not_zero, if_false);
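// For illustration: document.all is the canonical undetectable object, so
// the bit check above makes %_IsObject(document.all) answer false even
// though it is a JSObject.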
3326 __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3327 __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3328 __ j(below, if_false);
3329 __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3330 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3331 Split(below_equal, if_true, if_false, fall_through);
3333 context()->Plug(if_true, if_false);
3337 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3338 ZoneList<Expression*>* args = expr->arguments();
3339 DCHECK(args->length() == 1);
3341 VisitForAccumulatorValue(args->at(0));
3343 Label materialize_true, materialize_false;
3344 Label* if_true = NULL;
3345 Label* if_false = NULL;
3346 Label* fall_through = NULL;
3347 context()->PrepareTest(&materialize_true, &materialize_false,
3348 &if_true, &if_false, &fall_through);
3350 __ JumpIfSmi(rax, if_false);
3351 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
3352 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3353 Split(above_equal, if_true, if_false, fall_through);
3355 context()->Plug(if_true, if_false);
3359 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3360 ZoneList<Expression*>* args = expr->arguments();
3361 DCHECK(args->length() == 1);
3363 VisitForAccumulatorValue(args->at(0));
3365 Label materialize_true, materialize_false;
3366 Label* if_true = NULL;
3367 Label* if_false = NULL;
3368 Label* fall_through = NULL;
3369 context()->PrepareTest(&materialize_true, &materialize_false,
3370 &if_true, &if_false, &fall_through);
3372 __ JumpIfSmi(rax, if_false);
3373 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3374 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3375 Immediate(1 << Map::kIsUndetectable));
3376 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3377 Split(not_zero, if_true, if_false, fall_through);
3379 context()->Plug(if_true, if_false);
3383 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3384 CallRuntime* expr) {
3385 ZoneList<Expression*>* args = expr->arguments();
3386 DCHECK(args->length() == 1);
3388 VisitForAccumulatorValue(args->at(0));
3390 Label materialize_true, materialize_false, skip_lookup;
3391 Label* if_true = NULL;
3392 Label* if_false = NULL;
3393 Label* fall_through = NULL;
3394 context()->PrepareTest(&materialize_true, &materialize_false,
3395 &if_true, &if_false, &fall_through);
3397 __ AssertNotSmi(rax);
3399 // Check whether this map has already been checked to be safe for default
3400 // valueOf.
3401 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3402 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3403 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3404 __ j(not_zero, &skip_lookup);
3406 // Check for a fast-case object. Generate a false result for slow-case objects.
3407 __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3408 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3409 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3410 __ j(equal, if_false);
3412 // Look for the valueOf string in the descriptor array; indicate false if
3413 // found. Since we omit an enumeration index check, a valueOf added via a
3414 // transition that shares this descriptor array shows up as a false positive.
3415 Label entry, loop, done;
3417 // Skip loop if no descriptors are valid.
3418 __ NumberOfOwnDescriptors(rcx, rbx);
3419 __ cmpp(rcx, Immediate(0));
3422 __ LoadInstanceDescriptors(rbx, r8);
3423 // rbx: descriptor array.
3424 // rcx: valid entries in the descriptor array.
3425 // Calculate the end of the descriptor array.
3426 __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3428 Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3429 // Calculate location of the first key name.
3430 __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3431 // Loop through all the keys in the descriptor array. If one of these is the
3432 // internalized string "valueOf", the result is false.
3435 __ movp(rdx, FieldOperand(r8, 0));
3436 __ Cmp(rdx, isolate()->factory()->value_of_string());
3437 __ j(equal, if_false);
3438 __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3441 __ j(not_equal, &loop);
3445 // Set the bit in the map to indicate that there is no local valueOf field.
3446 __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3447 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3449 __ bind(&skip_lookup);
3451 // If a valueOf property is not found on the object, check that its
3452 // prototype is the unmodified String prototype. If not, the result is false.
3453 __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3454 __ testp(rcx, Immediate(kSmiTagMask));
3455 __ j(zero, if_false);
3456 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3457 __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3458 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3460 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3461 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3462 Split(equal, if_true, if_false, fall_through);
3464 context()->Plug(if_true, if_false);
3468 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3469 ZoneList<Expression*>* args = expr->arguments();
3470 DCHECK(args->length() == 1);
3472 VisitForAccumulatorValue(args->at(0));
3474 Label materialize_true, materialize_false;
3475 Label* if_true = NULL;
3476 Label* if_false = NULL;
3477 Label* fall_through = NULL;
3478 context()->PrepareTest(&materialize_true, &materialize_false,
3479 &if_true, &if_false, &fall_through);
3481 __ JumpIfSmi(rax, if_false);
3482 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3483 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3484 Split(equal, if_true, if_false, fall_through);
3486 context()->Plug(if_true, if_false);
3490 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3491 ZoneList<Expression*>* args = expr->arguments();
3492 DCHECK(args->length() == 1);
3494 VisitForAccumulatorValue(args->at(0));
3496 Label materialize_true, materialize_false;
3497 Label* if_true = NULL;
3498 Label* if_false = NULL;
3499 Label* fall_through = NULL;
3500 context()->PrepareTest(&materialize_true, &materialize_false,
3501 &if_true, &if_false, &fall_through);
3503 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3504 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3505 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3507 __ j(no_overflow, if_false);
3508 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3509 Immediate(0x00000000));
3510 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3511 Split(equal, if_true, if_false, fall_through);
3513 context()->Plug(if_true, if_false);
3517 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3518 ZoneList<Expression*>* args = expr->arguments();
3519 DCHECK(args->length() == 1);
3521 VisitForAccumulatorValue(args->at(0));
3523 Label materialize_true, materialize_false;
3524 Label* if_true = NULL;
3525 Label* if_false = NULL;
3526 Label* fall_through = NULL;
3527 context()->PrepareTest(&materialize_true, &materialize_false,
3528 &if_true, &if_false, &fall_through);
3530 __ JumpIfSmi(rax, if_false);
3531 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3532 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3533 Split(equal, if_true, if_false, fall_through);
3535 context()->Plug(if_true, if_false);
3539 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3540 ZoneList<Expression*>* args = expr->arguments();
3541 DCHECK(args->length() == 1);
3543 VisitForAccumulatorValue(args->at(0));
3545 Label materialize_true, materialize_false;
3546 Label* if_true = NULL;
3547 Label* if_false = NULL;
3548 Label* fall_through = NULL;
3549 context()->PrepareTest(&materialize_true, &materialize_false,
3550 &if_true, &if_false, &fall_through);
3552 __ JumpIfSmi(rax, if_false);
3553 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3555 Split(equal, if_true, if_false, fall_through);
3557 context()->Plug(if_true, if_false);
3561 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3562 ZoneList<Expression*>* args = expr->arguments();
3563 DCHECK(args->length() == 1);
3565 VisitForAccumulatorValue(args->at(0));
3567 Label materialize_true, materialize_false;
3568 Label* if_true = NULL;
3569 Label* if_false = NULL;
3570 Label* fall_through = NULL;
3571 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3572 &if_false, &fall_through);
3574 __ JumpIfSmi(rax, if_false);
3576 __ movp(map, FieldOperand(rax, HeapObject::kMapOffset));
3577 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3578 __ j(less, if_false);
3579 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3580 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3581 Split(less_equal, if_true, if_false, fall_through);
3583 context()->Plug(if_true, if_false);
3587 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3588 DCHECK(expr->arguments()->length() == 0);
3590 Label materialize_true, materialize_false;
3591 Label* if_true = NULL;
3592 Label* if_false = NULL;
3593 Label* fall_through = NULL;
3594 context()->PrepareTest(&materialize_true, &materialize_false,
3595 &if_true, &if_false, &fall_through);
3597 // Get the frame pointer for the calling frame.
3598 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3600 // Skip the arguments adaptor frame if it exists.
3601 Label check_frame_marker;
3602 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3603 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3604 __ j(not_equal, &check_frame_marker);
3605 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3607 // Check the marker in the calling frame.
3608 __ bind(&check_frame_marker);
3609 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3610 Smi::FromInt(StackFrame::CONSTRUCT));
3611 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3612 Split(equal, if_true, if_false, fall_through);
3614 context()->Plug(if_true, if_false);
3615 }
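// Editor's sketch of the walk above, using a hypothetical frame layout rather
// than V8's real frame classes: load the caller's frame pointer, skip one
// arguments-adaptor frame if its context slot holds the adaptor sentinel,
// then compare the frame marker slot against CONSTRUCT.
struct FrameSketch {
  FrameSketch* caller_fp;  // at StandardFrameConstants::kCallerFPOffset
  long context_or_tag;     // at kContextOffset; adaptor frames keep a tag here
  long marker;             // at kMarkerOffset
};
static inline bool IsConstructCallSketch(const FrameSketch* fp,
                                         long adaptor_tag,
                                         long construct_marker) {
  const FrameSketch* frame = fp->caller_fp;
  if (frame->context_or_tag == adaptor_tag) frame = frame->caller_fp;
  return frame->marker == construct_marker;
}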
3618 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3619 ZoneList<Expression*>* args = expr->arguments();
3620 DCHECK(args->length() == 2);
3622 // Load the two objects into registers and perform the comparison.
3623 VisitForStackValue(args->at(0));
3624 VisitForAccumulatorValue(args->at(1));
3626 Label materialize_true, materialize_false;
3627 Label* if_true = NULL;
3628 Label* if_false = NULL;
3629 Label* fall_through = NULL;
3630 context()->PrepareTest(&materialize_true, &materialize_false,
3631 &if_true, &if_false, &fall_through);
3633 __ Pop(rbx);
3634 __ cmpp(rax, rbx);
3635 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3636 Split(equal, if_true, if_false, fall_through);
3638 context()->Plug(if_true, if_false);
3642 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3643 ZoneList<Expression*>* args = expr->arguments();
3644 DCHECK(args->length() == 1);
3646 // ArgumentsAccessStub expects the key in rdx and the formal
3647 // parameter count in rax.
3648 VisitForAccumulatorValue(args->at(0));
3649 __ movp(rdx, rax);
3650 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3651 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3652 __ CallStub(&stub);
3653 context()->Plug(rax);
3657 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3658 DCHECK(expr->arguments()->length() == 0);
3660 Label exit;
3661 // Get the number of formal parameters.
3662 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3664 // Check if the calling frame is an arguments adaptor frame.
3665 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3666 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3667 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3668 __ j(not_equal, &exit, Label::kNear);
3670 // Arguments adaptor case: Read the arguments length from the
3671 // adaptor frame.
3672 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3674 __ bind(&exit);
3675 __ AssertSmi(rax);
3676 context()->Plug(rax);
3680 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3681 ZoneList<Expression*>* args = expr->arguments();
3682 DCHECK(args->length() == 1);
3683 Label done, null, function, non_function_constructor;
3685 VisitForAccumulatorValue(args->at(0));
3687 // If the object is a smi, we return null.
3688 __ JumpIfSmi(rax, &null);
3690 // Check that the object is a JS object but take special care of JS
3691 // functions to make sure they have 'Function' as their class.
3692 // Assume that there are only two callable types, and one of them is at
3693 // either end of the type range for JS object types. Saves extra comparisons.
3694 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3695 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3696 // Map is now in rax.
3697 __ j(below, &null);
3698 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3699 FIRST_SPEC_OBJECT_TYPE + 1);
3700 __ j(equal, &function);
3702 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3703 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3704 LAST_SPEC_OBJECT_TYPE - 1);
3705 __ j(equal, &function);
3706 // Assume that there is no larger type.
3707 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3709 // Check if the constructor in the map is a JS function.
3710 __ GetMapConstructor(rax, rax, rbx);
3711 __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3712 __ j(not_equal, &non_function_constructor);
3714 // rax now contains the constructor function. Grab the
3715 // instance class name from there.
3716 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3717 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3718 __ jmp(&done);
3720 // Functions have class 'Function'.
3721 __ bind(&function);
3722 __ Move(rax, isolate()->factory()->Function_string());
3723 __ jmp(&done);
3725 // Objects with a non-function constructor have class 'Object'.
3726 __ bind(&non_function_constructor);
3727 __ Move(rax, isolate()->factory()->Object_string());
3728 __ jmp(&done);
3730 // Non-JS objects have class null.
3731 __ bind(&null);
3732 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3734 // All done.
3735 __ bind(&done);
3737 context()->Plug(rax);
3741 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3742 // Load the arguments on the stack and call the stub.
3743 SubStringStub stub(isolate());
3744 ZoneList<Expression*>* args = expr->arguments();
3745 DCHECK(args->length() == 3);
3746 VisitForStackValue(args->at(0));
3747 VisitForStackValue(args->at(1));
3748 VisitForStackValue(args->at(2));
3749 __ CallStub(&stub);
3750 context()->Plug(rax);
3754 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3755 // Load the arguments on the stack and call the stub.
3756 RegExpExecStub stub(isolate());
3757 ZoneList<Expression*>* args = expr->arguments();
3758 DCHECK(args->length() == 4);
3759 VisitForStackValue(args->at(0));
3760 VisitForStackValue(args->at(1));
3761 VisitForStackValue(args->at(2));
3762 VisitForStackValue(args->at(3));
3763 __ CallStub(&stub);
3764 context()->Plug(rax);
3768 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3769 ZoneList<Expression*>* args = expr->arguments();
3770 DCHECK(args->length() == 1);
3772 VisitForAccumulatorValue(args->at(0)); // Load the object.
3774 Label done;
3775 // If the object is a smi return the object.
3776 __ JumpIfSmi(rax, &done);
3777 // If the object is not a value type, return the object.
3778 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3779 __ j(not_equal, &done);
3780 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3782 __ bind(&done);
3783 context()->Plug(rax);
3787 void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
3788 ZoneList<Expression*>* args = expr->arguments();
3789 DCHECK_EQ(1, args->length());
3791 VisitForAccumulatorValue(args->at(0)); // Load the object.
3793 Register object = rax;
3794 Register result = rax;
3795 Register scratch = rcx;
3797 Label done, not_date_object;
3798 __ JumpIfSmi(object, &not_date_object, Label::kNear);
3799 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3800 __ j(equal, &done, Label::kNear);
3801 __ bind(&not_date_object);
3802 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3804 __ bind(&done);
3805 context()->Plug(result);
3809 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3810 ZoneList<Expression*>* args = expr->arguments();
3811 DCHECK(args->length() == 2);
3812 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3813 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3815 VisitForAccumulatorValue(args->at(0)); // Load the object.
3817 Register object = rax;
3818 Register result = rax;
3819 Register scratch = rcx;
3821 if (FLAG_debug_code) {
3822 __ AssertNotSmi(object);
3823 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3824 __ Check(equal, kOperandIsNotADate);
3825 }
3827 if (index->value() == 0) {
3828 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3829 } else {
3830 Label runtime, done;
3831 if (index->value() < JSDate::kFirstUncachedField) {
3832 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3833 Operand stamp_operand = __ ExternalOperand(stamp);
3834 __ movp(scratch, stamp_operand);
3835 __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3836 __ j(not_equal, &runtime, Label::kNear);
3837 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3838 kPointerSize * index->value()));
3839 __ jmp(&done, Label::kNear);
3840 }
3841 __ bind(&runtime);
3842 __ PrepareCallCFunction(2);
3843 __ movp(arg_reg_1, object);
3844 __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3845 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3846 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3847 __ bind(&done);
3848 }
3850 context()->Plug(rax);
3851 }
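// Editor's sketch of the fast path above (layout and names are illustrative,
// not the real JSDate): field 0 is the time value and is never stale; the
// other cached fields are valid only while the object's stamp matches the
// isolate-wide date cache stamp, otherwise the C function recomputes them.
struct DateSketch {
  double fields[10];  // fields[0] is the time value, the rest are cached
  long cache_stamp;   // compared against the global date cache stamp
};
static inline double GetDateFieldSketch(const DateSketch* date, int index,
                                        long isolate_stamp,
                                        double (*runtime_call)(const DateSketch*,
                                                               int)) {
  if (index == 0) return date->fields[0];
  if (date->cache_stamp == isolate_stamp) return date->fields[index];
  return runtime_call(date, index);  // stale cache: take the runtime path
}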
3854 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3855 ZoneList<Expression*>* args = expr->arguments();
3856 DCHECK_EQ(3, args->length());
3858 Register string = rax;
3859 Register index = rbx;
3860 Register value = rcx;
3862 VisitForStackValue(args->at(0)); // index
3863 VisitForStackValue(args->at(1)); // value
3864 VisitForAccumulatorValue(args->at(2)); // string
3865 __ Pop(value);
3866 __ Pop(index);
3868 if (FLAG_debug_code) {
3869 __ Check(__ CheckSmi(value), kNonSmiValue);
3870 __ Check(__ CheckSmi(index), kNonSmiValue);
3871 }
3873 __ SmiToInteger32(value, value);
3874 __ SmiToInteger32(index, index);
3876 if (FLAG_debug_code) {
3877 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3878 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3879 }
3881 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3882 value);
3883 context()->Plug(string);
3887 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3888 ZoneList<Expression*>* args = expr->arguments();
3889 DCHECK_EQ(3, args->length());
3891 Register string = rax;
3892 Register index = rbx;
3893 Register value = rcx;
3895 VisitForStackValue(args->at(0)); // index
3896 VisitForStackValue(args->at(1)); // value
3897 VisitForAccumulatorValue(args->at(2)); // string
3898 __ Pop(value);
3899 __ Pop(index);
3901 if (FLAG_debug_code) {
3902 __ Check(__ CheckSmi(value), kNonSmiValue);
3903 __ Check(__ CheckSmi(index), kNonSmiValue);
3904 }
3906 __ SmiToInteger32(value, value);
3907 __ SmiToInteger32(index, index);
3909 if (FLAG_debug_code) {
3910 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3911 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3912 }
3914 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3915 value);
3916 context()->Plug(rax);
3920 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3921 // Load the arguments on the stack and call the runtime function.
3922 ZoneList<Expression*>* args = expr->arguments();
3923 DCHECK(args->length() == 2);
3924 VisitForStackValue(args->at(0));
3925 VisitForStackValue(args->at(1));
3926 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3927 __ CallStub(&stub);
3928 context()->Plug(rax);
3932 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3933 ZoneList<Expression*>* args = expr->arguments();
3934 DCHECK(args->length() == 2);
3936 VisitForStackValue(args->at(0)); // Load the object.
3937 VisitForAccumulatorValue(args->at(1)); // Load the value.
3938 __ Pop(rbx); // rax = value. rbx = object.
3940 Label done;
3941 // If the object is a smi, return the value.
3942 __ JumpIfSmi(rbx, &done);
3944 // If the object is not a value type, return the value.
3945 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3946 __ j(not_equal, &done);
3948 // Store the value.
3949 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3950 // Update the write barrier. Save the value as it will be
3951 // overwritten by the write barrier code and is needed afterward.
3952 __ movp(rdx, rax);
3953 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3955 __ bind(&done);
3956 context()->Plug(rax);
3960 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3961 ZoneList<Expression*>* args = expr->arguments();
3962 DCHECK_EQ(args->length(), 1);
3964 // Load the argument into rax and call the stub.
3965 VisitForAccumulatorValue(args->at(0));
3967 NumberToStringStub stub(isolate());
3968 __ CallStub(&stub);
3969 context()->Plug(rax);
3973 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3974 ZoneList<Expression*>* args = expr->arguments();
3975 DCHECK(args->length() == 1);
3977 VisitForAccumulatorValue(args->at(0));
3979 Label done;
3980 StringCharFromCodeGenerator generator(rax, rbx);
3981 generator.GenerateFast(masm_);
3982 __ jmp(&done);
3984 NopRuntimeCallHelper call_helper;
3985 generator.GenerateSlow(masm_, call_helper);
3987 __ bind(&done);
3988 context()->Plug(rbx);
3992 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3993 ZoneList<Expression*>* args = expr->arguments();
3994 DCHECK(args->length() == 2);
3996 VisitForStackValue(args->at(0));
3997 VisitForAccumulatorValue(args->at(1));
3999 Register object = rbx;
4000 Register index = rax;
4001 Register result = rdx;
4003 __ Pop(object);
4005 Label need_conversion;
4006 Label index_out_of_range;
4007 Label done;
4008 StringCharCodeAtGenerator generator(object,
4009 index,
4010 result,
4011 &need_conversion,
4012 &need_conversion,
4013 &index_out_of_range,
4014 STRING_INDEX_IS_NUMBER);
4015 generator.GenerateFast(masm_);
4016 __ jmp(&done);
4018 __ bind(&index_out_of_range);
4019 // When the index is out of range, the spec requires us to return
4020 // NaN.
4021 __ LoadRoot(result, Heap::kNanValueRootIndex);
4022 __ jmp(&done);
4024 __ bind(&need_conversion);
4025 // Move the undefined value into the result register, which will
4026 // trigger conversion.
4027 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4028 __ jmp(&done);
4030 NopRuntimeCallHelper call_helper;
4031 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4033 __ bind(&done);
4034 context()->Plug(result);
4035 }
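// Editor's sketch of the three exits above: an in-range index over a flat
// one-byte string yields the char code inline, an out-of-range index yields
// NaN as the spec requires, and a non-smi index is replaced by undefined so
// the generator's slow path performs the conversion (that path is elided
// here).
static inline double CharCodeAtSketch(const unsigned char* chars,
                                      long length, long index) {
  if (index < 0 || index >= length) return 0.0 / 0.0;  // NaN
  return chars[index];  // fast path
}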
4038 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4039 ZoneList<Expression*>* args = expr->arguments();
4040 DCHECK(args->length() == 2);
4042 VisitForStackValue(args->at(0));
4043 VisitForAccumulatorValue(args->at(1));
4045 Register object = rbx;
4046 Register index = rax;
4047 Register scratch = rdx;
4048 Register result = rax;
4050 __ Pop(object);
4052 Label need_conversion;
4053 Label index_out_of_range;
4054 Label done;
4055 StringCharAtGenerator generator(object,
4056 index,
4057 scratch,
4058 result,
4059 &need_conversion,
4060 &need_conversion,
4061 &index_out_of_range,
4062 STRING_INDEX_IS_NUMBER);
4063 generator.GenerateFast(masm_);
4064 __ jmp(&done);
4066 __ bind(&index_out_of_range);
4067 // When the index is out of range, the spec requires us to return
4068 // the empty string.
4069 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4070 __ jmp(&done);
4072 __ bind(&need_conversion);
4073 // Move smi zero into the result register, which will trigger
4074 // conversion.
4075 __ Move(result, Smi::FromInt(0));
4076 __ jmp(&done);
4078 NopRuntimeCallHelper call_helper;
4079 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4081 __ bind(&done);
4082 context()->Plug(result);
4086 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4087 ZoneList<Expression*>* args = expr->arguments();
4088 DCHECK_EQ(2, args->length());
4089 VisitForStackValue(args->at(0));
4090 VisitForAccumulatorValue(args->at(1));
4092 __ Pop(rdx);
4093 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4094 __ CallStub(&stub);
4095 context()->Plug(rax);
4099 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4100 ZoneList<Expression*>* args = expr->arguments();
4101 DCHECK_EQ(2, args->length());
4103 VisitForStackValue(args->at(0));
4104 VisitForStackValue(args->at(1));
4106 StringCompareStub stub(isolate());
4107 __ CallStub(&stub);
4108 context()->Plug(rax);
4112 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4113 ZoneList<Expression*>* args = expr->arguments();
4114 DCHECK(args->length() >= 2);
4116 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4117 for (int i = 0; i < arg_count + 1; i++) {
4118 VisitForStackValue(args->at(i));
4119 }
4120 VisitForAccumulatorValue(args->last()); // Function.
4122 Label runtime, done;
4123 // Check for non-function argument (including proxy).
4124 __ JumpIfSmi(rax, &runtime);
4125 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
4126 __ j(not_equal, &runtime);
4128 // InvokeFunction requires the function in rdi. Move it in there.
4129 __ movp(rdi, result_register());
4130 ParameterCount count(arg_count);
4131 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
4132 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4133 __ jmp(&done);
4135 __ bind(&runtime);
4136 __ Push(rax);
4137 __ CallRuntime(Runtime::kCall, args->length());
4138 __ bind(&done);
4140 context()->Plug(rax);
4144 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4145 ZoneList<Expression*>* args = expr->arguments();
4146 DCHECK(args->length() == 2);
4149 VisitForStackValue(args->at(0));
4152 VisitForStackValue(args->at(1));
4153 __ CallRuntime(Runtime::kGetPrototype, 1);
4154 __ Push(result_register());
4156 // Check if the calling frame is an arguments adaptor frame.
4157 Label adaptor_frame, args_set_up, runtime;
4158 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
4159 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
4160 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4161 __ j(equal, &adaptor_frame);
4162 // default constructor has no arguments, so no adaptor frame means no args.
4163 __ movp(rax, Immediate(0));
4164 __ jmp(&args_set_up);
4166 // Copy arguments from adaptor frame.
4168 __ bind(&adaptor_frame);
4169 __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4170 __ SmiToInteger64(rcx, rcx);
4172 // Subtract 1 from arguments count, for new.target.
4173 __ subp(rcx, Immediate(1));
4174 __ movp(rax, rcx);
4175 __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
4176 StandardFrameConstants::kCallerSPOffset));
4177 Label loop;
4178 __ bind(&loop);
4179 __ Push(Operand(rdx, -1 * kPointerSize));
4180 __ subp(rdx, Immediate(kPointerSize));
4181 __ decp(rcx);
4182 __ j(not_zero, &loop);
4185 __ bind(&args_set_up);
4186 __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0));
4187 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
4189 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4190 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4192 __ Drop(1);
4194 context()->Plug(result_register());
4198 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4199 RegExpConstructResultStub stub(isolate());
4200 ZoneList<Expression*>* args = expr->arguments();
4201 DCHECK(args->length() == 3);
4202 VisitForStackValue(args->at(0));
4203 VisitForStackValue(args->at(1));
4204 VisitForAccumulatorValue(args->at(2));
4205 __ Pop(rbx);
4206 __ Pop(rcx);
4207 __ CallStub(&stub);
4208 context()->Plug(rax);
4212 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4213 ZoneList<Expression*>* args = expr->arguments();
4214 DCHECK_EQ(2, args->length());
4216 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4217 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4219 Handle<FixedArray> jsfunction_result_caches(
4220 isolate()->native_context()->jsfunction_result_caches());
4221 if (jsfunction_result_caches->length() <= cache_id) {
4222 __ Abort(kAttemptToUseUndefinedCache);
4223 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4224 context()->Plug(rax);
4225 return;
4226 }
4228 VisitForAccumulatorValue(args->at(1));
4230 Register key = rax;
4231 Register cache = rbx;
4232 Register tmp = rcx;
4233 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
4234 __ movp(cache,
4235 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4236 __ movp(cache,
4237 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4238 __ movp(cache,
4239 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4241 Label done, not_found;
4242 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4243 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4244 // tmp now holds finger offset as a smi.
4245 SmiIndex index =
4246 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
4247 __ cmpp(key, FieldOperand(cache,
4248 index.reg,
4249 index.scale,
4250 FixedArray::kHeaderSize));
4251 __ j(not_equal, &not_found, Label::kNear);
4252 __ movp(rax, FieldOperand(cache,
4253 index.reg,
4254 index.scale,
4255 FixedArray::kHeaderSize + kPointerSize));
4256 __ jmp(&done, Label::kNear);
4258 __ bind(&not_found);
4259 // Call runtime to perform the lookup.
4260 __ Push(cache);
4261 __ Push(key);
4262 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4264 __ bind(&done);
4265 context()->Plug(rax);
4266 }
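// Editor's sketch of the probe above (illustrative layout): the cache is a
// flat array of key/value pairs and the "finger" marks the entry that hit
// most recently; only that one entry is checked inline, and any miss goes to
// Runtime::kGetFromCacheRT.
struct ResultCacheSketch {
  int finger;       // element index of the last hit's key slot
  int entries[64];  // key at entries[i], value at entries[i + 1]
};
static inline bool ProbeCacheSketch(const ResultCacheSketch* cache, int key,
                                    int* value_out) {
  if (cache->entries[cache->finger] == key) {
    *value_out = cache->entries[cache->finger + 1];
    return true;  // inline hit
  }
  return false;  // miss: the generated code calls the runtime
}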
4269 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4270 ZoneList<Expression*>* args = expr->arguments();
4271 DCHECK(args->length() == 1);
4273 VisitForAccumulatorValue(args->at(0));
4275 Label materialize_true, materialize_false;
4276 Label* if_true = NULL;
4277 Label* if_false = NULL;
4278 Label* fall_through = NULL;
4279 context()->PrepareTest(&materialize_true, &materialize_false,
4280 &if_true, &if_false, &fall_through);
4282 __ testl(FieldOperand(rax, String::kHashFieldOffset),
4283 Immediate(String::kContainsCachedArrayIndexMask));
4284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4285 __ j(zero, if_true);
4286 __ jmp(if_false);
4288 context()->Plug(if_true, if_false);
4289 }
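// Editor's sketch of the test above: a string's hash field packs flags
// together with either a computed hash or a cached array index, and the mask
// selects bits that are all zero exactly when a cached index is present,
// which is why the generated code branches to if_true on zero.
static inline bool HasCachedArrayIndexSketch(unsigned hash_field,
                                             unsigned contains_index_mask) {
  return (hash_field & contains_index_mask) == 0;
}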
4292 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4293 ZoneList<Expression*>* args = expr->arguments();
4294 DCHECK(args->length() == 1);
4295 VisitForAccumulatorValue(args->at(0));
4297 __ AssertString(rax);
4299 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
4300 DCHECK(String::kHashShift >= kSmiTagSize);
4301 __ IndexFromHash(rax, rax);
4303 context()->Plug(rax);
4307 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4308 Label bailout, return_result, done, one_char_separator, long_separator,
4309 non_trivial_array, not_size_one_array, loop,
4310 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4311 ZoneList<Expression*>* args = expr->arguments();
4312 DCHECK(args->length() == 2);
4313 // We will leave the separator on the stack until the end of the function.
4314 VisitForStackValue(args->at(1));
4315 // Load this to rax (= array)
4316 VisitForAccumulatorValue(args->at(0));
4317 // All aliases of the same register have disjoint lifetimes.
4318 Register array = rax;
4319 Register elements = no_reg; // Will be rax.
4321 Register index = rdx;
4323 Register string_length = rcx;
4325 Register string = rsi;
4327 Register scratch = rbx;
4329 Register array_length = rdi;
4330 Register result_pos = no_reg; // Will be rdi.
4332 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
4333 Operand result_operand = Operand(rsp, 1 * kPointerSize);
4334 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
4335 // Separator operand is already pushed. Make room for the two
4336 // other stack fields, and clear the direction flag in anticipation
4337 // of calling CopyBytes.
4338 __ subp(rsp, Immediate(2 * kPointerSize));
4340 // Check that the array is a JSArray
4341 __ JumpIfSmi(array, &bailout);
4342 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4343 __ j(not_equal, &bailout);
4345 // Check that the array has fast elements.
4346 __ CheckFastElements(scratch, &bailout);
4348 // Array has fast elements, so its length must be a smi.
4349 // If the array has length zero, return the empty string.
4350 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
4351 __ SmiCompare(array_length, Smi::FromInt(0));
4352 __ j(not_zero, &non_trivial_array);
4353 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
4354 __ jmp(&return_result);
4356 // Save the array length on the stack.
4357 __ bind(&non_trivial_array);
4358 __ SmiToInteger32(array_length, array_length);
4359 __ movl(array_length_operand, array_length);
4361 // Save the FixedArray containing array's elements.
4362 // End of array's live range.
4363 elements = array;
4364 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
4365 array = no_reg;
4368 // Check that all array elements are sequential one-byte strings, and
4369 // accumulate the sum of their lengths, as a smi-encoded value.
4370 __ Set(index, 0);
4371 __ Set(string_length, 0);
4372 // Loop condition: while (index < array_length).
4373 // Live loop registers: index(int32), array_length(int32), string(String*),
4374 // scratch, string_length(int32), elements(FixedArray*).
4375 if (generate_debug_code_) {
4376 __ cmpp(index, array_length);
4377 __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4378 }
4379 __ bind(&loop);
4380 __ movp(string, FieldOperand(elements,
4381 index,
4382 times_pointer_size,
4383 FixedArray::kHeaderSize));
4384 __ JumpIfSmi(string, &bailout);
4385 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4386 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4387 __ andb(scratch, Immediate(
4388 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4389 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4390 __ j(not_equal, &bailout);
4391 __ AddSmiField(string_length,
4392 FieldOperand(string, SeqOneByteString::kLengthOffset));
4393 __ j(overflow, &bailout);
4394 __ incl(index);
4395 __ cmpl(index, array_length);
4396 __ j(below, &loop);
4399 // string_length: Sum of string lengths.
4400 // elements: FixedArray of strings.
4401 // index: Array length.
4402 // array_length: Array length.
4404 // If array_length is 1, return elements[0], a string.
4405 __ cmpl(array_length, Immediate(1));
4406 __ j(not_equal, &not_size_one_array);
4407 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
4408 __ jmp(&return_result);
4410 __ bind(&not_size_one_array);
4412 // End of array_length live range.
4413 result_pos = array_length;
4414 array_length = no_reg;
4417 // string_length: Sum of string lengths.
4418 // elements: FixedArray of strings.
4419 // index: Array length.
4421 // Check that the separator is a sequential one-byte string.
4422 __ movp(string, separator_operand);
4423 __ JumpIfSmi(string, &bailout);
4424 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4425 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4426 __ andb(scratch, Immediate(
4427 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4428 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4429 __ j(not_equal, &bailout);
4432 // string_length: Sum of string lengths.
4433 // elements: FixedArray of strings.
4434 // index: Array length.
4435 // string: Separator string.
4437 // Add (separator length times (array_length - 1)) to string_length.
4438 __ SmiToInteger32(scratch,
4439 FieldOperand(string, SeqOneByteString::kLengthOffset));
4440 __ decl(index);
4441 __ imull(scratch, index);
4442 __ j(overflow, &bailout);
4443 __ addl(string_length, scratch);
4444 __ j(overflow, &bailout);
4446 // Live registers and stack values:
4447 // string_length: Total length of result string.
4448 // elements: FixedArray of strings.
4449 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4450 &bailout);
4451 __ movp(result_operand, result_pos);
4452 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4454 __ movp(string, separator_operand);
4455 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4456 Smi::FromInt(1));
4457 __ j(equal, &one_char_separator);
4458 __ j(greater, &long_separator);
4461 // Empty separator case:
4462 __ Set(index, 0);
4463 __ movl(scratch, array_length_operand);
4464 __ jmp(&loop_1_condition);
4465 // Loop condition: while (index < array_length).
4466 __ bind(&loop_1);
4467 // Each iteration of the loop concatenates one string to the result.
4468 // Live values in registers:
4469 // index: which element of the elements array we are adding to the result.
4470 // result_pos: the position to which we are currently copying characters.
4471 // elements: the FixedArray of strings we are joining.
4472 // scratch: array length.
4474 // Get string = array[index].
4475 __ movp(string, FieldOperand(elements, index,
4476 times_pointer_size,
4477 FixedArray::kHeaderSize));
4478 __ SmiToInteger32(string_length,
4479 FieldOperand(string, String::kLengthOffset));
4480 __ leap(string,
4481 FieldOperand(string, SeqOneByteString::kHeaderSize));
4482 __ CopyBytes(result_pos, string, string_length);
4483 __ incl(index);
4484 __ bind(&loop_1_condition);
4485 __ cmpl(index, scratch);
4486 __ j(less, &loop_1); // Loop while (index < array_length).
4489 // Generic bailout code used from several places.
4490 __ bind(&bailout);
4491 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4492 __ jmp(&return_result);
4495 // One-character separator case
4496 __ bind(&one_char_separator);
4497 // Get the separator one-byte character value.
4498 // Register "string" holds the separator.
4499 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4500 __ Set(index, 0);
4501 // Jump into the loop after the code that copies the separator, so the first
4502 // element is not preceded by a separator
4503 __ jmp(&loop_2_entry);
4504 // Loop condition: while (index < length).
4505 __ bind(&loop_2);
4506 // Each iteration of the loop concatenates one string to the result.
4507 // Live values in registers:
4508 // elements: The FixedArray of strings we are joining.
4509 // index: which element of the elements array we are adding to the result.
4510 // result_pos: the position to which we are currently copying characters.
4511 // scratch: Separator character.
4513 // Copy the separator character to the result.
4514 __ movb(Operand(result_pos, 0), scratch);
4515 __ incp(result_pos);
4517 __ bind(&loop_2_entry);
4518 // Get string = array[index].
4519 __ movp(string, FieldOperand(elements, index,
4520 times_pointer_size,
4521 FixedArray::kHeaderSize));
4522 __ SmiToInteger32(string_length,
4523 FieldOperand(string, String::kLengthOffset));
4524 __ leap(string,
4525 FieldOperand(string, SeqOneByteString::kHeaderSize));
4526 __ CopyBytes(result_pos, string, string_length);
4527 __ incl(index);
4528 __ cmpl(index, array_length_operand);
4529 __ j(less, &loop_2); // End while (index < length).
4530 __ jmp(&done);
4533 // Long separator case (separator is more than one character).
4534 __ bind(&long_separator);
4536 // Make elements point to end of elements array, and index
4537 // count from -array_length to zero, so we don't need to maintain
4538 // a loop limit.
4539 __ movl(index, array_length_operand);
4540 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4541 FixedArray::kHeaderSize));
4542 __ negq(index);
4544 // Replace separator string with pointer to its first character, and
4545 // make scratch be its length.
4546 __ movp(string, separator_operand);
4547 __ SmiToInteger32(scratch,
4548 FieldOperand(string, String::kLengthOffset));
4549 __ leap(string,
4550 FieldOperand(string, SeqOneByteString::kHeaderSize));
4551 __ movp(separator_operand, string);
4553 // Jump into the loop after the code that copies the separator, so the first
4554 // element is not preceded by a separator
4555 __ jmp(&loop_3_entry);
4556 // Loop condition: while (index < length).
4557 __ bind(&loop_3);
4558 // Each iteration of the loop concatenates one string to the result.
4559 // Live values in registers:
4560 // index: which element of the elements array we are adding to the result.
4561 // result_pos: the position to which we are currently copying characters.
4562 // scratch: Separator length.
4563 // separator_operand (rsp[0x10]): Address of first char of separator.
4565 // Copy the separator to the result.
4566 __ movp(string, separator_operand);
4567 __ movl(string_length, scratch);
4568 __ CopyBytes(result_pos, string, string_length, 2);
4570 __ bind(&loop_3_entry);
4571 // Get string = array[index].
4572 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4573 __ SmiToInteger32(string_length,
4574 FieldOperand(string, String::kLengthOffset));
4575 __ leap(string,
4576 FieldOperand(string, SeqOneByteString::kHeaderSize));
4577 __ CopyBytes(result_pos, string, string_length);
4578 __ incq(index);
4579 __ j(not_equal, &loop_3); // Loop while (index < 0).
4581 __ bind(&done);
4582 __ movp(rax, result_operand);
4584 __ bind(&return_result);
4585 // Drop temp values from the stack, and restore context register.
4586 __ addp(rsp, Immediate(3 * kPointerSize));
4587 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4588 context()->Plug(rax);
4589 }
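// Editor's sketch of the join above in plain C++ (bailouts for non-flat or
// non-one-byte inputs are elided). The generated code splits this single
// loop into three specializations -- empty separator, one-byte separator,
// longer separator -- purely to cheapen the separator copy; the output
// buffer is sized up front from the summed lengths, exactly as computed
// above.
static inline char* JoinSketch(const char* const* parts,
                               const unsigned* part_lengths, unsigned count,
                               const char* sep, unsigned sep_length,
                               char* out) {
  for (unsigned i = 0; i < count; i++) {
    if (i > 0) {  // copy the separator between elements only
      for (unsigned j = 0; j < sep_length; j++) *out++ = sep[j];
    }
    for (unsigned j = 0; j < part_lengths[i]; j++) *out++ = parts[i][j];
  }
  return out;
}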
4592 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4593 DCHECK(expr->arguments()->length() == 0);
4594 ExternalReference debug_is_active =
4595 ExternalReference::debug_is_active_address(isolate());
4596 __ Move(kScratchRegister, debug_is_active);
4597 __ movzxbp(rax, Operand(kScratchRegister, 0));
4598 __ Integer32ToSmi(rax, rax);
4599 context()->Plug(rax);
4603 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4604 // Assert: expr === CallRuntime("ReflectConstruct")
4605 DCHECK_EQ(1, expr->arguments()->length());
4606 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4608 ZoneList<Expression*>* args = call->arguments();
4609 DCHECK_EQ(3, args->length());
4611 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4612 DCHECK_NOT_NULL(super_call_ref);
4614 // Load ReflectConstruct function
4615 EmitLoadJSRuntimeFunction(call);
4617 // Push the target function under the receiver.
4618 __ Push(Operand(rsp, 0));
4619 __ movp(Operand(rsp, kPointerSize), rax);
4621 // Push super constructor
4622 EmitLoadSuperConstructor(super_call_ref);
4623 __ Push(result_register());
4625 // Push arguments array
4626 VisitForStackValue(args->at(1));
4629 DCHECK(args->at(2)->IsVariableProxy());
4630 VisitForStackValue(args->at(2));
4632 EmitCallJSRuntimeFunction(call);
4634 // Restore context register.
4635 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4636 context()->DropAndPlug(1, rax);
4638 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4639 EmitInitializeThisAfterSuper(super_call_ref);
4643 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4644 // Push the builtins object as receiver.
4645 __ movp(rax, GlobalObjectOperand());
4646 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4648 // Load the function from the receiver.
4649 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4650 __ Move(LoadDescriptor::NameRegister(), expr->name());
4651 __ Move(LoadDescriptor::SlotRegister(),
4652 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4653 CallLoadIC(NOT_CONTEXTUAL);
4657 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4658 ZoneList<Expression*>* args = expr->arguments();
4659 int arg_count = args->length();
4661 // Record source position of the IC call.
4662 SetSourcePosition(expr->position());
4663 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4664 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4665 __ CallStub(&stub);
4666 }
4669 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4670 ZoneList<Expression*>* args = expr->arguments();
4671 int arg_count = args->length();
4673 if (expr->is_jsruntime()) {
4674 Comment cmnt(masm_, "[ CallRuntime");
4676 EmitLoadJSRuntimeFunction(expr);
4678 // Push the target function under the receiver.
4679 __ Push(Operand(rsp, 0));
4680 __ movp(Operand(rsp, kPointerSize), rax);
4682 // Push the arguments ("left-to-right").
4683 for (int i = 0; i < arg_count; i++) {
4684 VisitForStackValue(args->at(i));
4685 }
4687 EmitCallJSRuntimeFunction(expr);
4689 // Restore context register.
4690 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4691 context()->DropAndPlug(1, rax);
4692 } else {
4694 const Runtime::Function* function = expr->function();
4695 switch (function->function_id) {
4696 #define CALL_INTRINSIC_GENERATOR(Name) \
4697 case Runtime::kInline##Name: { \
4698 Comment cmnt(masm_, "[ Inline" #Name); \
4699 return Emit##Name(expr); \
4700 }
4701 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4702 #undef CALL_INTRINSIC_GENERATOR
4703 default: {
4704 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4705 // Push the arguments ("left-to-right").
4706 for (int i = 0; i < arg_count; i++) {
4707 VisitForStackValue(args->at(i));
4708 }
4710 // Call the C runtime.
4711 __ CallRuntime(function, arg_count);
4712 context()->Plug(rax);
4713 }
4714 }
4715 }
4716 }
4719 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4720 switch (expr->op()) {
4721 case Token::DELETE: {
4722 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4723 Property* property = expr->expression()->AsProperty();
4724 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4726 if (property != NULL) {
4727 VisitForStackValue(property->obj());
4728 VisitForStackValue(property->key());
4729 __ Push(Smi::FromInt(language_mode()));
4730 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4731 context()->Plug(rax);
4732 } else if (proxy != NULL) {
4733 Variable* var = proxy->var();
4734 // Delete of an unqualified identifier is disallowed in strict mode
4735 // but "delete this" is allowed.
4736 DCHECK(is_sloppy(language_mode()) || var->is_this());
4737 if (var->IsUnallocated()) {
4738 __ Push(GlobalObjectOperand());
4739 __ Push(var->name());
4740 __ Push(Smi::FromInt(SLOPPY));
4741 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4742 context()->Plug(rax);
4743 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4744 // Result of deleting non-global variables is false. 'this' is
4745 // not really a variable, though we implement it as one. The
4746 // subexpression does not have side effects.
4747 context()->Plug(var->is_this());
4748 } else {
4749 // Non-global variable. Call the runtime to try to delete from the
4750 // context where the variable was introduced.
4751 __ Push(context_register());
4752 __ Push(var->name());
4753 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4754 context()->Plug(rax);
4755 }
4756 } else {
4757 // Result of deleting non-property, non-variable reference is true.
4758 // The subexpression may have side effects.
4759 VisitForEffect(expr->expression());
4760 context()->Plug(true);
4761 }
4762 break;
4763 }
4765 case Token::VOID: {
4766 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4767 VisitForEffect(expr->expression());
4768 context()->Plug(Heap::kUndefinedValueRootIndex);
4769 break;
4770 }
4772 case Token::NOT: {
4773 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4774 if (context()->IsEffect()) {
4775 // Unary NOT has no side effects so it's only necessary to visit the
4776 // subexpression. Match the optimizing compiler by not branching.
4777 VisitForEffect(expr->expression());
4778 } else if (context()->IsTest()) {
4779 const TestContext* test = TestContext::cast(context());
4780 // The labels are swapped for the recursive call.
4781 VisitForControl(expr->expression(),
4782 test->false_label(),
4783 test->true_label(),
4784 test->fall_through());
4785 context()->Plug(test->true_label(), test->false_label());
4786 } else {
4787 // We handle value contexts explicitly rather than simply visiting
4788 // for control and plugging the control flow into the context,
4789 // because we need to prepare a pair of extra administrative AST ids
4790 // for the optimizing compiler.
4791 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4792 Label materialize_true, materialize_false, done;
4793 VisitForControl(expr->expression(),
4794 &materialize_false,
4795 &materialize_true,
4796 &materialize_true);
4797 __ bind(&materialize_true);
4798 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4799 if (context()->IsAccumulatorValue()) {
4800 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4801 } else {
4802 __ PushRoot(Heap::kTrueValueRootIndex);
4803 }
4804 __ jmp(&done, Label::kNear);
4805 __ bind(&materialize_false);
4806 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4807 if (context()->IsAccumulatorValue()) {
4808 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4809 } else {
4810 __ PushRoot(Heap::kFalseValueRootIndex);
4811 }
4812 __ bind(&done);
4813 }
4814 break;
4815 }
4817 case Token::TYPEOF: {
4818 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4819 {
4820 AccumulatorValueContext context(this);
4821 VisitForTypeofValue(expr->expression());
4822 }
4824 TypeofStub typeof_stub(isolate());
4825 __ CallStub(&typeof_stub);
4826 context()->Plug(rax);
4827 break;
4828 }
4830 default:
4831 UNREACHABLE();
4832 }
4833 }
4836 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4837 DCHECK(expr->expression()->IsValidReferenceExpression());
4839 Comment cmnt(masm_, "[ CountOperation");
4840 SetSourcePosition(expr->position());
4842 Property* prop = expr->expression()->AsProperty();
4843 LhsKind assign_type = Property::GetAssignType(prop);
4845 // Evaluate expression and get value.
4846 if (assign_type == VARIABLE) {
4847 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4848 AccumulatorValueContext context(this);
4849 EmitVariableLoad(expr->expression()->AsVariableProxy());
4850 } else {
4851 // Reserve space for result of postfix operation.
4852 if (expr->is_postfix() && !context()->IsEffect()) {
4853 __ Push(Smi::FromInt(0));
4854 }
4855 switch (assign_type) {
4856 case NAMED_PROPERTY: {
4857 VisitForStackValue(prop->obj());
4858 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4859 EmitNamedPropertyLoad(prop);
4860 break;
4861 }
4863 case NAMED_SUPER_PROPERTY: {
4864 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4865 VisitForAccumulatorValue(
4866 prop->obj()->AsSuperPropertyReference()->home_object());
4867 __ Push(result_register());
4868 __ Push(MemOperand(rsp, kPointerSize));
4869 __ Push(result_register());
4870 EmitNamedSuperPropertyLoad(prop);
4871 break;
4872 }
4874 case KEYED_SUPER_PROPERTY: {
4875 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4876 VisitForStackValue(
4877 prop->obj()->AsSuperPropertyReference()->home_object());
4878 VisitForAccumulatorValue(prop->key());
4879 __ Push(result_register());
4880 __ Push(MemOperand(rsp, 2 * kPointerSize));
4881 __ Push(MemOperand(rsp, 2 * kPointerSize));
4882 __ Push(result_register());
4883 EmitKeyedSuperPropertyLoad(prop);
4884 break;
4885 }
4887 case KEYED_PROPERTY: {
4888 VisitForStackValue(prop->obj());
4889 VisitForStackValue(prop->key());
4890 // Leave receiver on stack
4891 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4892 // Copy of key, needed for later store.
4893 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4894 EmitKeyedPropertyLoad(prop);
4895 break;
4896 }
4898 case VARIABLE:
4899 UNREACHABLE();
4900 }
4901 }
4903 // We need a second deoptimization point after loading the value
4904 // in case evaluating the property load may have a side effect.
4905 if (assign_type == VARIABLE) {
4906 PrepareForBailout(expr->expression(), TOS_REG);
4908 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4909 }
4911 // Inline smi case if we are in a loop.
4912 Label done, stub_call;
4913 JumpPatchSite patch_site(masm_);
4914 if (ShouldInlineSmiCase(expr->op())) {
4915 Label slow;
4916 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4918 // Save result for postfix expressions.
4919 if (expr->is_postfix()) {
4920 if (!context()->IsEffect()) {
4921 // Save the result on the stack. If we have a named or keyed property
4922 // we store the result under the receiver that is currently on top
4923 // of the stack.
4924 switch (assign_type) {
4925 case VARIABLE:
4926 __ Push(rax);
4927 break;
4928 case NAMED_PROPERTY:
4929 __ movp(Operand(rsp, kPointerSize), rax);
4930 break;
4931 case NAMED_SUPER_PROPERTY:
4932 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4933 break;
4934 case KEYED_PROPERTY:
4935 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4936 break;
4937 case KEYED_SUPER_PROPERTY:
4938 __ movp(Operand(rsp, 3 * kPointerSize), rax);
4939 break;
4940 }
4941 }
4942 }
4944 SmiOperationExecutionMode mode;
4945 mode.Add(PRESERVE_SOURCE_REGISTER);
4946 mode.Add(BAILOUT_ON_NO_OVERFLOW);
4947 if (expr->op() == Token::INC) {
4948 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4949 } else {
4950 __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4951 }
4952 __ jmp(&stub_call, Label::kNear);
4953 __ bind(&slow);
4954 }
4956 ToNumberStub convert_stub(isolate());
4957 __ CallStub(&convert_stub);
4958 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4960 // Save result for postfix expressions.
4961 if (expr->is_postfix()) {
4962 if (!context()->IsEffect()) {
4963 // Save the result on the stack. If we have a named or keyed property
4964 // we store the result under the receiver that is currently on top
4965 // of the stack.
4966 switch (assign_type) {
4967 case VARIABLE:
4968 __ Push(rax);
4969 break;
4970 case NAMED_PROPERTY:
4971 __ movp(Operand(rsp, kPointerSize), rax);
4972 break;
4973 case NAMED_SUPER_PROPERTY:
4974 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4975 break;
4976 case KEYED_PROPERTY:
4977 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4978 break;
4979 case KEYED_SUPER_PROPERTY:
4980 __ movp(Operand(rsp, 3 * kPointerSize), rax);
4981 break;
4982 }
4983 }
4984 }
4986 // Record position before stub call.
4987 SetSourcePosition(expr->position());
4989 // Call stub for +1/-1.
4990 __ bind(&stub_call);
4991 __ movp(rdx, rax);
4992 __ Move(rax, Smi::FromInt(1));
4993 Handle<Code> code = CodeFactory::BinaryOpIC(
4994 isolate(), expr->binary_op(), language_mode()).code();
4995 CallIC(code, expr->CountBinOpFeedbackId());
4996 patch_site.EmitPatchInfo();
4997 __ bind(&done);
4999 // Store the value returned in rax.
5000 switch (assign_type) {
5001 case VARIABLE:
5002 if (expr->is_postfix()) {
5003 // Perform the assignment as if via '='.
5004 { EffectContext context(this);
5005 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5006 Token::ASSIGN, expr->CountSlot());
5007 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5008 context.Plug(rax);
5009 }
5010 // For all contexts except kEffect: We have the result on
5011 // top of the stack.
5012 if (!context()->IsEffect()) {
5013 context()->PlugTOS();
5014 }
5015 } else {
5016 // Perform the assignment as if via '='.
5017 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5018 Token::ASSIGN, expr->CountSlot());
5019 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5020 context()->Plug(rax);
5021 }
5022 break;
5023 case NAMED_PROPERTY: {
5024 __ Move(StoreDescriptor::NameRegister(),
5025 prop->key()->AsLiteral()->value());
5026 __ Pop(StoreDescriptor::ReceiverRegister());
5027 if (FLAG_vector_stores) {
5028 EmitLoadStoreICSlot(expr->CountSlot());
5029 CallStoreIC();
5030 } else {
5031 CallStoreIC(expr->CountStoreFeedbackId());
5032 }
5033 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5034 if (expr->is_postfix()) {
5035 if (!context()->IsEffect()) {
5036 context()->PlugTOS();
5037 }
5038 } else {
5039 context()->Plug(rax);
5040 }
5041 break;
5042 }
5043 case NAMED_SUPER_PROPERTY: {
5044 EmitNamedSuperPropertyStore(prop);
5045 if (expr->is_postfix()) {
5046 if (!context()->IsEffect()) {
5047 context()->PlugTOS();
5048 }
5049 } else {
5050 context()->Plug(rax);
5051 }
5052 break;
5053 }
5054 case KEYED_SUPER_PROPERTY: {
5055 EmitKeyedSuperPropertyStore(prop);
5056 if (expr->is_postfix()) {
5057 if (!context()->IsEffect()) {
5058 context()->PlugTOS();
5059 }
5060 } else {
5061 context()->Plug(rax);
5062 }
5063 break;
5064 }
5065 case KEYED_PROPERTY: {
5066 __ Pop(StoreDescriptor::NameRegister());
5067 __ Pop(StoreDescriptor::ReceiverRegister());
5068 Handle<Code> ic =
5069 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5070 if (FLAG_vector_stores) {
5071 EmitLoadStoreICSlot(expr->CountSlot());
5072 CallIC(ic);
5073 } else {
5074 CallIC(ic, expr->CountStoreFeedbackId());
5075 }
5076 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5077 if (expr->is_postfix()) {
5078 if (!context()->IsEffect()) {
5079 context()->PlugTOS();
5080 }
5081 } else {
5082 context()->Plug(rax);
5083 }
5084 break;
5085 }
5086 }
5087 }
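// Editor's sketch of the inlined smi fast path above, with plain integers
// (assumes x64's smi scheme: the 32-bit payload lives in the upper half of
// the word and heap objects have the low bit set). Returns false where the
// generated code falls back to the BinaryOpIC stub.
static inline bool SmiCountOperationSketch(long long tagged, bool increment,
                                           long long* result_out) {
  if (tagged & 1) return false;  // not a smi
  long long delta = increment ? (1LL << 32) : -(1LL << 32);
  // Add in unsigned arithmetic, then detect signed overflow the way the
  // hardware overflow flag would; on overflow the real code takes the stub.
  long long result =
      static_cast<long long>(static_cast<unsigned long long>(tagged) +
                             static_cast<unsigned long long>(delta));
  if (((tagged ^ result) & (delta ^ result)) < 0) return false;  // overflow
  *result_out = result;
  return true;
}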
5090 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5091 VariableProxy* proxy = expr->AsVariableProxy();
5092 DCHECK(!context()->IsEffect());
5093 DCHECK(!context()->IsTest());
5095 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5096 Comment cmnt(masm_, "[ Global variable");
5097 __ Move(LoadDescriptor::NameRegister(), proxy->name());
5098 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5099 __ Move(LoadDescriptor::SlotRegister(),
5100 SmiFromSlot(proxy->VariableFeedbackSlot()));
5101 // Use a regular load, not a contextual load, to avoid a reference
5102 // error.
5103 CallLoadIC(NOT_CONTEXTUAL);
5104 PrepareForBailout(expr, TOS_REG);
5105 context()->Plug(rax);
5106 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5107 Comment cmnt(masm_, "[ Lookup slot");
5108 Label done, slow;
5110 // Generate code for loading from variables potentially shadowed
5111 // by eval-introduced variables.
5112 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5114 __ bind(&slow);
5115 __ Push(rsi);
5116 __ Push(proxy->name());
5117 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5118 PrepareForBailout(expr, TOS_REG);
5119 __ bind(&done);
5121 context()->Plug(rax);
5122 } else {
5123 // This expression cannot throw a reference error at the top level.
5124 VisitInDuplicateContext(expr);
5129 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5130 Expression* sub_expr,
5131 Handle<String> check) {
5132 Label materialize_true, materialize_false;
5133 Label* if_true = NULL;
5134 Label* if_false = NULL;
5135 Label* fall_through = NULL;
5136 context()->PrepareTest(&materialize_true, &materialize_false,
5137 &if_true, &if_false, &fall_through);
5139 { AccumulatorValueContext context(this);
5140 VisitForTypeofValue(sub_expr);
5141 }
5142 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5144 Factory* factory = isolate()->factory();
5145 if (String::Equals(check, factory->number_string())) {
5146 __ JumpIfSmi(rax, if_true);
5147 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
5148 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
5149 Split(equal, if_true, if_false, fall_through);
5150 } else if (String::Equals(check, factory->string_string())) {
5151 __ JumpIfSmi(rax, if_false);
5152 // Check for undetectable objects => false.
5153 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
5154 __ j(above_equal, if_false);
5155 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5156 Immediate(1 << Map::kIsUndetectable));
5157 Split(zero, if_true, if_false, fall_through);
5158 } else if (String::Equals(check, factory->symbol_string())) {
5159 __ JumpIfSmi(rax, if_false);
5160 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
5161 Split(equal, if_true, if_false, fall_through);
5162 } else if (String::Equals(check, factory->boolean_string())) {
5163 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
5164 __ j(equal, if_true);
5165 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
5166 Split(equal, if_true, if_false, fall_through);
5167 } else if (String::Equals(check, factory->undefined_string())) {
5168 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
5169 __ j(equal, if_true);
5170 __ JumpIfSmi(rax, if_false);
5171 // Check for undetectable objects => true.
5172 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
5173 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5174 Immediate(1 << Map::kIsUndetectable));
5175 Split(not_zero, if_true, if_false, fall_through);
5176 } else if (String::Equals(check, factory->function_string())) {
5177 __ JumpIfSmi(rax, if_false);
5178 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5179 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
5180 __ j(equal, if_true);
5181 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
5182 Split(equal, if_true, if_false, fall_through);
5183 } else if (String::Equals(check, factory->object_string())) {
5184 __ JumpIfSmi(rax, if_false);
5185 __ CompareRoot(rax, Heap::kNullValueRootIndex);
5186 __ j(equal, if_true);
5187 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
5188 __ j(below, if_false);
5189 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5190 __ j(above, if_false);
5191 // Check for undetectable objects => false.
5192 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
5193 Immediate(1 << Map::kIsUndetectable));
5194 Split(zero, if_true, if_false, fall_through);
5195 } else {
5196 if (if_false != fall_through) __ jmp(if_false);
5197 }
5198 context()->Plug(if_true, if_false);
5202 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5203 Comment cmnt(masm_, "[ CompareOperation");
5204 SetSourcePosition(expr->position());
5206 // First we try a fast inlined version of the compare when one of
5207 // the operands is a literal.
5208 if (TryLiteralCompare(expr)) return;
5210 // Always perform the comparison for its control flow. Pack the result
5211 // into the expression's context after the comparison is performed.
5212 Label materialize_true, materialize_false;
5213 Label* if_true = NULL;
5214 Label* if_false = NULL;
5215 Label* fall_through = NULL;
5216 context()->PrepareTest(&materialize_true, &materialize_false,
5217 &if_true, &if_false, &fall_through);
5219 Token::Value op = expr->op();
5220 VisitForStackValue(expr->left());
5221 switch (op) {
5222 case Token::IN:
5223 VisitForStackValue(expr->right());
5224 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5225 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5226 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
5227 Split(equal, if_true, if_false, fall_through);
5228 break;
5230 case Token::INSTANCEOF: {
5231 VisitForStackValue(expr->right());
5232 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5233 __ CallStub(&stub);
5234 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5235 __ testp(rax, rax);
5236 // The stub returns 0 for true.
5237 Split(zero, if_true, if_false, fall_through);
5238 break;
5239 }
5241 default: {
5242 VisitForAccumulatorValue(expr->right());
5243 Condition cc = CompareIC::ComputeCondition(op);
5244 __ Pop(rdx);
5246 bool inline_smi_code = ShouldInlineSmiCase(op);
5247 JumpPatchSite patch_site(masm_);
5248 if (inline_smi_code) {
5249 Label slow_case;
5250 __ movp(rcx, rdx);
5251 __ orp(rcx, rax);
5252 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
5253 __ cmpp(rdx, rax);
5254 Split(cc, if_true, if_false, NULL);
5255 __ bind(&slow_case);
5256 }
5258 // Record position and call the compare IC.
5259 SetSourcePosition(expr->position());
5260 Handle<Code> ic =
5261 CodeFactory::CompareIC(isolate(), op, language_mode()).code();
5262 CallIC(ic, expr->CompareOperationFeedbackId());
5263 patch_site.EmitPatchInfo();
5265 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5266 __ testp(rax, rax);
5267 Split(cc, if_true, if_false, fall_through);
5268 }
5269 }
5271 // Convert the result of the comparison into one expected for this
5272 // expression's context.
5273 context()->Plug(if_true, if_false);
5277 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5278 Expression* sub_expr,
5279 NilValue nil) {
5280 Label materialize_true, materialize_false;
5281 Label* if_true = NULL;
5282 Label* if_false = NULL;
5283 Label* fall_through = NULL;
5284 context()->PrepareTest(&materialize_true, &materialize_false,
5285 &if_true, &if_false, &fall_through);
5287 VisitForAccumulatorValue(sub_expr);
5288 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5289 if (expr->op() == Token::EQ_STRICT) {
5290 Heap::RootListIndex nil_value = nil == kNullValue ?
5291 Heap::kNullValueRootIndex :
5292 Heap::kUndefinedValueRootIndex;
5293 __ CompareRoot(rax, nil_value);
5294 Split(equal, if_true, if_false, fall_through);
5296 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5297 CallIC(ic, expr->CompareOperationFeedbackId());
5298 __ testp(rax, rax);
5299 Split(not_zero, if_true, if_false, fall_through);
5301 context()->Plug(if_true, if_false);
5305 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5306 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5307 context()->Plug(rax);
5311 Register FullCodeGenerator::result_register() {
5312 return rax;
5313 }
5316 Register FullCodeGenerator::context_register() {
5317 return rsi;
5318 }
5321 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5322 DCHECK(IsAligned(frame_offset, kPointerSize));
5323 __ movp(Operand(rbp, frame_offset), value);
5327 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5328 __ movp(dst, ContextOperand(rsi, context_index));
5332 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5333 Scope* declaration_scope = scope()->DeclarationScope();
5334 if (declaration_scope->is_script_scope() ||
5335 declaration_scope->is_module_scope()) {
5336 // Contexts nested in the native context have a canonical empty function
5337 // as their closure, not the anonymous closure containing the global
5338 // code. Pass a smi sentinel and let the runtime look up the empty
5339 // function.
5340 __ Push(Smi::FromInt(0));
5341 } else if (declaration_scope->is_eval_scope()) {
5342 // Contexts created by a call to eval have the same closure as the
5343 // context calling eval, not the anonymous closure containing the eval
5344 // code. Fetch it from the context.
5345 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
5346 } else {
5347 DCHECK(declaration_scope->is_function_scope());
5348 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5349 }
5350 }
5353 // ----------------------------------------------------------------------------
5354 // Non-local control flow support.
5357 void FullCodeGenerator::EnterFinallyBlock() {
5358 DCHECK(!result_register().is(rdx));
5359 DCHECK(!result_register().is(rcx));
5360 // Cook return address on top of stack (smi encoded Code* delta)
5361 __ PopReturnAddressTo(rdx);
5362 __ Move(rcx, masm_->CodeObject());
5363 __ subp(rdx, rcx);
5364 __ Integer32ToSmi(rdx, rdx);
5365 __ Push(rdx);
5367 // Store result register while executing finally block.
5368 __ Push(result_register());
5370 // Store pending message while executing finally block.
5371 ExternalReference pending_message_obj =
5372 ExternalReference::address_of_pending_message_obj(isolate());
5373 __ Load(rdx, pending_message_obj);
5374 __ Push(rdx);
5376 ClearPendingMessage();
5377 }
5380 void FullCodeGenerator::ExitFinallyBlock() {
5381 DCHECK(!result_register().is(rdx));
5382 DCHECK(!result_register().is(rcx));
5383 // Restore pending message from stack.
5384 __ Pop(rdx);
5385 ExternalReference pending_message_obj =
5386 ExternalReference::address_of_pending_message_obj(isolate());
5387 __ Store(pending_message_obj, rdx);
5389 // Restore result register from stack.
5390 __ Pop(result_register());
5392 // Uncook return address.
5393 __ Pop(rdx);
5394 __ SmiToInteger32(rdx, rdx);
5395 __ Move(rcx, masm_->CodeObject());
5396 __ addp(rdx, rcx);
5397 __ jmp(rdx);
5398 }
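// Editor's sketch of the cooking above: the return address is kept as a
// smi-encoded offset from the code object's start, so it survives the GC
// moving the code while the finally block runs; uncooking reverses it.
// kSmiShiftSketch is illustrative (x64 smis shift the payload by 32).
static const int kSmiShiftSketch = 32;
static inline long long CookReturnAddressSketch(long long return_address,
                                                long long code_object_start) {
  return (return_address - code_object_start) << kSmiShiftSketch;
}
static inline long long UncookReturnAddressSketch(long long cooked,
                                                  long long code_object_start) {
  return (cooked >> kSmiShiftSketch) + code_object_start;
}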
5401 void FullCodeGenerator::ClearPendingMessage() {
5402 DCHECK(!result_register().is(rdx));
5403 ExternalReference pending_message_obj =
5404 ExternalReference::address_of_pending_message_obj(isolate());
5405 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
5406 __ Store(pending_message_obj, rdx);
5410 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5411 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5412 __ Move(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
5413 }
5416 #undef __
5419 static const byte kJnsInstruction = 0x79;
5420 static const byte kNopByteOne = 0x66;
5421 static const byte kNopByteTwo = 0x90;
5423 static const byte kCallInstruction = 0xe8;
5424 static const byte kJnsOffset = 0x1d;
5427 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5428 Address pc,
5429 BackEdgeState target_state,
5430 Code* replacement_code) {
5431 Address call_target_address = pc - kIntSize;
5432 Address jns_instr_address = call_target_address - 3;
5433 Address jns_offset_address = call_target_address - 2;
5435 switch (target_state) {
5436 case INTERRUPT:
5437 // sub <profiling_counter>, <delta> ;; Not changed
5438 // jns ok
5439 // call <interrupt stub>
5440 // ok:
5441 *jns_instr_address = kJnsInstruction;
5442 *jns_offset_address = kJnsOffset;
5443 break;
5444 case ON_STACK_REPLACEMENT:
5445 case OSR_AFTER_STACK_CHECK:
5446 // sub <profiling_counter>, <delta> ;; Not changed
5447 // nop
5448 // nop
5449 // call <on-stack replacement>
5450 // ok:
5451 *jns_instr_address = kNopByteOne;
5452 *jns_offset_address = kNopByteTwo;
5453 break;
5454 }
5456 Assembler::set_target_address_at(call_target_address,
5457 unoptimized_code,
5458 replacement_code->entry());
5459 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5460 unoptimized_code, call_target_address, replacement_code);
5461 }
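// Editor's sketch of the patch above over a raw byte buffer. A back edge
// site is laid out as
//   sub <profiling_counter>, <delta>
//   jns ok            ; 0x79 <offset>, taken while the counter stays >= 0
//   call <target>     ; interrupt check, or on-stack replacement
//   ok:
// and switching to OSR overwrites the two jns bytes with a two-byte nop
// (0x66 0x90) so the call is always taken; switching back restores them.
static inline void PatchBackEdgeSketch(unsigned char* jns_instr_address,
                                       bool enable_osr) {
  if (enable_osr) {
    jns_instr_address[0] = kNopByteOne;  // 0x66
    jns_instr_address[1] = kNopByteTwo;  // 0x90
  } else {
    jns_instr_address[0] = kJnsInstruction;  // 0x79
    jns_instr_address[1] = kJnsOffset;       // jump over the call
  }
}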
5464 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5465 Isolate* isolate,
5466 Code* unoptimized_code,
5467 Address pc) {
5468 Address call_target_address = pc - kIntSize;
5469 Address jns_instr_address = call_target_address - 3;
5470 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5472 if (*jns_instr_address == kJnsInstruction) {
5473 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5474 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5475 Assembler::target_address_at(call_target_address,
5476 unoptimized_code));
5477 return INTERRUPT;
5478 }
5480 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5481 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5483 if (Assembler::target_address_at(call_target_address,
5484 unoptimized_code) ==
5485 isolate->builtins()->OnStackReplacement()->entry()) {
5486 return ON_STACK_REPLACEMENT;
5487 }
5489 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5490 Assembler::target_address_at(call_target_address,
5491 unoptimized_code));
5492 return OSR_AFTER_STACK_CHECK;
5493 }
5496 } // namespace internal
5497 } // namespace v8
5499 #endif // V8_TARGET_ARCH_X64