1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/codegen.h"
10 #include "src/compiler.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parser.h"
15 #include "src/scopes.h"
20 #define __ ACCESS_MASM(masm_)
23 class JumpPatchSite BASE_EMBEDDED {
25 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
27 info_emitted_ = false;
32 DCHECK(patch_site_.is_bound() == info_emitted_);
35 void EmitJumpIfNotSmi(Register reg,
37 Label::Distance near_jump = Label::kFar) {
38 __ testb(reg, Immediate(kSmiTagMask));
39 EmitJump(not_carry, target, near_jump); // Always taken before patched.
42 void EmitJumpIfSmi(Register reg,
44 Label::Distance near_jump = Label::kFar) {
45 __ testb(reg, Immediate(kSmiTagMask));
46 EmitJump(carry, target, near_jump); // Never taken before patched.
49 void EmitPatchInfo() {
50 if (patch_site_.is_bound()) {
51 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
52 DCHECK(is_uint8(delta_to_patch_site));
53 __ testl(rax, Immediate(delta_to_patch_site));
58 __ nop(); // Signals no inlined code.
63 // jc will be patched with jz, jnc will become jnz.
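// Illustrative sketch (not emitted verbatim): EmitJumpIfNotSmi initially
// produces
//   testb reg, 0x1   ; TEST clears CF, so the following jnc is always taken
//   jnc target
// and the IC patcher later rewrites the jnc into jnz, after which the branch
// is taken exactly when the low (smi tag) bit is set, i.e. for non-smis.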
64 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
65 DCHECK(!patch_site_.is_bound() && !info_emitted_);
66 DCHECK(cc == carry || cc == not_carry);
67 __ bind(&patch_site_);
68 __ j(cc, target, near_jump);
71 MacroAssembler* masm_;
79 // Generate code for a JS function. On entry to the function the receiver
80 // and arguments have been pushed on the stack left to right, with the
81 // return address on top of them. The actual argument count matches the
82 // formal parameter count expected by the function.
84 // The live registers are:
85 // o rdi: the JS function object being called (i.e. ourselves)
87 // o rbp: our caller's frame pointer
88 // o rsp: stack pointer (pointing to return address)
90 // The function builds a JS frame. Please see JavaScriptFrameConstants in
91 // frames-x64.h for its layout.
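// As a rough sketch (frames-x64.h has the authoritative offsets), after the
// prologue the frame looks like:
//   rbp + 2 * kPointerSize : last pushed argument (receiver if no arguments)
//   rbp + 1 * kPointerSize : return address
//   rbp + 0                : caller's frame pointer
//   rbp - 1 * kPointerSize : context (also kept live in rsi)
//   rbp - 2 * kPointerSize : the JSFunction being run
//   rbp - 3 * kPointerSize : first stack-allocated local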
92 void FullCodeGenerator::Generate() {
93 CompilationInfo* info = info_;
94 profiling_counter_ = isolate()->factory()->NewCell(
95 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
96 SetFunctionPosition(function());
97 Comment cmnt(masm_, "[ function compiled by full code generator");
99 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
102 if (strlen(FLAG_stop_at) > 0 &&
103 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
108   // Sloppy mode functions and builtins need to replace the receiver with the
109   // global proxy when called as functions (without an explicit receiver object).
111 if (is_sloppy(info->language_mode()) && !info->is_native() &&
112 info->MayUseThis() && info->scope()->has_this_declaration()) {
114 // +1 for return address.
115 StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
116 __ movp(rcx, args.GetReceiverOperand());
118 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
119 __ j(not_equal, &ok, Label::kNear);
121 __ movp(rcx, GlobalObjectOperand());
122 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));
124 __ movp(args.GetReceiverOperand(), rcx);
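  // For example, calling `function f() { return this; }` as a plain `f()`
  // pushes undefined as the receiver; the code above replaces that slot with
  // the global proxy so `this` observes the expected sloppy-mode value.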
129 // Open a frame scope to indicate that there is a frame on the stack. The
130 // MANUAL indicates that the scope shouldn't actually generate code to set up
131 // the frame (that is done below).
132 FrameScope frame_scope(masm_, StackFrame::MANUAL);
134 info->set_prologue_offset(masm_->pc_offset());
135 __ Prologue(info->IsCodePreAgingActive());
136 info->AddNoFrameRange(0, masm_->pc_offset());
138 { Comment cmnt(masm_, "[ Allocate locals");
139 int locals_count = info->scope()->num_stack_slots();
140 // Generators allocate locals, if any, in context slots.
141 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
142 if (locals_count == 1) {
143 __ PushRoot(Heap::kUndefinedValueRootIndex);
144 } else if (locals_count > 1) {
145 if (locals_count >= 128) {
148 __ subp(rcx, Immediate(locals_count * kPointerSize));
149 __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
150 __ j(above_equal, &ok, Label::kNear);
151 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
154 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
155 const int kMaxPushes = 32;
156 if (locals_count >= kMaxPushes) {
157 int loop_iterations = locals_count / kMaxPushes;
158 __ movp(rcx, Immediate(loop_iterations));
160 __ bind(&loop_header);
162 for (int i = 0; i < kMaxPushes; i++) {
165 // Continue loop if not done.
167 __ j(not_zero, &loop_header, Label::kNear);
169 int remaining = locals_count % kMaxPushes;
170 // Emit the remaining pushes.
171 for (int i = 0; i < remaining; i++) {
177 bool function_in_register = true;
179 // Possibly allocate a local context.
180 if (info->scope()->num_heap_slots() > 0) {
181 Comment cmnt(masm_, "[ Allocate context");
182 bool need_write_barrier = true;
183 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184 // Argument to NewContext is the function, which is still in rdi.
185 if (info->scope()->is_script_scope()) {
187 __ Push(info->scope()->GetScopeInfo(info->isolate()));
188 __ CallRuntime(Runtime::kNewScriptContext, 2);
189 } else if (slots <= FastNewContextStub::kMaximumSlots) {
190 FastNewContextStub stub(isolate(), slots);
192 // Result of FastNewContextStub is always in new space.
193 need_write_barrier = false;
196 __ CallRuntime(Runtime::kNewFunctionContext, 1);
198 function_in_register = false;
199 // Context is returned in rax. It replaces the context passed to us.
200 // It's saved in the stack and kept live in rsi.
202 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
204 // Copy any necessary parameters into the context.
205 int num_parameters = info->scope()->num_parameters();
206 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
207 for (int i = first_parameter; i < num_parameters; i++) {
208 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
209 if (var->IsContextSlot()) {
210 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
211 (num_parameters - 1 - i) * kPointerSize;
212 // Load parameter from stack.
213 __ movp(rax, Operand(rbp, parameter_offset));
214 // Store it in the context.
215 int context_offset = Context::SlotOffset(var->index());
216 __ movp(Operand(rsi, context_offset), rax);
217 // Update the write barrier. This clobbers rax and rbx.
218 if (need_write_barrier) {
219 __ RecordWriteContextSlot(
220 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
221 } else if (FLAG_debug_code) {
223 __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
224 __ Abort(kExpectedNewSpaceObject);
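    // A parameter ends up in a context slot when it is captured by a nested
    // closure (or eval), e.g. in
    //   function outer(x) { return function() { return x; }; }
    // the parameter x must be copied from the stack into the context here.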
231   // Possibly set up a local binding to the this-function, which is used in
232   // derived constructors with super calls.
233 Variable* this_function_var = scope()->this_function_var();
234 if (this_function_var != nullptr) {
235 Comment cmnt(masm_, "[ This function");
236 if (!function_in_register) {
237 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
238     // The write barrier clobbers the register again; keep it marked as such.
240 SetVar(this_function_var, rdi, rbx, rdx);
243 Variable* new_target_var = scope()->new_target_var();
244 if (new_target_var != nullptr) {
245 Comment cmnt(masm_, "[ new.target");
247 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
248 Label non_adaptor_frame;
249 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
250 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
251 __ j(not_equal, &non_adaptor_frame);
252 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
254 __ bind(&non_adaptor_frame);
255 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
256 Smi::FromInt(StackFrame::CONSTRUCT));
258 Label non_construct_frame, done;
259 __ j(not_equal, &non_construct_frame);
263 Operand(rax, ConstructFrameConstants::kOriginalConstructorOffset));
266 // Non-construct frame
267 __ bind(&non_construct_frame);
268 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
271 SetVar(new_target_var, rax, rbx, rdx);
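    // For example, `new f()` reaches this point through a construct frame, so
    // new.target is the original constructor; a plain call `f()` takes the
    // non-construct path above and new.target is undefined.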
274   // Possibly allocate a rest parameter array.
276 Variable* rest_param = scope()->rest_parameter(&rest_index);
278 Comment cmnt(masm_, "[ Allocate rest parameter array");
280 int num_parameters = info->scope()->num_parameters();
281 int offset = num_parameters * kPointerSize;
284 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
286 __ Push(Smi::FromInt(num_parameters));
287 __ Push(Smi::FromInt(rest_index));
288 __ Push(Smi::FromInt(language_mode()));
290 RestParamAccessStub stub(isolate());
293 SetVar(rest_param, rax, rbx, rdx);
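    // For example, for `function f(a, ...rest) {}` called as f(1, 2, 3) the
    // stub is handed the parameter count, the rest index and a pointer into
    // the caller's argument area, and materializes rest as the array [2, 3].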
296 // Possibly allocate an arguments object.
297 Variable* arguments = scope()->arguments();
298 if (arguments != NULL) {
299 // Arguments object must be allocated after the context object, in
300 // case the "arguments" or ".arguments" variables are in the context.
301 Comment cmnt(masm_, "[ Allocate arguments object");
302 if (function_in_register) {
305 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
307 // The receiver is just before the parameters on the caller's stack.
308 int num_parameters = info->scope()->num_parameters();
309 int offset = num_parameters * kPointerSize;
311 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
313 __ Push(Smi::FromInt(num_parameters));
314 // Arguments to ArgumentsAccessStub:
315 // function, receiver address, parameter count.
316 // The stub will rewrite receiver and parameter count if the previous
317 // stack frame was an arguments adapter frame.
319 ArgumentsAccessStub::Type type;
320 if (is_strict(language_mode()) || !has_simple_parameters()) {
321 type = ArgumentsAccessStub::NEW_STRICT;
322 } else if (function()->has_duplicate_parameters()) {
323 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
325 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
327 ArgumentsAccessStub stub(isolate(), type);
330 SetVar(arguments, rax, rbx, rdx);
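    // For example, a strict-mode function (or one with non-simple parameters)
    // gets an unmapped arguments object (NEW_STRICT); a sloppy function with
    // duplicate parameter names takes the slow sloppy path, and all other
    // sloppy functions get the fast mapped arguments object.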
334 __ CallRuntime(Runtime::kTraceEnter, 0);
337   // Visit the declarations and body unless there is an illegal redeclaration.
339 if (scope()->HasIllegalRedeclaration()) {
340 Comment cmnt(masm_, "[ Declarations");
341 scope()->VisitIllegalRedeclaration(this);
344 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
345 { Comment cmnt(masm_, "[ Declarations");
346 VisitDeclarations(scope()->declarations());
349     // Assert that the declarations do not use ICs. Otherwise the debugger
350     // won't be able to redirect a PC at an IC to the correct IC in newly
        // recompiled code.
352 DCHECK_EQ(0, ic_total_count_);
354 { Comment cmnt(masm_, "[ Stack check");
355 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
357 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
358 __ j(above_equal, &ok, Label::kNear);
359 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
363 { Comment cmnt(masm_, "[ Body");
364 DCHECK(loop_depth() == 0);
365 VisitStatements(function()->body());
366 DCHECK(loop_depth() == 0);
370   // Always emit a 'return undefined' in case control fell off the end of the body.
372 { Comment cmnt(masm_, "[ return <undefined>;");
373 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
374 EmitReturnSequence();
379 void FullCodeGenerator::ClearAccumulator() {
384 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
385 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
386 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
387 Smi::FromInt(-delta));
391 void FullCodeGenerator::EmitProfilingCounterReset() {
392 int reset_value = FLAG_interrupt_budget;
393 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
394 __ Move(kScratchRegister, Smi::FromInt(reset_value));
395 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
399 static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
402 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
403 Label* back_edge_target) {
404 Comment cmnt(masm_, "[ Back edge bookkeeping");
407 DCHECK(back_edge_target->is_bound());
408 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
409 int weight = Min(kMaxBackEdgeWeight,
410 Max(1, distance / kCodeSizeMultiplier));
411 EmitProfilingCounterDecrement(weight);
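  // For example (hypothetical numbers), a back edge 1600 bytes away from its
  // target with a code-size multiplier of 160 decrements the profiling counter
  // by 10; tight loops always decrement by at least 1, and very large loop
  // bodies are capped at kMaxBackEdgeWeight.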
413 __ j(positive, &ok, Label::kNear);
415     PredictableCodeSizeScope predictable_code_size_scope(masm_, kJnsOffset);
416 DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
417 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
419 // Record a mapping of this PC offset to the OSR id. This is used to find
420 // the AST id from the unoptimized code in order to use it as a key into
421 // the deoptimization input data found in the optimized code.
422 RecordBackEdge(stmt->OsrEntryId());
424 EmitProfilingCounterReset();
428 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
429 // Record a mapping of the OSR id to this PC. This is used if the OSR
430 // entry becomes the target of a bailout. We don't expect it to be, but
431 // we want it to work if it is.
432 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
436 void FullCodeGenerator::EmitReturnSequence() {
437 Comment cmnt(masm_, "[ Return sequence");
438 if (return_label_.is_bound()) {
439 __ jmp(&return_label_);
441 __ bind(&return_label_);
444 __ CallRuntime(Runtime::kTraceExit, 1);
446 // Pretend that the exit is a backwards jump to the entry.
448 if (info_->ShouldSelfOptimize()) {
449 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
451 int distance = masm_->pc_offset();
452 weight = Min(kMaxBackEdgeWeight,
453 Max(1, distance / kCodeSizeMultiplier));
455 EmitProfilingCounterDecrement(weight);
457 __ j(positive, &ok, Label::kNear);
459 __ call(isolate()->builtins()->InterruptCheck(),
460 RelocInfo::CODE_TARGET);
462 EmitProfilingCounterReset();
465 SetReturnPosition(function());
466 int no_frame_start = masm_->pc_offset();
469 int arg_count = info_->scope()->num_parameters() + 1;
470 int arguments_bytes = arg_count * kPointerSize;
471 __ Ret(arguments_bytes, rcx);
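    // For example, a function declared with two parameters returns with
    // arg_count == 3 (the two parameters plus the receiver), so Ret pops
    // 3 * kPointerSize bytes of arguments in addition to the return address,
    // using rcx as the scratch register.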
473 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
478 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
479 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
480 MemOperand operand = codegen()->VarOperand(var, result_register());
485 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
489 void FullCodeGenerator::AccumulatorValueContext::Plug(
490 Heap::RootListIndex index) const {
491 __ LoadRoot(result_register(), index);
495 void FullCodeGenerator::StackValueContext::Plug(
496 Heap::RootListIndex index) const {
501 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
502 codegen()->PrepareForBailoutBeforeSplit(condition(),
506 if (index == Heap::kUndefinedValueRootIndex ||
507 index == Heap::kNullValueRootIndex ||
508 index == Heap::kFalseValueRootIndex) {
509 if (false_label_ != fall_through_) __ jmp(false_label_);
510 } else if (index == Heap::kTrueValueRootIndex) {
511 if (true_label_ != fall_through_) __ jmp(true_label_);
513 __ LoadRoot(result_register(), index);
514 codegen()->DoTest(this);
519 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
523 void FullCodeGenerator::AccumulatorValueContext::Plug(
524 Handle<Object> lit) const {
526 __ SafeMove(result_register(), Smi::cast(*lit));
528 __ Move(result_register(), lit);
533 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
535 __ SafePush(Smi::cast(*lit));
542 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
543 codegen()->PrepareForBailoutBeforeSplit(condition(),
547 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
548 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
549 if (false_label_ != fall_through_) __ jmp(false_label_);
550 } else if (lit->IsTrue() || lit->IsJSObject()) {
551 if (true_label_ != fall_through_) __ jmp(true_label_);
552 } else if (lit->IsString()) {
553 if (String::cast(*lit)->length() == 0) {
554 if (false_label_ != fall_through_) __ jmp(false_label_);
556 if (true_label_ != fall_through_) __ jmp(true_label_);
558 } else if (lit->IsSmi()) {
559 if (Smi::cast(*lit)->value() == 0) {
560 if (false_label_ != fall_through_) __ jmp(false_label_);
562 if (true_label_ != fall_through_) __ jmp(true_label_);
565 // For simplicity we always test the accumulator register.
566 __ Move(result_register(), lit);
567 codegen()->DoTest(this);
572 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
573 Register reg) const {
579 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
581 Register reg) const {
584 __ Move(result_register(), reg);
588 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
589 Register reg) const {
591 if (count > 1) __ Drop(count - 1);
592 __ movp(Operand(rsp, 0), reg);
596 void FullCodeGenerator::TestContext::DropAndPlug(int count,
597 Register reg) const {
599 // For simplicity we always test the accumulator register.
601 __ Move(result_register(), reg);
602 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
603 codegen()->DoTest(this);
607 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
608 Label* materialize_false) const {
609 DCHECK(materialize_true == materialize_false);
610 __ bind(materialize_true);
614 void FullCodeGenerator::AccumulatorValueContext::Plug(
615 Label* materialize_true,
616 Label* materialize_false) const {
618 __ bind(materialize_true);
619 __ Move(result_register(), isolate()->factory()->true_value());
620 __ jmp(&done, Label::kNear);
621 __ bind(materialize_false);
622 __ Move(result_register(), isolate()->factory()->false_value());
627 void FullCodeGenerator::StackValueContext::Plug(
628 Label* materialize_true,
629 Label* materialize_false) const {
631 __ bind(materialize_true);
632 __ Push(isolate()->factory()->true_value());
633 __ jmp(&done, Label::kNear);
634 __ bind(materialize_false);
635 __ Push(isolate()->factory()->false_value());
640 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
641 Label* materialize_false) const {
642 DCHECK(materialize_true == true_label_);
643 DCHECK(materialize_false == false_label_);
647 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
648 Heap::RootListIndex value_root_index =
649 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
650 __ LoadRoot(result_register(), value_root_index);
654 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
655 Heap::RootListIndex value_root_index =
656 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
657 __ PushRoot(value_root_index);
661 void FullCodeGenerator::TestContext::Plug(bool flag) const {
662 codegen()->PrepareForBailoutBeforeSplit(condition(),
667 if (true_label_ != fall_through_) __ jmp(true_label_);
669 if (false_label_ != fall_through_) __ jmp(false_label_);
674 void FullCodeGenerator::DoTest(Expression* condition,
677 Label* fall_through) {
678 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
679 CallIC(ic, condition->test_id());
680 __ testp(result_register(), result_register());
681 // The stub returns nonzero for true.
682 Split(not_zero, if_true, if_false, fall_through);
686 void FullCodeGenerator::Split(Condition cc,
689 Label* fall_through) {
690 if (if_false == fall_through) {
692 } else if (if_true == fall_through) {
693 __ j(NegateCondition(cc), if_false);
701 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
702 DCHECK(var->IsStackAllocated());
703 // Offset is negative because higher indexes are at lower addresses.
704 int offset = -var->index() * kPointerSize;
705 // Adjust by a (parameter or local) base offset.
706 if (var->IsParameter()) {
707 offset += kFPOnStackSize + kPCOnStackSize +
708 (info_->scope()->num_parameters() - 1) * kPointerSize;
710 offset += JavaScriptFrameConstants::kLocal0Offset;
712 return Operand(rbp, offset);
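  // Rough worked example with kPointerSize == 8: in a function with three
  // parameters, the last parameter (index 2) lives just above the return
  // address and saved frame pointer at rbp + 16, the first parameter at
  // rbp + 32, and stack local 0 at rbp + kLocal0Offset, below the context and
  // function slots.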
716 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
717 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
718 if (var->IsContextSlot()) {
719 int context_chain_length = scope()->ContextChainLength(var->scope());
720 __ LoadContext(scratch, context_chain_length);
721 return ContextOperand(scratch, var->index());
723 return StackOperand(var);
728 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
729 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
730 MemOperand location = VarOperand(var, dest);
731 __ movp(dest, location);
735 void FullCodeGenerator::SetVar(Variable* var,
739 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
740 DCHECK(!scratch0.is(src));
741 DCHECK(!scratch0.is(scratch1));
742 DCHECK(!scratch1.is(src));
743 MemOperand location = VarOperand(var, scratch0);
744 __ movp(location, src);
746 // Emit the write barrier code if the location is in the heap.
747 if (var->IsContextSlot()) {
748 int offset = Context::SlotOffset(var->index());
749 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
754 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
755 bool should_normalize,
758 // Only prepare for bailouts before splits if we're in a test
759 // context. Otherwise, we let the Visit function deal with the
760 // preparation to avoid preparing with the same AST id twice.
761 if (!context()->IsTest() || !info_->IsOptimizable()) return;
764 if (should_normalize) __ jmp(&skip, Label::kNear);
765 PrepareForBailout(expr, TOS_REG);
766 if (should_normalize) {
767 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
768 Split(equal, if_true, if_false, NULL);
774 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
775 // The variable in the declaration always resides in the current context.
776 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
777 if (generate_debug_code_) {
778 // Check that we're not inside a with or catch context.
779 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
780 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
781 __ Check(not_equal, kDeclarationInWithContext);
782 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
783 __ Check(not_equal, kDeclarationInCatchContext);
788 void FullCodeGenerator::VisitVariableDeclaration(
789 VariableDeclaration* declaration) {
790 // If it was not possible to allocate the variable at compile time, we
791 // need to "declare" it at runtime to make sure it actually exists in the
793 VariableProxy* proxy = declaration->proxy();
794 VariableMode mode = declaration->mode();
795 Variable* variable = proxy->var();
796 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
797 switch (variable->location()) {
798 case VariableLocation::GLOBAL:
799 case VariableLocation::UNALLOCATED:
800 globals_->Add(variable->name(), zone());
801 globals_->Add(variable->binding_needs_init()
802 ? isolate()->factory()->the_hole_value()
803 : isolate()->factory()->undefined_value(),
807 case VariableLocation::PARAMETER:
808 case VariableLocation::LOCAL:
810 Comment cmnt(masm_, "[ VariableDeclaration");
811 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
812 __ movp(StackOperand(variable), kScratchRegister);
816 case VariableLocation::CONTEXT:
818 Comment cmnt(masm_, "[ VariableDeclaration");
819 EmitDebugCheckDeclarationContext(variable);
820 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
821 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
822 // No write barrier since the hole value is in old space.
823 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
827 case VariableLocation::LOOKUP: {
828 Comment cmnt(masm_, "[ VariableDeclaration");
829 __ Push(variable->name());
830 // Declaration nodes are always introduced in one of four modes.
831 DCHECK(IsDeclaredVariableMode(mode));
832 // Push initial value, if any.
833 // Note: For variables we must not push an initial value (such as
834 // 'undefined') because we may have a (legal) redeclaration and we
835 // must not destroy the current value.
837 __ PushRoot(Heap::kTheHoleValueRootIndex);
839 __ Push(Smi::FromInt(0)); // Indicates no initial value.
841 __ CallRuntime(IsImmutableVariableMode(mode)
842 ? Runtime::kDeclareReadOnlyLookupSlot
843 : Runtime::kDeclareLookupSlot,
851 void FullCodeGenerator::VisitFunctionDeclaration(
852 FunctionDeclaration* declaration) {
853 VariableProxy* proxy = declaration->proxy();
854 Variable* variable = proxy->var();
855 switch (variable->location()) {
856 case VariableLocation::GLOBAL:
857 case VariableLocation::UNALLOCATED: {
858 globals_->Add(variable->name(), zone());
859 Handle<SharedFunctionInfo> function =
860 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
861 // Check for stack-overflow exception.
862 if (function.is_null()) return SetStackOverflow();
863 globals_->Add(function, zone());
867 case VariableLocation::PARAMETER:
868 case VariableLocation::LOCAL: {
869 Comment cmnt(masm_, "[ FunctionDeclaration");
870 VisitForAccumulatorValue(declaration->fun());
871 __ movp(StackOperand(variable), result_register());
875 case VariableLocation::CONTEXT: {
876 Comment cmnt(masm_, "[ FunctionDeclaration");
877 EmitDebugCheckDeclarationContext(variable);
878 VisitForAccumulatorValue(declaration->fun());
879 __ movp(ContextOperand(rsi, variable->index()), result_register());
880 int offset = Context::SlotOffset(variable->index());
881 // We know that we have written a function, which is not a smi.
882 __ RecordWriteContextSlot(rsi,
889 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
893 case VariableLocation::LOOKUP: {
894 Comment cmnt(masm_, "[ FunctionDeclaration");
895 __ Push(variable->name());
896 VisitForStackValue(declaration->fun());
897 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
904 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
905 // Call the runtime to declare the globals.
907 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
908 __ CallRuntime(Runtime::kDeclareGlobals, 2);
909 // Return value is ignored.
913 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
914 // Call the runtime to declare the modules.
915 __ Push(descriptions);
916 __ CallRuntime(Runtime::kDeclareModules, 1);
917 // Return value is ignored.
921 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
922 Comment cmnt(masm_, "[ SwitchStatement");
923 Breakable nested_statement(this, stmt);
924 SetStatementPosition(stmt);
926 // Keep the switch value on the stack until a case matches.
927 VisitForStackValue(stmt->tag());
928 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
930 ZoneList<CaseClause*>* clauses = stmt->cases();
931 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
933 Label next_test; // Recycled for each test.
934 // Compile all the tests with branches to their bodies.
935 for (int i = 0; i < clauses->length(); i++) {
936 CaseClause* clause = clauses->at(i);
937 clause->body_target()->Unuse();
939 // The default is not a test, but remember it as final fall through.
940 if (clause->is_default()) {
941 default_clause = clause;
945 Comment cmnt(masm_, "[ Case comparison");
949 // Compile the label expression.
950 VisitForAccumulatorValue(clause->label());
952 // Perform the comparison as if via '==='.
953 __ movp(rdx, Operand(rsp, 0)); // Switch value.
954 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
955 JumpPatchSite patch_site(masm_);
956 if (inline_smi_code) {
960 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
963 __ j(not_equal, &next_test);
964 __ Drop(1); // Switch value is no longer needed.
965 __ jmp(clause->body_target());
969 // Record position before stub call for type feedback.
970 SetExpressionPosition(clause);
971 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
972 strength(language_mode())).code();
973 CallIC(ic, clause->CompareId());
974 patch_site.EmitPatchInfo();
977 __ jmp(&skip, Label::kNear);
978 PrepareForBailout(clause, TOS_REG);
979 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
980 __ j(not_equal, &next_test);
982 __ jmp(clause->body_target());
986 __ j(not_equal, &next_test);
987 __ Drop(1); // Switch value is no longer needed.
988 __ jmp(clause->body_target());
991 // Discard the test value and jump to the default if present, otherwise to
992 // the end of the statement.
994 __ Drop(1); // Switch value is no longer needed.
995 if (default_clause == NULL) {
996 __ jmp(nested_statement.break_label());
998 __ jmp(default_clause->body_target());
1001 // Compile all the case bodies.
1002 for (int i = 0; i < clauses->length(); i++) {
1003 Comment cmnt(masm_, "[ Case body");
1004 CaseClause* clause = clauses->at(i);
1005 __ bind(clause->body_target());
1006 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1007 VisitStatements(clause->statements());
1010 __ bind(nested_statement.break_label());
1011 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
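  // For example, for `switch (x) { case a: s1; default: s2; case b: s3; }`
  // the comparisons against a and b are emitted first (in source order, with
  // the default skipped), followed by the bodies s1, s2 and s3 in source
  // order so that fall-through between clauses still works.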
1015 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1016 Comment cmnt(masm_, "[ ForInStatement");
1017 SetStatementPosition(stmt, SKIP_BREAK);
1019 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1022 ForIn loop_statement(this, stmt);
1023 increment_loop_depth();
1025 // Get the object to enumerate over. If the object is null or undefined, skip
1026 // over the loop. See ECMA-262 version 5, section 12.6.4.
1027 SetExpressionAsStatementPosition(stmt->enumerable());
1028 VisitForAccumulatorValue(stmt->enumerable());
1029 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1031 Register null_value = rdi;
1032 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1033 __ cmpp(rax, null_value);
1036 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1038 // Convert the object to a JS object.
1039 Label convert, done_convert;
1040 __ JumpIfSmi(rax, &convert, Label::kNear);
1041 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1042 __ j(above_equal, &done_convert, Label::kNear);
1044 ToObjectStub stub(isolate());
1046 __ bind(&done_convert);
1047 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1050 // Check for proxies.
1052 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1053 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1054 __ j(below_equal, &call_runtime);
1056 // Check cache validity in generated code. This is a fast case for
1057 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1058 // guarantee cache validity, call the runtime system to check cache
1059 // validity or get the property names in a fixed array.
1060 __ CheckEnumCache(null_value, &call_runtime);
1062 // The enum cache is valid. Load the map of the object being
1063 // iterated over and use the cache for the iteration.
1065 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1066 __ jmp(&use_cache, Label::kNear);
1068 // Get the set of properties to enumerate.
1069 __ bind(&call_runtime);
1070 __ Push(rax); // Duplicate the enumerable object on the stack.
1071 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1072 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1074 // If we got a map from the runtime call, we can do a fast
1075 // modification check. Otherwise, we got a fixed array, and we have
1076 // to do a slow check.
1078 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1079 Heap::kMetaMapRootIndex);
1080 __ j(not_equal, &fixed_array);
1082 // We got a map in register rax. Get the enumeration cache from it.
1083 __ bind(&use_cache);
1085 Label no_descriptors;
1087 __ EnumLength(rdx, rax);
1088 __ Cmp(rdx, Smi::FromInt(0));
1089 __ j(equal, &no_descriptors);
1091 __ LoadInstanceDescriptors(rax, rcx);
1092 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1093 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1095 // Set up the four remaining stack slots.
1096 __ Push(rax); // Map.
1097 __ Push(rcx); // Enumeration cache.
1098 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1099 __ Push(Smi::FromInt(0)); // Initial index.
1102 __ bind(&no_descriptors);
1103 __ addp(rsp, Immediate(kPointerSize));
1106 // We got a fixed array in register rax. Iterate through that.
1108 __ bind(&fixed_array);
1110 // No need for a write barrier, we are storing a Smi in the feedback vector.
1111 __ Move(rbx, FeedbackVector());
1112 int vector_index = FeedbackVector()->GetIndex(slot);
1113 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(vector_index)),
1114 TypeFeedbackVector::MegamorphicSentinel(isolate()));
1115 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1116 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1117 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1118 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1119 __ j(above, &non_proxy);
1120 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1121 __ bind(&non_proxy);
1122 __ Push(rbx); // Smi
1123 __ Push(rax); // Array
1124 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1125 __ Push(rax); // Fixed array length (as smi).
1126 __ Push(Smi::FromInt(0)); // Initial index.
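  // While the loop below runs, the stack holds five values (cf. the
  // 5 * kPointerSize drop at the end), roughly, from top to bottom:
  //   rsp[0 * kPointerSize] : current index (smi)
  //   rsp[1 * kPointerSize] : length of the key array (smi)
  //   rsp[2 * kPointerSize] : FixedArray of keys (or the enum cache)
  //   rsp[3 * kPointerSize] : expected map, or a smi marker for the
  //                           proxy/slow case
  //   rsp[4 * kPointerSize] : the enumerable object itself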
1128 // Generate code for doing the condition check.
1129 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1131 SetExpressionAsStatementPosition(stmt->each());
1133 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1134 __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1135 __ j(above_equal, loop_statement.break_label());
1137 // Get the current entry of the array into register rbx.
1138 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1139 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1140 __ movp(rbx, FieldOperand(rbx,
1143 FixedArray::kHeaderSize));
1145   // Get the expected map from the stack, or a smi marker in the
1146   // permanent slow case, into register rdx.
1147 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1149 // Check if the expected map still matches that of the enumerable.
1150 // If not, we may have to filter the key.
1152 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1153 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1154 __ j(equal, &update_each, Label::kNear);
1156 // For proxies, no filtering is done.
1157 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1158 __ Cmp(rdx, Smi::FromInt(0));
1159 __ j(equal, &update_each, Label::kNear);
1161   // Convert the entry to a string or null if it isn't a property
1162   // anymore. If the property has been removed while iterating, we just skip it.
1164 __ Push(rcx); // Enumerable.
1165 __ Push(rbx); // Current entry.
1166 __ CallRuntime(Runtime::kForInFilter, 2);
1167 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1168 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1169 __ j(equal, loop_statement.continue_label());
1172 // Update the 'each' property or variable from the possibly filtered
1173 // entry in register rbx.
1174 __ bind(&update_each);
1175 __ movp(result_register(), rbx);
1176 // Perform the assignment as if via '='.
1177 { EffectContext context(this);
1178 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1179 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1182 // Generate code for the body of the loop.
1183 Visit(stmt->body());
1185 // Generate code for going to the next element by incrementing the
1186 // index (smi) stored on top of the stack.
1187 __ bind(loop_statement.continue_label());
1188 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1190 EmitBackEdgeBookkeeping(stmt, &loop);
1193 // Remove the pointers stored on the stack.
1194 __ bind(loop_statement.break_label());
1195 __ addp(rsp, Immediate(5 * kPointerSize));
1197 // Exit and decrement the loop depth.
1198 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1200 decrement_loop_depth();
1204 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1206 // Use the fast case closure allocation code that allocates in new
1207 // space for nested functions that don't need literals cloning. If
1208 // we're running with the --always-opt or the --prepare-always-opt
1209 // flag, we need to use the runtime function so that the new function
1210 // we are creating here gets a chance to have its code optimized and
1211 // doesn't just get a copy of the existing unoptimized code.
1212 if (!FLAG_always_opt &&
1213 !FLAG_prepare_always_opt &&
1215 scope()->is_function_scope() &&
1216 info->num_literals() == 0) {
1217 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1224 ? isolate()->factory()->true_value()
1225 : isolate()->factory()->false_value());
1226 __ CallRuntime(Runtime::kNewClosure, 3);
1228 context()->Plug(rax);
1232 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1234 FeedbackVectorICSlot slot) {
1235 if (NeedsHomeObject(initializer)) {
1236 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1237 __ Move(StoreDescriptor::NameRegister(),
1238 isolate()->factory()->home_object_symbol());
1239 __ movp(StoreDescriptor::ValueRegister(),
1240 Operand(rsp, offset * kPointerSize));
1241 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1247 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1248 TypeofMode typeof_mode,
1250 Register context = rsi;
1251 Register temp = rdx;
1255 if (s->num_heap_slots() > 0) {
1256 if (s->calls_sloppy_eval()) {
1257 // Check that extension is NULL.
1258 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1260 __ j(not_equal, slow);
1262 // Load next context in chain.
1263 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1264 // Walk the rest of the chain without clobbering rsi.
1267 // If no outer scope calls eval, we do not need to check more
1268 // context extensions. If we have reached an eval scope, we check
1269 // all extensions from this point.
1270 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1271 s = s->outer_scope();
1274 if (s != NULL && s->is_eval_scope()) {
1275 // Loop up the context chain. There is no frame effect so it is
1276 // safe to use raw labels here.
1278 if (!context.is(temp)) {
1279 __ movp(temp, context);
1281 // Load map for comparison into register, outside loop.
1282 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1284 // Terminate at native context.
1285 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1286 __ j(equal, &fast, Label::kNear);
1287 // Check that extension is NULL.
1288 __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1289 __ j(not_equal, slow);
1290 // Load next context in chain.
1291 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1296 // All extension objects were empty and it is safe to use a normal global
1298 EmitGlobalVariableLoad(proxy, typeof_mode);
1302 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1304 DCHECK(var->IsContextSlot());
1305 Register context = rsi;
1306 Register temp = rbx;
1308 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1309 if (s->num_heap_slots() > 0) {
1310 if (s->calls_sloppy_eval()) {
1311 // Check that extension is NULL.
1312 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1314 __ j(not_equal, slow);
1316 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1317 // Walk the rest of the chain without clobbering rsi.
1321 // Check that last extension is NULL.
1322 __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1323 __ j(not_equal, slow);
1325 // This function is used only for loads, not stores, so it's safe to
1326 // return an rsi-based operand (the write barrier cannot be allowed to
1327 // destroy the rsi register).
1328 return ContextOperand(context, var->index());
1332 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1333 TypeofMode typeof_mode,
1334 Label* slow, Label* done) {
1335 // Generate fast-case code for variables that might be shadowed by
1336 // eval-introduced variables. Eval is used a lot without
1337 // introducing variables. In those cases, we do not want to
1338 // perform a runtime call for all variables in the scope
1339 // containing the eval.
1340 Variable* var = proxy->var();
1341 if (var->mode() == DYNAMIC_GLOBAL) {
1342 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1344 } else if (var->mode() == DYNAMIC_LOCAL) {
1345 Variable* local = var->local_if_not_shadowed();
1346 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1347 if (local->mode() == LET || local->mode() == CONST ||
1348 local->mode() == CONST_LEGACY) {
1349 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1350 __ j(not_equal, done);
1351 if (local->mode() == CONST_LEGACY) {
1352 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1353 } else { // LET || CONST
1354 __ Push(var->name());
1355 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1363 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1364 TypeofMode typeof_mode) {
1365 Variable* var = proxy->var();
1366 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1367 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1368 if (var->IsGlobalSlot()) {
1369 DCHECK(var->index() > 0);
1370 DCHECK(var->IsStaticGlobalObjectProperty());
1371 int const slot = var->index();
1372 int const depth = scope()->ContextChainLength(var->scope());
1373 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1374 __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
1375 LoadGlobalViaContextStub stub(isolate(), depth);
1378 __ Push(Smi::FromInt(slot));
1379 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1383 __ Move(LoadDescriptor::NameRegister(), var->name());
1384 __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1385 __ Move(LoadDescriptor::SlotRegister(),
1386 SmiFromSlot(proxy->VariableFeedbackSlot()));
1387 CallLoadIC(typeof_mode);
1392 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1393 TypeofMode typeof_mode) {
1394 // Record position before possible IC call.
1395 SetExpressionPosition(proxy);
1396 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1397 Variable* var = proxy->var();
1399 // Three cases: global variables, lookup variables, and all other types of
1401 switch (var->location()) {
1402 case VariableLocation::GLOBAL:
1403 case VariableLocation::UNALLOCATED: {
1404 Comment cmnt(masm_, "[ Global variable");
1405 EmitGlobalVariableLoad(proxy, typeof_mode);
1406 context()->Plug(rax);
1410 case VariableLocation::PARAMETER:
1411 case VariableLocation::LOCAL:
1412 case VariableLocation::CONTEXT: {
1413 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1414 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1416 if (var->binding_needs_init()) {
1417 // var->scope() may be NULL when the proxy is located in eval code and
1418 // refers to a potential outside binding. Currently those bindings are
1419 // always looked up dynamically, i.e. in that case
1420 // var->location() == LOOKUP.
1422 DCHECK(var->scope() != NULL);
1424 // Check if the binding really needs an initialization check. The check
1425 // can be skipped in the following situation: we have a LET or CONST
1426 // binding in harmony mode, both the Variable and the VariableProxy have
1427 // the same declaration scope (i.e. they are both in global code, in the
1428 // same function or in the same eval code) and the VariableProxy is in
1429 // the source physically located after the initializer of the variable.
1431 // We cannot skip any initialization checks for CONST in non-harmony
1432 // mode because const variables may be declared but never initialized:
1433 // if (false) { const x; }; var y = x;
1435 // The condition on the declaration scopes is a conservative check for
1436 // nested functions that access a binding and are called before the
1437 // binding is initialized:
1438 // function() { f(); let x = 1; function f() { x = 2; } }
1440 bool skip_init_check;
1441 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1442 skip_init_check = false;
1443 } else if (var->is_this()) {
1444 CHECK(info_->function() != nullptr &&
1445 (info_->function()->kind() & kSubclassConstructor) != 0);
1446 // TODO(dslomov): implement 'this' hole check elimination.
1447 skip_init_check = false;
1449 // Check that we always have valid source position.
1450 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1451 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1452 skip_init_check = var->mode() != CONST_LEGACY &&
1453 var->initializer_position() < proxy->position();
1456 if (!skip_init_check) {
1457 // Let and const need a read barrier.
1460 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1461 __ j(not_equal, &done, Label::kNear);
1462 if (var->mode() == LET || var->mode() == CONST) {
1463 // Throw a reference error when using an uninitialized let/const
1464 // binding in harmony mode.
1465 __ Push(var->name());
1466 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1468           // Uninitialized const bindings outside of harmony mode are unholed.
1469 DCHECK(var->mode() == CONST_LEGACY);
1470 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1473 context()->Plug(rax);
1477 context()->Plug(var);
1481 case VariableLocation::LOOKUP: {
1482 Comment cmnt(masm_, "[ Lookup slot");
1484 // Generate code for loading from variables potentially shadowed
1485 // by eval-introduced variables.
1486 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1488 __ Push(rsi); // Context.
1489 __ Push(var->name());
1490 Runtime::FunctionId function_id =
1491 typeof_mode == NOT_INSIDE_TYPEOF
1492 ? Runtime::kLoadLookupSlot
1493 : Runtime::kLoadLookupSlotNoReferenceError;
1494 __ CallRuntime(function_id, 2);
1496 context()->Plug(rax);
1503 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1504 Comment cmnt(masm_, "[ RegExpLiteral");
1506 // Registers will be used as follows:
1507 // rdi = JS function.
1508 // rcx = literals array.
1509 // rbx = regexp literal.
1510 // rax = regexp literal clone.
1511 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1512 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1513 int literal_offset =
1514 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1515 __ movp(rbx, FieldOperand(rcx, literal_offset));
1516 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1517 __ j(not_equal, &materialized, Label::kNear);
1519 // Create regexp literal using runtime function
1520 // Result will be in rax.
1522 __ Push(Smi::FromInt(expr->literal_index()));
1523 __ Push(expr->pattern());
1524 __ Push(expr->flags());
1525 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1528 __ bind(&materialized);
1529 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1530 Label allocated, runtime_allocate;
1531 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1534 __ bind(&runtime_allocate);
1536 __ Push(Smi::FromInt(size));
1537 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1540 __ bind(&allocated);
1541 // Copy the content into the newly allocated memory.
1542 // (Unroll copy loop once for better throughput).
1543 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1544 __ movp(rdx, FieldOperand(rbx, i));
1545 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1546 __ movp(FieldOperand(rax, i), rdx);
1547 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1549 if ((size % (2 * kPointerSize)) != 0) {
1550 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1551 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1553 context()->Plug(rax);
1557 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1558 if (expression == NULL) {
1559 __ PushRoot(Heap::kNullValueRootIndex);
1561 VisitForStackValue(expression);
1566 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1567 Comment cmnt(masm_, "[ ObjectLiteral");
1569 Handle<FixedArray> constant_properties = expr->constant_properties();
1570 int flags = expr->ComputeFlags();
1571 if (MustCreateObjectLiteralWithRuntime(expr)) {
1572 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1573 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1574 __ Push(Smi::FromInt(expr->literal_index()));
1575 __ Push(constant_properties);
1576 __ Push(Smi::FromInt(flags));
1577 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1579 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1580 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1581 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1582 __ Move(rcx, constant_properties);
1583 __ Move(rdx, Smi::FromInt(flags));
1584 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1587 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1589 // If result_saved is true the result is on top of the stack. If
1590 // result_saved is false the result is in rax.
1591 bool result_saved = false;
1593 AccessorTable accessor_table(zone());
1594 int property_index = 0;
1595 // store_slot_index points to the vector IC slot for the next store IC used.
1596 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1597 // and must be updated if the number of store ICs emitted here changes.
1598 int store_slot_index = 0;
1599 for (; property_index < expr->properties()->length(); property_index++) {
1600 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1601 if (property->is_computed_name()) break;
1602 if (property->IsCompileTimeValue()) continue;
1604 Literal* key = property->key()->AsLiteral();
1605 Expression* value = property->value();
1606 if (!result_saved) {
1607 __ Push(rax); // Save result on the stack
1608 result_saved = true;
1610 switch (property->kind()) {
1611 case ObjectLiteral::Property::CONSTANT:
1613 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1614 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1616 case ObjectLiteral::Property::COMPUTED:
1617 // It is safe to use [[Put]] here because the boilerplate already
1618 // contains computed properties with an uninitialized value.
1619 if (key->value()->IsInternalizedString()) {
1620 if (property->emit_store()) {
1621 VisitForAccumulatorValue(value);
1622 DCHECK(StoreDescriptor::ValueRegister().is(rax));
1623 __ Move(StoreDescriptor::NameRegister(), key->value());
1624 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1625 if (FLAG_vector_stores) {
1626 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1629 CallStoreIC(key->LiteralFeedbackId());
1631 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1633 if (NeedsHomeObject(value)) {
1634 __ movp(StoreDescriptor::ReceiverRegister(), rax);
1635 __ Move(StoreDescriptor::NameRegister(),
1636 isolate()->factory()->home_object_symbol());
1637 __ movp(StoreDescriptor::ValueRegister(), Operand(rsp, 0));
1638 if (FLAG_vector_stores) {
1639 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1644 VisitForEffect(value);
1648 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1649 VisitForStackValue(key);
1650 VisitForStackValue(value);
1651 if (property->emit_store()) {
1652 EmitSetHomeObjectIfNeeded(
1653 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1654 __ Push(Smi::FromInt(SLOPPY)); // Language mode
1655 __ CallRuntime(Runtime::kSetProperty, 4);
1660 case ObjectLiteral::Property::PROTOTYPE:
1661 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1662 VisitForStackValue(value);
1663 DCHECK(property->emit_store());
1664 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1666 case ObjectLiteral::Property::GETTER:
1667 if (property->emit_store()) {
1668 accessor_table.lookup(key)->second->getter = value;
1671 case ObjectLiteral::Property::SETTER:
1672 if (property->emit_store()) {
1673 accessor_table.lookup(key)->second->setter = value;
1679 // Emit code to define accessors, using only a single call to the runtime for
1680 // each pair of corresponding getters and setters.
1681 for (AccessorTable::Iterator it = accessor_table.begin();
1682 it != accessor_table.end();
1684 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1685 VisitForStackValue(it->first);
1686 EmitAccessor(it->second->getter);
1687 EmitSetHomeObjectIfNeeded(
1688 it->second->getter, 2,
1689 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1690 EmitAccessor(it->second->setter);
1691 EmitSetHomeObjectIfNeeded(
1692 it->second->setter, 3,
1693 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1694 __ Push(Smi::FromInt(NONE));
1695 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
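    // For example, `{ get x() { ... }, set x(v) { ... } }` produces a single
    // entry in the accessor table for "x", so both closures are installed
    // with one runtime call; a lone getter or setter passes null for the
    // missing half (see EmitAccessor above).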
1698 // Object literals have two parts. The "static" part on the left contains no
1699 // computed property names, and so we can compute its map ahead of time; see
1700 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1701 // starts with the first computed property name, and continues with all
1702 // properties to its right. All the code from above initializes the static
1703 // component of the object literal, and arranges for the map of the result to
1704 // reflect the static order in which the keys appear. For the dynamic
1705 // properties, we compile them into a series of "SetOwnProperty" runtime
1706 // calls. This will preserve insertion order.
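  // For example, in `{ a: 1, [b]: 2, c: 3 }` only "a" is part of the
  // boilerplate handled above; the computed name [b] and everything after it,
  // including "c", are added by the runtime calls below so that insertion
  // order is preserved.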
1707 for (; property_index < expr->properties()->length(); property_index++) {
1708 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1710 Expression* value = property->value();
1711 if (!result_saved) {
1712 __ Push(rax); // Save result on the stack
1713 result_saved = true;
1716 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1718 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1719 DCHECK(!property->is_computed_name());
1720 VisitForStackValue(value);
1721 DCHECK(property->emit_store());
1722 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1724 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1725 VisitForStackValue(value);
1726 EmitSetHomeObjectIfNeeded(
1727 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1729 switch (property->kind()) {
1730 case ObjectLiteral::Property::CONSTANT:
1731 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1732 case ObjectLiteral::Property::COMPUTED:
1733 if (property->emit_store()) {
1734 __ Push(Smi::FromInt(NONE));
1735 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1741 case ObjectLiteral::Property::PROTOTYPE:
1745 case ObjectLiteral::Property::GETTER:
1746 __ Push(Smi::FromInt(NONE));
1747 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1750 case ObjectLiteral::Property::SETTER:
1751 __ Push(Smi::FromInt(NONE));
1752 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1758 if (expr->has_function()) {
1759 DCHECK(result_saved);
1760 __ Push(Operand(rsp, 0));
1761 __ CallRuntime(Runtime::kToFastProperties, 1);
1765 context()->PlugTOS();
1767 context()->Plug(rax);
1770 // Verify that compilation exactly consumed the number of store ic slots that
1771 // the ObjectLiteral node had to offer.
1772 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1776 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1777 Comment cmnt(masm_, "[ ArrayLiteral");
1779 expr->BuildConstantElements(isolate());
1780 Handle<FixedArray> constant_elements = expr->constant_elements();
1781 bool has_constant_fast_elements =
1782 IsFastObjectElementsKind(expr->constant_elements_kind());
1784 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1785 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1786 // If the only customer of allocation sites is transitioning, then
1787 // we can turn it off if we don't have anywhere else to transition to.
1788 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1791 if (MustCreateArrayLiteralWithRuntime(expr)) {
1792 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1793 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1794 __ Push(Smi::FromInt(expr->literal_index()));
1795 __ Push(constant_elements);
1796 __ Push(Smi::FromInt(expr->ComputeFlags()));
1797 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1799 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1800 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1801 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1802 __ Move(rcx, constant_elements);
1803 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1806 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1808 bool result_saved = false; // Is the result saved to the stack?
1809 ZoneList<Expression*>* subexprs = expr->values();
1810 int length = subexprs->length();
1812 // Emit code to evaluate all the non-constant subexpressions and to store
1813 // them into the newly cloned array.
1814 int array_index = 0;
1815 for (; array_index < length; array_index++) {
1816 Expression* subexpr = subexprs->at(array_index);
1817 if (subexpr->IsSpread()) break;
1819 // If the subexpression is a literal or a simple materialized literal it
1820 // is already set in the cloned array.
1821 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
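// (Illustrative) For [1, 2, foo()], the constants 1 and 2 were already
// materialized by the clone above; only the slot holding foo() reaches the
// store code below.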
1823 if (!result_saved) {
1824 __ Push(rax); // array literal
1825 __ Push(Smi::FromInt(expr->literal_index()));
1826 result_saved = true;
1828 VisitForAccumulatorValue(subexpr);
1830 if (has_constant_fast_elements) {
1831 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: such
1832 // literals cannot transition, so the runtime stub does not need to be called.
1833 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1834 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1835 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1836 // Store the subexpression value in the array's elements.
1837 __ movp(FieldOperand(rbx, offset), result_register());
1838 // Update the write barrier for the array store.
1839 __ RecordWriteField(rbx, offset, result_register(), rcx,
1841 EMIT_REMEMBERED_SET,
1844 // Store the subexpression value in the array's elements.
1845 __ Move(rcx, Smi::FromInt(array_index));
1846 StoreArrayLiteralElementStub stub(isolate());
1850 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1853 // In case the array literal contains spread expressions it has two parts. The
1854 // first part is the "static" array, which has a literal index and is handled
1855 // above. The second part starts at the first spread expression (inclusive);
1856 // these elements get appended to the array one by one. Note that the number
1857 // of elements an iterable produces is unknown ahead of time.
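// (Illustrative) For [1, 2, ...iter, 3] the clone above covers 1 and 2, while
// ...iter and 3 are handled by the append loop below, via
// Builtins::CONCAT_ITERABLE_TO_ARRAY and Runtime::kAppendElement respectively.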
1858 if (array_index < length && result_saved) {
1859 __ Drop(1); // literal index
1861 result_saved = false;
1863 for (; array_index < length; array_index++) {
1864 Expression* subexpr = subexprs->at(array_index);
1867 if (subexpr->IsSpread()) {
1868 VisitForStackValue(subexpr->AsSpread()->expression());
1869 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1871 VisitForStackValue(subexpr);
1872 __ CallRuntime(Runtime::kAppendElement, 2);
1875 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1879 __ Drop(1); // literal index
1880 context()->PlugTOS();
1882 context()->Plug(rax);
1887 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1888 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1890 Comment cmnt(masm_, "[ Assignment");
1891 SetExpressionPosition(expr, INSERT_BREAK);
1893 Property* property = expr->target()->AsProperty();
1894 LhsKind assign_type = Property::GetAssignType(property);
1896 // Evaluate LHS expression.
1897 switch (assign_type) {
1899 // Nothing to do here.
1901 case NAMED_PROPERTY:
1902 if (expr->is_compound()) {
1903 // We need the receiver both on the stack and in the register.
1904 VisitForStackValue(property->obj());
1905 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1907 VisitForStackValue(property->obj());
1910 case NAMED_SUPER_PROPERTY:
1912 property->obj()->AsSuperPropertyReference()->this_var());
1913 VisitForAccumulatorValue(
1914 property->obj()->AsSuperPropertyReference()->home_object());
1915 __ Push(result_register());
1916 if (expr->is_compound()) {
1917 __ Push(MemOperand(rsp, kPointerSize));
1918 __ Push(result_register());
1921 case KEYED_SUPER_PROPERTY:
1923 property->obj()->AsSuperPropertyReference()->this_var());
1925 property->obj()->AsSuperPropertyReference()->home_object());
1926 VisitForAccumulatorValue(property->key());
1927 __ Push(result_register());
1928 if (expr->is_compound()) {
1929 __ Push(MemOperand(rsp, 2 * kPointerSize));
1930 __ Push(MemOperand(rsp, 2 * kPointerSize));
1931 __ Push(result_register());
1934 case KEYED_PROPERTY: {
1935 if (expr->is_compound()) {
1936 VisitForStackValue(property->obj());
1937 VisitForStackValue(property->key());
1938 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1939 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1941 VisitForStackValue(property->obj());
1942 VisitForStackValue(property->key());
1948 // For compound assignments we need another deoptimization point after the
1949 // variable/property load.
1950 if (expr->is_compound()) {
1951 { AccumulatorValueContext context(this);
1952 switch (assign_type) {
1954 EmitVariableLoad(expr->target()->AsVariableProxy());
1955 PrepareForBailout(expr->target(), TOS_REG);
1957 case NAMED_PROPERTY:
1958 EmitNamedPropertyLoad(property);
1959 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1961 case NAMED_SUPER_PROPERTY:
1962 EmitNamedSuperPropertyLoad(property);
1963 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1965 case KEYED_SUPER_PROPERTY:
1966 EmitKeyedSuperPropertyLoad(property);
1967 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1969 case KEYED_PROPERTY:
1970 EmitKeyedPropertyLoad(property);
1971 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1976 Token::Value op = expr->binary_op();
1977 __ Push(rax); // Left operand goes on the stack.
1978 VisitForAccumulatorValue(expr->value());
1980 AccumulatorValueContext context(this);
1981 if (ShouldInlineSmiCase(op)) {
1982 EmitInlineSmiBinaryOp(expr->binary_operation(),
1987 EmitBinaryOp(expr->binary_operation(), op);
1989 // Deoptimization point in case the binary operation may have side effects.
1990 PrepareForBailout(expr->binary_operation(), TOS_REG);
1992 VisitForAccumulatorValue(expr->value());
1995 SetExpressionPosition(expr);
1998 switch (assign_type) {
2000 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2001 expr->op(), expr->AssignmentSlot());
2002 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2003 context()->Plug(rax);
2005 case NAMED_PROPERTY:
2006 EmitNamedPropertyAssignment(expr);
2008 case NAMED_SUPER_PROPERTY:
2009 EmitNamedSuperPropertyStore(property);
2010 context()->Plug(rax);
2012 case KEYED_SUPER_PROPERTY:
2013 EmitKeyedSuperPropertyStore(property);
2014 context()->Plug(rax);
2016 case KEYED_PROPERTY:
2017 EmitKeyedPropertyAssignment(expr);
2023 void FullCodeGenerator::VisitYield(Yield* expr) {
2024 Comment cmnt(masm_, "[ Yield");
2025 SetExpressionPosition(expr);
2027 // Evaluate yielded value first; the initial iterator definition depends on
2028 // this. It stays on the stack while we update the iterator.
2029 VisitForStackValue(expr->expression());
2031 switch (expr->yield_kind()) {
2032 case Yield::kSuspend:
2033 // Pop value from top-of-stack slot; box result into result register.
2034 EmitCreateIteratorResult(false);
2035 __ Push(result_register());
2037 case Yield::kInitial: {
2038 Label suspend, continuation, post_runtime, resume;
2041 __ bind(&continuation);
2042 __ RecordGeneratorContinuation();
2046 VisitForAccumulatorValue(expr->generator_object());
2047 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2048 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2049 Smi::FromInt(continuation.pos()));
2050 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2052 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2054 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
2056 __ j(equal, &post_runtime);
2057 __ Push(rax); // generator object
2058 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2059 __ movp(context_register(),
2060 Operand(rbp, StandardFrameConstants::kContextOffset));
2061 __ bind(&post_runtime);
2063 __ Pop(result_register());
2064 EmitReturnSequence();
2067 context()->Plug(result_register());
2071 case Yield::kFinal: {
2072 VisitForAccumulatorValue(expr->generator_object());
2073 __ Move(FieldOperand(result_register(),
2074 JSGeneratorObject::kContinuationOffset),
2075 Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2076 // Pop value from top-of-stack slot, box result into result register.
2077 EmitCreateIteratorResult(true);
2078 EmitUnwindBeforeReturn();
2079 EmitReturnSequence();
2083 case Yield::kDelegating: {
2084 VisitForStackValue(expr->generator_object());
2086 // Initial stack layout is as follows:
2087 // [sp + 1 * kPointerSize] iter
2088 // [sp + 0 * kPointerSize] g
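// Roughly, the code below implements the following loop (a sketch, not the
// exact desugaring):
//   received = undefined; f = 'next';
//   loop: result = iter[f](received);                         // l_call
//         if (result.done) break;                             // l_loop
//         try { received = yield result; }                    // l_try..l_resume
//         catch (e) { f = 'throw'; received = e; goto loop; } // l_catch
//         f = 'next'; goto loop;                              // l_next
//   // the value of the whole yield* expression is result.value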
2090 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2091 Label l_next, l_call, l_loop;
2092 Register load_receiver = LoadDescriptor::ReceiverRegister();
2093 Register load_name = LoadDescriptor::NameRegister();
2095 // Initial send value is undefined.
2096 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2099 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2101 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2103 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2104 __ Push(rax); // exception
2107 // try { received = %yield result }
2108 // Shuffle the received result above a try handler and yield it without re-boxing.
2111 __ Pop(rax); // result
2112 int handler_index = NewHandlerTableEntry();
2113 EnterTryBlock(handler_index, &l_catch);
2114 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2115 __ Push(rax); // result
2118 __ bind(&l_continuation);
2119 __ RecordGeneratorContinuation();
2122 __ bind(&l_suspend);
2123 const int generator_object_depth = kPointerSize + try_block_size;
2124 __ movp(rax, Operand(rsp, generator_object_depth));
2126 __ Push(Smi::FromInt(handler_index)); // handler-index
2127 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2128 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2129 Smi::FromInt(l_continuation.pos()));
2130 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2132 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2134 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2135 __ movp(context_register(),
2136 Operand(rbp, StandardFrameConstants::kContextOffset));
2137 __ Pop(rax); // result
2138 EmitReturnSequence();
2139 __ bind(&l_resume); // received in rax
2140 ExitTryBlock(handler_index);
2142 // receiver = iter; f = 'next'; arg = received;
2145 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2146 __ Push(load_name); // "next"
2147 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2148 __ Push(rax); // received
2150 // result = receiver[f](arg);
2152 __ movp(load_receiver, Operand(rsp, kPointerSize));
2153 __ Move(LoadDescriptor::SlotRegister(),
2154 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
2155 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2156 CallIC(ic, TypeFeedbackId::None());
2158 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2160 SetCallPosition(expr, 1);
2161 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2164 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2165 __ Drop(1); // The function is still on the stack; drop it.
2167 // if (!result.done) goto l_try;
2169 __ Move(load_receiver, rax);
2170 __ Push(load_receiver); // save result
2171 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2172 __ Move(LoadDescriptor::SlotRegister(),
2173 SmiFromSlot(expr->DoneFeedbackSlot()));
2174 CallLoadIC(NOT_INSIDE_TYPEOF); // rax=result.done
2175 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2177 __ testp(result_register(), result_register());
2181 __ Pop(load_receiver); // result
2182 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2183 __ Move(LoadDescriptor::SlotRegister(),
2184 SmiFromSlot(expr->ValueFeedbackSlot()));
2185 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in rax
2186 context()->DropAndPlug(2, rax); // drop iter and g
2193 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2195 JSGeneratorObject::ResumeMode resume_mode) {
2196 // The value stays in rax, and is ultimately read by the resumed generator, as
2197 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2198 // is read to throw the value when the resumed generator is already closed.
2199 // rbx will hold the generator object until the activation has been resumed.
2200 VisitForStackValue(generator);
2201 VisitForAccumulatorValue(value);
2204 // Load suspended function and context.
2205 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2206 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2209 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2211 // Push holes for arguments to generator function.
2212 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2213 __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2214 SharedFunctionInfo::kFormalParameterCountOffset);
2215 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2216 Label push_argument_holes, push_frame;
2217 __ bind(&push_argument_holes);
2218 __ subp(rdx, Immediate(1));
2219 __ j(carry, &push_frame);
2221 __ jmp(&push_argument_holes);
2223 // Enter a new JavaScript frame, and initialize its slots as they were when
2224 // the generator was suspended.
2225 Label resume_frame, done;
2226 __ bind(&push_frame);
2227 __ call(&resume_frame);
2229 __ bind(&resume_frame);
2230 __ pushq(rbp); // Caller's frame pointer.
2232 __ Push(rsi); // Callee's context.
2233 __ Push(rdi); // Callee's JS Function.
2235 // Load the operand stack size.
2236 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2237 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2238 __ SmiToInteger32(rdx, rdx);
2240 // If we are sending a value and there is no operand stack, we can jump back in directly.
2242 if (resume_mode == JSGeneratorObject::NEXT) {
2244 __ cmpp(rdx, Immediate(0));
2245 __ j(not_zero, &slow_resume);
2246 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2247 __ SmiToInteger64(rcx,
2248 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2250 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2251 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2253 __ bind(&slow_resume);
2256 // Otherwise, we push holes for the operand stack and call the runtime to fix
2257 // up the stack and the handlers.
2258 Label push_operand_holes, call_resume;
2259 __ bind(&push_operand_holes);
2260 __ subp(rdx, Immediate(1));
2261 __ j(carry, &call_resume);
2263 __ jmp(&push_operand_holes);
2264 __ bind(&call_resume);
2266 __ Push(result_register());
2267 __ Push(Smi::FromInt(resume_mode));
2268 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2269 // Not reached: the runtime call returns elsewhere.
2270 __ Abort(kGeneratorFailedToResume);
2273 context()->Plug(result_register());
2277 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
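// Builds a { value, done } iterator result object in rax: the value is taken
// from the top of the stack and 'done' is the compile-time constant passed in.
// Allocation is attempted inline and falls back to the runtime on failure.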
2281 const int instance_size = 5 * kPointerSize;
2282 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2285 __ Allocate(instance_size, rax, rcx, rdx, &gc_required, TAG_OBJECT);
2288 __ bind(&gc_required);
2289 __ Push(Smi::FromInt(instance_size));
2290 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2291 __ movp(context_register(),
2292 Operand(rbp, StandardFrameConstants::kContextOffset));
2294 __ bind(&allocated);
2295 __ movp(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2296 __ movp(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
2297 __ movp(rbx, ContextOperand(rbx, Context::ITERATOR_RESULT_MAP_INDEX));
2299 __ Move(rdx, isolate()->factory()->ToBoolean(done));
2300 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2301 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2302 isolate()->factory()->empty_fixed_array());
2303 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2304 isolate()->factory()->empty_fixed_array());
2305 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2307 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2310 // Only the value field needs a write barrier, as the other values are in the root set.
2312 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2313 rcx, rdx, kDontSaveFPRegs);
2317 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2318 SetExpressionPosition(prop);
2319 Literal* key = prop->key()->AsLiteral();
2320 DCHECK(!prop->IsSuperAccess());
2322 __ Move(LoadDescriptor::NameRegister(), key->value());
2323 __ Move(LoadDescriptor::SlotRegister(),
2324 SmiFromSlot(prop->PropertyFeedbackSlot()));
2325 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2329 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2330 // Stack: receiver, home_object
2331 SetExpressionPosition(prop);
2332 Literal* key = prop->key()->AsLiteral();
2333 DCHECK(!key->value()->IsSmi());
2334 DCHECK(prop->IsSuperAccess());
2336 __ Push(key->value());
2337 __ Push(Smi::FromInt(language_mode()));
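// Runtime call arguments, in push order: receiver and home_object (pushed by
// the caller, see the stack comment above), the property name, and the
// language mode.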
2338 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2342 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2343 SetExpressionPosition(prop);
2344 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2345 __ Move(LoadDescriptor::SlotRegister(),
2346 SmiFromSlot(prop->PropertyFeedbackSlot()));
2351 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2352 // Stack: receiver, home_object, key.
2353 SetExpressionPosition(prop);
2354 __ Push(Smi::FromInt(language_mode()));
2355 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2359 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2362 Expression* right) {
2363 // Do combined smi check of the operands. Left operand is on the
2364 // stack (popped into rdx). Right operand is in rax but moved into
2365 // rcx to make the shifts easier.
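// (Illustrative) For something like x += 1 with smi operands, the inline
// smi_case below performs the operation directly; non-smi operands, and smi
// results that overflow, fall back to the BinaryOpIC at stub_call.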
2366 Label done, stub_call, smi_case;
2370 JumpPatchSite patch_site(masm_);
2371 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2373 __ bind(&stub_call);
2376 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2377 CallIC(code, expr->BinaryOperationFeedbackId());
2378 patch_site.EmitPatchInfo();
2379 __ jmp(&done, Label::kNear);
2384 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2387 __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2390 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2393 __ SmiAdd(rax, rdx, rcx, &stub_call);
2396 __ SmiSub(rax, rdx, rcx, &stub_call);
2399 __ SmiMul(rax, rdx, rcx, &stub_call);
2402 __ SmiOr(rax, rdx, rcx);
2404 case Token::BIT_AND:
2405 __ SmiAnd(rax, rdx, rcx);
2407 case Token::BIT_XOR:
2408 __ SmiXor(rax, rdx, rcx);
2416 context()->Plug(rax);
2420 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2421 int* used_store_slots) {
2422 // Constructor is in rax.
2423 DCHECK(lit != NULL);
2426 // No access check is needed here since the constructor is created by the class literal.
2428 Register scratch = rbx;
2429 __ movp(scratch, FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset));
2432 for (int i = 0; i < lit->properties()->length(); i++) {
2433 ObjectLiteral::Property* property = lit->properties()->at(i);
2434 Expression* value = property->value();
2436 if (property->is_static()) {
2437 __ Push(Operand(rsp, kPointerSize)); // constructor
2439 __ Push(Operand(rsp, 0)); // prototype
2441 EmitPropertyKey(property, lit->GetIdForProperty(i));
2443 // The static "prototype" property is read-only. The non-computed property
2444 // name case is already rejected in the parser, so a static computed name is
2445 // the only case where an own read-only property has to be checked for; it is
2446 // special-cased here so the check is not emitted for every property.
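// (Illustrative) class C { static [key]() {} } must throw here if key happens
// to evaluate to the string "prototype" at run time.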
2447 if (property->is_static() && property->is_computed_name()) {
2448 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2452 VisitForStackValue(value);
2453 EmitSetHomeObjectIfNeeded(value, 2,
2454 lit->SlotForHomeObject(value, used_store_slots));
2456 switch (property->kind()) {
2457 case ObjectLiteral::Property::CONSTANT:
2458 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2459 case ObjectLiteral::Property::PROTOTYPE:
2461 case ObjectLiteral::Property::COMPUTED:
2462 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2465 case ObjectLiteral::Property::GETTER:
2466 __ Push(Smi::FromInt(DONT_ENUM));
2467 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2470 case ObjectLiteral::Property::SETTER:
2471 __ Push(Smi::FromInt(DONT_ENUM));
2472 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2480 // Set both the prototype and constructor to have fast properties, and also
2481 // freeze them in strong mode.
2482 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2486 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2489 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2490 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2491 CallIC(code, expr->BinaryOperationFeedbackId());
2492 patch_site.EmitPatchInfo();
2493 context()->Plug(rax);
2497 void FullCodeGenerator::EmitAssignment(Expression* expr,
2498 FeedbackVectorICSlot slot) {
2499 DCHECK(expr->IsValidReferenceExpressionOrThis());
2501 Property* prop = expr->AsProperty();
2502 LhsKind assign_type = Property::GetAssignType(prop);
2504 switch (assign_type) {
2506 Variable* var = expr->AsVariableProxy()->var();
2507 EffectContext context(this);
2508 EmitVariableAssignment(var, Token::ASSIGN, slot);
2511 case NAMED_PROPERTY: {
2512 __ Push(rax); // Preserve value.
2513 VisitForAccumulatorValue(prop->obj());
2514 __ Move(StoreDescriptor::ReceiverRegister(), rax);
2515 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2516 __ Move(StoreDescriptor::NameRegister(),
2517 prop->key()->AsLiteral()->value());
2518 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2522 case NAMED_SUPER_PROPERTY: {
2524 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2525 VisitForAccumulatorValue(
2526 prop->obj()->AsSuperPropertyReference()->home_object());
2527 // stack: value, this; rax: home_object
2528 Register scratch = rcx;
2529 Register scratch2 = rdx;
2530 __ Move(scratch, result_register()); // home_object
2531 __ movp(rax, MemOperand(rsp, kPointerSize)); // value
2532 __ movp(scratch2, MemOperand(rsp, 0)); // this
2533 __ movp(MemOperand(rsp, kPointerSize), scratch2); // this
2534 __ movp(MemOperand(rsp, 0), scratch); // home_object
2535 // stack: this, home_object; rax: value
2536 EmitNamedSuperPropertyStore(prop);
2539 case KEYED_SUPER_PROPERTY: {
2541 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2543 prop->obj()->AsSuperPropertyReference()->home_object());
2544 VisitForAccumulatorValue(prop->key());
2545 Register scratch = rcx;
2546 Register scratch2 = rdx;
2547 __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize)); // value
2548 // stack: value, this, home_object; rax: key, rdx: value
2549 __ movp(scratch, MemOperand(rsp, kPointerSize)); // this
2550 __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
2551 __ movp(scratch, MemOperand(rsp, 0)); // home_object
2552 __ movp(MemOperand(rsp, kPointerSize), scratch);
2553 __ movp(MemOperand(rsp, 0), rax);
2554 __ Move(rax, scratch2);
2555 // stack: this, home_object, key; rax: value.
2556 EmitKeyedSuperPropertyStore(prop);
2559 case KEYED_PROPERTY: {
2560 __ Push(rax); // Preserve value.
2561 VisitForStackValue(prop->obj());
2562 VisitForAccumulatorValue(prop->key());
2563 __ Move(StoreDescriptor::NameRegister(), rax);
2564 __ Pop(StoreDescriptor::ReceiverRegister());
2565 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2566 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2568 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2573 context()->Plug(rax);
2577 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2578 Variable* var, MemOperand location) {
2579 __ movp(location, rax);
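// Stack locals need no write barrier; a context slot lives on the heap, so the
// store above has to be recorded for the garbage collector.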
2580 if (var->IsContextSlot()) {
2582 __ RecordWriteContextSlot(
2583 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2588 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2589 FeedbackVectorICSlot slot) {
2590 if (var->IsUnallocated()) {
2591 // Global var, const, or let.
2592 __ Move(StoreDescriptor::NameRegister(), var->name());
2593 __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2594 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2597 } else if (var->IsGlobalSlot()) {
2598 // Global var, const, or let.
2599 DCHECK(var->index() > 0);
2600 DCHECK(var->IsStaticGlobalObjectProperty());
2601 int const slot = var->index();
2602 int const depth = scope()->ContextChainLength(var->scope());
2603 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2604 __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
2605 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
2606 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2609 __ Push(Smi::FromInt(slot));
2611 __ CallRuntime(is_strict(language_mode())
2612 ? Runtime::kStoreGlobalViaContext_Strict
2613 : Runtime::kStoreGlobalViaContext_Sloppy,
2617 } else if (var->mode() == LET && op != Token::INIT_LET) {
2618 // Non-initializing assignment to let variable needs a write barrier.
2619 DCHECK(!var->IsLookupSlot());
2620 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2622 MemOperand location = VarOperand(var, rcx);
2623 __ movp(rdx, location);
2624 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2625 __ j(not_equal, &assign, Label::kNear);
2626 __ Push(var->name());
2627 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2629 EmitStoreToStackLocalOrContextSlot(var, location);
2631 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2632 // Assignment to const variable needs a write barrier.
2633 DCHECK(!var->IsLookupSlot());
2634 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2636 MemOperand location = VarOperand(var, rcx);
2637 __ movp(rdx, location);
2638 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2639 __ j(not_equal, &const_error, Label::kNear);
2640 __ Push(var->name());
2641 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2642 __ bind(&const_error);
2643 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2645 } else if (var->is_this() && op == Token::INIT_CONST) {
2646 // Initializing assignment to const {this} needs a write barrier.
2647 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2648 Label uninitialized_this;
2649 MemOperand location = VarOperand(var, rcx);
2650 __ movp(rdx, location);
2651 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2652 __ j(equal, &uninitialized_this);
2653 __ Push(var->name());
2654 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2655 __ bind(&uninitialized_this);
2656 EmitStoreToStackLocalOrContextSlot(var, location);
2658 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2659 if (var->IsLookupSlot()) {
2660 // Assignment to var.
2661 __ Push(rax); // Value.
2662 __ Push(rsi); // Context.
2663 __ Push(var->name());
2664 __ Push(Smi::FromInt(language_mode()));
2665 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2667 // Assignment to var or initializing assignment to let/const in harmony mode.
2669 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2670 MemOperand location = VarOperand(var, rcx);
2671 if (generate_debug_code_ && op == Token::INIT_LET) {
2672 // Check for an uninitialized let binding.
2673 __ movp(rdx, location);
2674 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2675 __ Check(equal, kLetBindingReInitialization);
2677 EmitStoreToStackLocalOrContextSlot(var, location);
2680 } else if (op == Token::INIT_CONST_LEGACY) {
2681 // Const initializers need a write barrier.
2682 DCHECK(var->mode() == CONST_LEGACY);
2683 DCHECK(!var->IsParameter()); // No const parameters.
2684 if (var->IsLookupSlot()) {
2687 __ Push(var->name());
2688 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2690 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2692 MemOperand location = VarOperand(var, rcx);
2693 __ movp(rdx, location);
2694 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2695 __ j(not_equal, &skip);
2696 EmitStoreToStackLocalOrContextSlot(var, location);
2701 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2702 if (is_strict(language_mode())) {
2703 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2705 // Silently ignore store in sloppy mode.
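// (Illustrative) A non-initializing assignment to a legacy const binding, e.g.
// assigning to the function name inside a named function expression
// ((function f() { f = 1; })), is silently ignored in sloppy mode and throws
// in strict mode via the runtime call above.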
2710 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2711 // Assignment to a property, using a named store IC.
2712 Property* prop = expr->target()->AsProperty();
2713 DCHECK(prop != NULL);
2714 DCHECK(prop->key()->IsLiteral());
2716 __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2717 __ Pop(StoreDescriptor::ReceiverRegister());
2718 if (FLAG_vector_stores) {
2719 EmitLoadStoreICSlot(expr->AssignmentSlot());
2722 CallStoreIC(expr->AssignmentFeedbackId());
2725 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2726 context()->Plug(rax);
2730 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2731 // Assignment to named property of super.
2733 // stack : receiver ('this'), home_object
2734 DCHECK(prop != NULL);
2735 Literal* key = prop->key()->AsLiteral();
2736 DCHECK(key != NULL);
2738 __ Push(key->value());
2740 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2741 : Runtime::kStoreToSuper_Sloppy),
2746 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2747 // Assignment to a keyed property of super.
2749 // stack : receiver ('this'), home_object, key
2750 DCHECK(prop != NULL);
2754 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2755 : Runtime::kStoreKeyedToSuper_Sloppy),
2760 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2761 // Assignment to a property, using a keyed store IC.
2762 __ Pop(StoreDescriptor::NameRegister()); // Key.
2763 __ Pop(StoreDescriptor::ReceiverRegister());
2764 DCHECK(StoreDescriptor::ValueRegister().is(rax));
2766 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2767 if (FLAG_vector_stores) {
2768 EmitLoadStoreICSlot(expr->AssignmentSlot());
2771 CallIC(ic, expr->AssignmentFeedbackId());
2774 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2775 context()->Plug(rax);
2779 void FullCodeGenerator::VisitProperty(Property* expr) {
2780 Comment cmnt(masm_, "[ Property");
2781 SetExpressionPosition(expr);
2783 Expression* key = expr->key();
2785 if (key->IsPropertyName()) {
2786 if (!expr->IsSuperAccess()) {
2787 VisitForAccumulatorValue(expr->obj());
2788 DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2789 __ movp(LoadDescriptor::ReceiverRegister(), rax);
2790 EmitNamedPropertyLoad(expr);
2792 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2794 expr->obj()->AsSuperPropertyReference()->home_object());
2795 EmitNamedSuperPropertyLoad(expr);
2798 if (!expr->IsSuperAccess()) {
2799 VisitForStackValue(expr->obj());
2800 VisitForAccumulatorValue(expr->key());
2801 __ Move(LoadDescriptor::NameRegister(), rax);
2802 __ Pop(LoadDescriptor::ReceiverRegister());
2803 EmitKeyedPropertyLoad(expr);
2805 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2807 expr->obj()->AsSuperPropertyReference()->home_object());
2808 VisitForStackValue(expr->key());
2809 EmitKeyedSuperPropertyLoad(expr);
2812 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2813 context()->Plug(rax);
2817 void FullCodeGenerator::CallIC(Handle<Code> code,
2818 TypeFeedbackId ast_id) {
2820 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2824 // Code common for calls using the IC.
2825 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2826 Expression* callee = expr->expression();
2828 CallICState::CallType call_type =
2829 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2830 // Get the target function.
2831 if (call_type == CallICState::FUNCTION) {
2832 { StackValueContext context(this);
2833 EmitVariableLoad(callee->AsVariableProxy());
2834 PrepareForBailout(callee, NO_REGISTERS);
2836 // Push undefined as receiver. This is patched in the method prologue if it
2837 // is a sloppy mode method.
2838 __ Push(isolate()->factory()->undefined_value());
2840 // Load the function from the receiver.
2841 DCHECK(callee->IsProperty());
2842 DCHECK(!callee->AsProperty()->IsSuperAccess());
2843 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2844 EmitNamedPropertyLoad(callee->AsProperty());
2845 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2846 // Push the target function under the receiver.
2847 __ Push(Operand(rsp, 0));
2848 __ movp(Operand(rsp, kPointerSize), rax);
2851 EmitCall(expr, call_type);
2855 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2856 Expression* callee = expr->expression();
2857 DCHECK(callee->IsProperty());
2858 Property* prop = callee->AsProperty();
2859 DCHECK(prop->IsSuperAccess());
2860 SetExpressionPosition(prop);
2862 Literal* key = prop->key()->AsLiteral();
2863 DCHECK(!key->value()->IsSmi());
2864 // Load the function from the receiver.
2865 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2866 VisitForStackValue(super_ref->home_object());
2867 VisitForAccumulatorValue(super_ref->this_var());
2870 __ Push(Operand(rsp, kPointerSize * 2));
2871 __ Push(key->value());
2872 __ Push(Smi::FromInt(language_mode()));
2876 // - this (receiver)
2877 // - this (receiver) <-- LoadFromSuper will pop here and below.
2881 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2883 // Replace home_object with target function.
2884 __ movp(Operand(rsp, kPointerSize), rax);
2887 // - target function
2888 // - this (receiver)
2889 EmitCall(expr, CallICState::METHOD);
2893 // Common code for calls using the IC.
2894 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2897 VisitForAccumulatorValue(key);
2899 Expression* callee = expr->expression();
2901 // Load the function from the receiver.
2902 DCHECK(callee->IsProperty());
2903 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2904 __ Move(LoadDescriptor::NameRegister(), rax);
2905 EmitKeyedPropertyLoad(callee->AsProperty());
2906 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2908 // Push the target function under the receiver.
2909 __ Push(Operand(rsp, 0));
2910 __ movp(Operand(rsp, kPointerSize), rax);
2912 EmitCall(expr, CallICState::METHOD);
2916 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2917 Expression* callee = expr->expression();
2918 DCHECK(callee->IsProperty());
2919 Property* prop = callee->AsProperty();
2920 DCHECK(prop->IsSuperAccess());
2922 SetExpressionPosition(prop);
2923 // Load the function from the receiver.
2924 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2925 VisitForStackValue(super_ref->home_object());
2926 VisitForAccumulatorValue(super_ref->this_var());
2929 __ Push(Operand(rsp, kPointerSize * 2));
2930 VisitForStackValue(prop->key());
2931 __ Push(Smi::FromInt(language_mode()));
2935 // - this (receiver)
2936 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2940 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2942 // Replace home_object with target function.
2943 __ movp(Operand(rsp, kPointerSize), rax);
2946 // - target function
2947 // - this (receiver)
2948 EmitCall(expr, CallICState::METHOD);
2952 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2953 // Load the arguments.
2954 ZoneList<Expression*>* args = expr->arguments();
2955 int arg_count = args->length();
2956 for (int i = 0; i < arg_count; i++) {
2957 VisitForStackValue(args->at(i));
2960 SetCallPosition(expr, arg_count);
2961 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2962 __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
2963 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2964 // Don't assign a type feedback id to the IC, since type feedback is provided
2965 // by the vector above.
2968 RecordJSReturnSite(expr);
2970 // Restore context register.
2971 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2972 // Discard the function left on TOS.
2973 context()->DropAndPlug(1, rax);
2977 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2978 // Push copy of the first argument or undefined if it doesn't exist.
2979 if (arg_count > 0) {
2980 __ Push(Operand(rsp, arg_count * kPointerSize));
2982 __ PushRoot(Heap::kUndefinedValueRootIndex);
2985 // Push the enclosing function.
2986 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2988 // Push the language mode.
2989 __ Push(Smi::FromInt(language_mode()));
2991 // Push the start position of the scope the call resides in.
2992 __ Push(Smi::FromInt(scope()->start_position()));
2994 // Do the runtime call.
2995 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2999 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3000 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3001 VariableProxy* callee = expr->expression()->AsVariableProxy();
3002 if (callee->var()->IsLookupSlot()) {
3004 SetExpressionPosition(callee);
3005 // Generate code for loading from variables potentially shadowed by
3006 // eval-introduced variables.
3007 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3009 // Call the runtime to find the function to call (returned in rax) and
3010 // the object holding it (returned in rdx).
3011 __ Push(context_register());
3012 __ Push(callee->name());
3013 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3014 __ Push(rax); // Function.
3015 __ Push(rdx); // Receiver.
3016 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3018 // If fast case code has been generated, emit code to push the function
3019 // and receiver and have the slow path jump around this code.
3020 if (done.is_linked()) {
3022 __ jmp(&call, Label::kNear);
3026 // Pass undefined as the receiver, which is the WithBaseObject of a
3027 // non-object environment record. If the callee is sloppy, it will patch
3028 // it up to be the global receiver.
3029 __ PushRoot(Heap::kUndefinedValueRootIndex);
3033 VisitForStackValue(callee);
3034 // refEnv.WithBaseObject()
3035 __ PushRoot(Heap::kUndefinedValueRootIndex);
3040 void FullCodeGenerator::VisitCall(Call* expr) {
3042 // We want to verify that RecordJSReturnSite gets called on all paths
3043 // through this function. Avoid early returns.
3044 expr->return_is_recorded_ = false;
3047 Comment cmnt(masm_, "[ Call");
3048 Expression* callee = expr->expression();
3049 Call::CallType call_type = expr->GetCallType(isolate());
3051 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3052 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3053 // to resolve the function we need to call. Then we call the resolved
3054 // function using the given arguments.
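// (Illustrative) For eval(src) the callee may be the real eval (a direct eval)
// or any other value bound to the name "eval"; the resolution step below
// decides which case applies and returns the function that is actually called.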
3055 ZoneList<Expression*>* args = expr->arguments();
3056 int arg_count = args->length();
3057 PushCalleeAndWithBaseObject(expr);
3059 // Push the arguments.
3060 for (int i = 0; i < arg_count; i++) {
3061 VisitForStackValue(args->at(i));
3064 // Push a copy of the function (found below the arguments) and resolve eval.
3066 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
3067 EmitResolvePossiblyDirectEval(arg_count);
3069 // Touch up the callee.
3070 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
3072 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3074 SetCallPosition(expr, arg_count);
3075 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3076 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
3078 RecordJSReturnSite(expr);
3079 // Restore context register.
3080 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3081 context()->DropAndPlug(1, rax);
3082 } else if (call_type == Call::GLOBAL_CALL) {
3083 EmitCallWithLoadIC(expr);
3085 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3086 // Call to a lookup slot (dynamically introduced variable).
3087 PushCalleeAndWithBaseObject(expr);
3089 } else if (call_type == Call::PROPERTY_CALL) {
3090 Property* property = callee->AsProperty();
3091 bool is_named_call = property->key()->IsPropertyName();
3092 if (property->IsSuperAccess()) {
3093 if (is_named_call) {
3094 EmitSuperCallWithLoadIC(expr);
3096 EmitKeyedSuperCallWithLoadIC(expr);
3099 VisitForStackValue(property->obj());
3100 if (is_named_call) {
3101 EmitCallWithLoadIC(expr);
3103 EmitKeyedCallWithLoadIC(expr, property->key());
3106 } else if (call_type == Call::SUPER_CALL) {
3107 EmitSuperConstructorCall(expr);
3109 DCHECK(call_type == Call::OTHER_CALL);
3110 // Call to an arbitrary expression not handled specially above.
3111 VisitForStackValue(callee);
3112 __ PushRoot(Heap::kUndefinedValueRootIndex);
3113 // Emit function call.
3118 // RecordJSReturnSite should have been called.
3119 DCHECK(expr->return_is_recorded_);
3124 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3125 Comment cmnt(masm_, "[ CallNew");
3126 // According to ECMA-262, section 11.2.2, page 44, the function
3127 // expression in new calls must be evaluated before the arguments.
3130 // Push constructor on the stack. If it's not a function it's used as
3131 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
3133 DCHECK(!expr->expression()->IsSuperPropertyReference());
3134 VisitForStackValue(expr->expression());
3136 // Push the arguments ("left-to-right") on the stack.
3137 ZoneList<Expression*>* args = expr->arguments();
3138 int arg_count = args->length();
3139 for (int i = 0; i < arg_count; i++) {
3140 VisitForStackValue(args->at(i));
3143 // Call the construct call builtin that handles allocation and
3144 // constructor invocation.
3145 SetConstructCallPosition(expr);
3147 // Load function and argument count into rdi and rax.
3148 __ Set(rax, arg_count);
3149 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3151 // Record call targets in unoptimized code, but not in the snapshot.
3152 if (FLAG_pretenuring_call_new) {
3153 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3154 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3155 expr->CallNewFeedbackSlot().ToInt() + 1);
3158 __ Move(rbx, FeedbackVector());
3159 __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
3161 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3162 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3163 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3164 context()->Plug(rax);
3168 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3169 SuperCallReference* super_call_ref =
3170 expr->expression()->AsSuperCallReference();
3171 DCHECK_NOT_NULL(super_call_ref);
3173 EmitLoadSuperConstructor(super_call_ref);
3174 __ Push(result_register());
3176 // Push the arguments ("left-to-right") on the stack.
3177 ZoneList<Expression*>* args = expr->arguments();
3178 int arg_count = args->length();
3179 for (int i = 0; i < arg_count; i++) {
3180 VisitForStackValue(args->at(i));
3183 // Call the construct call builtin that handles allocation and
3184 // constructor invocation.
3185 SetConstructCallPosition(expr);
3187 // Load original constructor into rcx.
3188 VisitForAccumulatorValue(super_call_ref->new_target_var());
3189 __ movp(rcx, result_register());
3191 // Load function and argument count into rdi and rax.
3192 __ Set(rax, arg_count);
3193 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
3195 // Record call targets in unoptimized code.
3196 if (FLAG_pretenuring_call_new) {
3198 /* TODO(dslomov): support pretenuring.
3199 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3200 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3201 expr->CallNewFeedbackSlot().ToInt() + 1);
3205 __ Move(rbx, FeedbackVector());
3206 __ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
3208 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3209 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3211 RecordJSReturnSite(expr);
3213 context()->Plug(rax);
3217 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3218 ZoneList<Expression*>* args = expr->arguments();
3219 DCHECK(args->length() == 1);
3221 VisitForAccumulatorValue(args->at(0));
3223 Label materialize_true, materialize_false;
3224 Label* if_true = NULL;
3225 Label* if_false = NULL;
3226 Label* fall_through = NULL;
3227 context()->PrepareTest(&materialize_true, &materialize_false,
3228 &if_true, &if_false, &fall_through);
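// Pattern used by this and the following intrinsics: PrepareTest picks labels
// based on the surrounding expression context (branch directly in a test
// context, otherwise materialize true/false), and Split emits the conditional
// jump(s) to if_true / if_false / fall_through.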
3230 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3231 __ JumpIfSmi(rax, if_true);
3234 context()->Plug(if_true, if_false);
3238 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3239 ZoneList<Expression*>* args = expr->arguments();
3240 DCHECK(args->length() == 1);
3242 VisitForAccumulatorValue(args->at(0));
3244 Label materialize_true, materialize_false;
3245 Label* if_true = NULL;
3246 Label* if_false = NULL;
3247 Label* fall_through = NULL;
3248 context()->PrepareTest(&materialize_true, &materialize_false,
3249 &if_true, &if_false, &fall_through);
3251 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3252 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
3253 Split(non_negative_smi, if_true, if_false, fall_through);
3255 context()->Plug(if_true, if_false);
3259 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3260 ZoneList<Expression*>* args = expr->arguments();
3261 DCHECK(args->length() == 1);
3263 VisitForAccumulatorValue(args->at(0));
3265 Label materialize_true, materialize_false;
3266 Label* if_true = NULL;
3267 Label* if_false = NULL;
3268 Label* fall_through = NULL;
3269 context()->PrepareTest(&materialize_true, &materialize_false,
3270 &if_true, &if_false, &fall_through);
3272 __ JumpIfSmi(rax, if_false);
3273 __ CompareRoot(rax, Heap::kNullValueRootIndex);
3274 __ j(equal, if_true);
3275 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3276 // Undetectable objects behave like undefined when tested with typeof.
3277 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3278 Immediate(1 << Map::kIsUndetectable));
3279 __ j(not_zero, if_false);
3280 __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3281 __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3282 __ j(below, if_false);
3283 __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3285 Split(below_equal, if_true, if_false, fall_through);
3287 context()->Plug(if_true, if_false);
3291 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3292 ZoneList<Expression*>* args = expr->arguments();
3293 DCHECK(args->length() == 1);
3295 VisitForAccumulatorValue(args->at(0));
3297 Label materialize_true, materialize_false;
3298 Label* if_true = NULL;
3299 Label* if_false = NULL;
3300 Label* fall_through = NULL;
3301 context()->PrepareTest(&materialize_true, &materialize_false,
3302 &if_true, &if_false, &fall_through);
3304 __ JumpIfSmi(rax, if_false);
3305 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
3306 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3307 Split(above_equal, if_true, if_false, fall_through);
3309 context()->Plug(if_true, if_false);
3313 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3314 ZoneList<Expression*>* args = expr->arguments();
3315 DCHECK(args->length() == 1);
3317 VisitForAccumulatorValue(args->at(0));
3319 Label materialize_true, materialize_false;
3320 Label* if_true = NULL;
3321 Label* if_false = NULL;
3322 Label* fall_through = NULL;
3323 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3324 &if_false, &fall_through);
3326 __ JumpIfSmi(rax, if_false);
3327 __ CmpObjectType(rax, SIMD128_VALUE_TYPE, rbx);
3328 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3329 Split(equal, if_true, if_false, fall_through);
3331 context()->Plug(if_true, if_false);
3335 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3336 CallRuntime* expr) {
3337 ZoneList<Expression*>* args = expr->arguments();
3338 DCHECK(args->length() == 1);
3340 VisitForAccumulatorValue(args->at(0));
3342 Label materialize_true, materialize_false, skip_lookup;
3343 Label* if_true = NULL;
3344 Label* if_false = NULL;
3345 Label* fall_through = NULL;
3346 context()->PrepareTest(&materialize_true, &materialize_false,
3347 &if_true, &if_false, &fall_through);
3349 __ AssertNotSmi(rax);
3351 // Check whether this map has already been checked to be safe for default valueOf.
3353 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3354 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3355 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3356 __ j(not_zero, &skip_lookup);
3358 // Check for fast case object. Generate false result for slow case object.
3359 __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3360 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3361 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3362 __ j(equal, if_false);
3364 // Look for the valueOf string in the descriptor array and indicate false if
3365 // it is found. Since we omit an enumeration index check, a valueOf added via
3366 // a transition that shares this descriptor array yields a false positive here.
3367 Label entry, loop, done;
3369 // Skip loop if no descriptors are valid.
3370 __ NumberOfOwnDescriptors(rcx, rbx);
3371 __ cmpp(rcx, Immediate(0));
3374 __ LoadInstanceDescriptors(rbx, r8);
3375 // rbx: descriptor array.
3376 // rcx: valid entries in the descriptor array.
3377 // Calculate the end of the descriptor array.
3378 __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3380 Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3381 // Calculate location of the first key name.
3382 __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3383 // Loop through all the keys in the descriptor array. If one of these is the
3384 // internalized string "valueOf" the result is false.
3387 __ movp(rdx, FieldOperand(r8, 0));
3388 __ Cmp(rdx, isolate()->factory()->value_of_string());
3389 __ j(equal, if_false);
3390 __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3393 __ j(not_equal, &loop);
3397 // Set the bit in the map to indicate that there is no local valueOf field.
3398 __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3399 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3401 __ bind(&skip_lookup);
3403 // If a valueOf property is not found on the object, check that its
3404 // prototype is the unmodified String prototype. If it is not, the result is false.
3405 __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3406 __ testp(rcx, Immediate(kSmiTagMask));
3407 __ j(zero, if_false);
3408 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3409 __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3410 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3412 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3413 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3414 Split(equal, if_true, if_false, fall_through);
3416 context()->Plug(if_true, if_false);
3420 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3421 ZoneList<Expression*>* args = expr->arguments();
3422 DCHECK(args->length() == 1);
3424 VisitForAccumulatorValue(args->at(0));
3426 Label materialize_true, materialize_false;
3427 Label* if_true = NULL;
3428 Label* if_false = NULL;
3429 Label* fall_through = NULL;
3430 context()->PrepareTest(&materialize_true, &materialize_false,
3431 &if_true, &if_false, &fall_through);
3433 __ JumpIfSmi(rax, if_false);
3434 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3435 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3436 Split(equal, if_true, if_false, fall_through);
3438 context()->Plug(if_true, if_false);
3442 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3443 ZoneList<Expression*>* args = expr->arguments();
3444 DCHECK(args->length() == 1);
3446 VisitForAccumulatorValue(args->at(0));
3448 Label materialize_true, materialize_false;
3449 Label* if_true = NULL;
3450 Label* if_false = NULL;
3451 Label* fall_through = NULL;
3452 context()->PrepareTest(&materialize_true, &materialize_false,
3453 &if_true, &if_false, &fall_through);
3455 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3456 __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
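// A heap number is -0.0 exactly when its upper (sign/exponent) word is
// 0x80000000 and its lower (mantissa) word is zero; the two compares below
// check precisely that.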
3457 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3459 __ j(no_overflow, if_false);
3460 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3461 Immediate(0x00000000));
3462 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3463 Split(equal, if_true, if_false, fall_through);
3465 context()->Plug(if_true, if_false);
3469 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3470 ZoneList<Expression*>* args = expr->arguments();
3471 DCHECK(args->length() == 1);
3473 VisitForAccumulatorValue(args->at(0));
3475 Label materialize_true, materialize_false;
3476 Label* if_true = NULL;
3477 Label* if_false = NULL;
3478 Label* fall_through = NULL;
3479 context()->PrepareTest(&materialize_true, &materialize_false,
3480 &if_true, &if_false, &fall_through);
3482 __ JumpIfSmi(rax, if_false);
3483 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3484 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3485 Split(equal, if_true, if_false, fall_through);
3487 context()->Plug(if_true, if_false);
3491 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3492 ZoneList<Expression*>* args = expr->arguments();
3493 DCHECK(args->length() == 1);
3495 VisitForAccumulatorValue(args->at(0));
3497 Label materialize_true, materialize_false;
3498 Label* if_true = NULL;
3499 Label* if_false = NULL;
3500 Label* fall_through = NULL;
3501 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3502 &if_false, &fall_through);
3504 __ JumpIfSmi(rax, if_false);
3505 __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
3506 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3507 Split(equal, if_true, if_false, fall_through);
3509 context()->Plug(if_true, if_false);
3513 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3514 ZoneList<Expression*>* args = expr->arguments();
3515 DCHECK(args->length() == 1);
3517 VisitForAccumulatorValue(args->at(0));
3519 Label materialize_true, materialize_false;
3520 Label* if_true = NULL;
3521 Label* if_false = NULL;
3522 Label* fall_through = NULL;
3523 context()->PrepareTest(&materialize_true, &materialize_false,
3524 &if_true, &if_false, &fall_through);
3526 __ JumpIfSmi(rax, if_false);
3527 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3528 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3529 Split(equal, if_true, if_false, fall_through);
3531 context()->Plug(if_true, if_false);
3535 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3536 ZoneList<Expression*>* args = expr->arguments();
3537 DCHECK(args->length() == 1);
3539 VisitForAccumulatorValue(args->at(0));
3541 Label materialize_true, materialize_false;
3542 Label* if_true = NULL;
3543 Label* if_false = NULL;
3544 Label* fall_through = NULL;
3545 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3546 &if_false, &fall_through);
3548 __ JumpIfSmi(rax, if_false);
3550 __ movp(map, FieldOperand(rax, HeapObject::kMapOffset));
3551 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3552 __ j(less, if_false);
3553 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3555 Split(less_equal, if_true, if_false, fall_through);
3557 context()->Plug(if_true, if_false);
3561 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3562 DCHECK(expr->arguments()->length() == 0);
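// (Illustrative) %_IsConstructCall() is true when the enclosing function was
// invoked with 'new'; the code below skips a possible arguments adaptor frame
// and tests the calling frame's marker.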
3564 Label materialize_true, materialize_false;
3565 Label* if_true = NULL;
3566 Label* if_false = NULL;
3567 Label* fall_through = NULL;
3568 context()->PrepareTest(&materialize_true, &materialize_false,
3569 &if_true, &if_false, &fall_through);
3571 // Get the frame pointer for the calling frame.
3572 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3574 // Skip the arguments adaptor frame if it exists.
3575 Label check_frame_marker;
3576 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3577 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3578 __ j(not_equal, &check_frame_marker);
3579 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3581 // Check the marker in the calling frame.
3582 __ bind(&check_frame_marker);
3583 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3584 Smi::FromInt(StackFrame::CONSTRUCT));
3585 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3586 Split(equal, if_true, if_false, fall_through);
3588 context()->Plug(if_true, if_false);
3592 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3593 ZoneList<Expression*>* args = expr->arguments();
3594 DCHECK(args->length() == 2);
3596 // Load the two objects into registers and perform the comparison.
3597 VisitForStackValue(args->at(0));
3598 VisitForAccumulatorValue(args->at(1));
3600 Label materialize_true, materialize_false;
3601 Label* if_true = NULL;
3602 Label* if_false = NULL;
3603 Label* fall_through = NULL;
3604 context()->PrepareTest(&materialize_true, &materialize_false,
3605 &if_true, &if_false, &fall_through);
  __ Pop(rbx);
  __ cmpp(rax, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3610 Split(equal, if_true, if_false, fall_through);
3612 context()->Plug(if_true, if_false);
3616 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3617 ZoneList<Expression*>* args = expr->arguments();
3618 DCHECK(args->length() == 1);
3620 // ArgumentsAccessStub expects the key in rdx and the formal
3621 // parameter count in rax.
3622 VisitForAccumulatorValue(args->at(0));
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(rax);
3631 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3632 DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &exit, Label::kNear);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(rax);
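// Computes the class name of the object in the accumulator: 'Function' for
// callables, the constructor's instance class name for ordinary objects,
// 'Object' when the constructor is not a function, and null for non-objects.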
3654 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3655 ZoneList<Expression*>* args = expr->arguments();
3656 DCHECK(args->length() == 1);
3657 Label done, null, function, non_function_constructor;
3659 VisitForAccumulatorValue(args->at(0));
3661 // If the object is a smi, we return null.
3662 __ JumpIfSmi(rax, &null);
3664 // Check that the object is a JS object but take special care of JS
3665 // functions to make sure they have 'Function' as their class.
3666 // Assume that there are only two callable types, and one of them is at
3667 // either end of the type range for JS object types. Saves extra comparisons.
3668 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
  // Map is now in rax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);
3676 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3677 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3678 LAST_SPEC_OBJECT_TYPE - 1);
3679 __ j(equal, &function);
3680 // Assume that there is no larger type.
3681 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3683 // Check if the constructor in the map is a JS function.
3684 __ GetMapConstructor(rax, rax, rbx);
3685 __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3686 __ j(not_equal, &non_function_constructor);
3688 // rax now contains the constructor function. Grab the
3689 // instance class name from there.
3690 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ Move(rax, isolate()->factory()->Function_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);
  context()->Plug(rax);
3715 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3716 ZoneList<Expression*>* args = expr->arguments();
3717 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(rax, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
  __ j(not_equal, &done);
  __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(rax);
3734 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3735 ZoneList<Expression*>* args = expr->arguments();
3736 DCHECK_EQ(1, args->length());
3738 VisitForAccumulatorValue(args->at(0));
3740 Label materialize_true, materialize_false;
3741 Label* if_true = nullptr;
3742 Label* if_false = nullptr;
3743 Label* fall_through = nullptr;
3744 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3745 &if_false, &fall_through);
3747 __ JumpIfSmi(rax, if_false);
3748 __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
3749 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3750 Split(equal, if_true, if_false, fall_through);
3752 context()->Plug(if_true, if_false);
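// Loads a field of a JSDate object. Field 0 (the time value) is always read
// directly; other cached fields are read inline while the date cache stamp is
// current, otherwise the value is recomputed through the C++ date cache.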
3756 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3757 ZoneList<Expression*>* args = expr->arguments();
3758 DCHECK(args->length() == 2);
3759 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3760 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3762 VisitForAccumulatorValue(args->at(0)); // Load the object.
3764 Register object = rax;
3765 Register result = rax;
3766 Register scratch = rcx;
  if (FLAG_debug_code) {
    __ AssertNotSmi(object);
    __ CmpObjectType(object, JS_DATE_TYPE, scratch);
    __ Check(equal, kOperandIsNotADate);
  }

  if (index->value() == 0) {
    __ movp(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      Operand stamp_operand = __ ExternalOperand(stamp);
      __ movp(scratch, stamp_operand);
      __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movp(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done, Label::kNear);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
    __ movp(arg_reg_1, object);
    __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ bind(&done);
  }
  context()->Plug(rax);
3801 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3802 ZoneList<Expression*>* args = expr->arguments();
3803 DCHECK_EQ(3, args->length());
3805 Register string = rax;
3806 Register index = rbx;
3807 Register value = rcx;
3809 VisitForStackValue(args->at(0)); // index
3810 VisitForStackValue(args->at(1)); // value
  VisitForAccumulatorValue(args->at(2));  // string

  __ Pop(value);
  __ Pop(index);

  if (FLAG_debug_code) {
    __ Check(__ CheckSmi(value), kNonSmiValue);
    __ Check(__ CheckSmi(index), kNonSmiValue);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
          value);
  context()->Plug(string);
3834 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3835 ZoneList<Expression*>* args = expr->arguments();
3836 DCHECK_EQ(3, args->length());
3838 Register string = rax;
3839 Register index = rbx;
3840 Register value = rcx;
3842 VisitForStackValue(args->at(0)); // index
3843 VisitForStackValue(args->at(1)); // value
  VisitForAccumulatorValue(args->at(2));  // string

  __ Pop(value);
  __ Pop(index);

  if (FLAG_debug_code) {
    __ Check(__ CheckSmi(value), kNonSmiValue);
    __ Check(__ CheckSmi(index), kNonSmiValue);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
          value);
  context()->Plug(rax);
3867 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3868 ZoneList<Expression*>* args = expr->arguments();
3869 DCHECK(args->length() == 2);
3871 VisitForStackValue(args->at(0)); // Load the object.
3872 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ Pop(rbx);  // rax = value. rbx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(rbx, &done);

  // If the object is not a value type, return the value.
  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
  __ j(not_equal, &done);

  // Store the value.
  __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ movp(rdx, rax);
  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(rax);
3895 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3896 ZoneList<Expression*>* args = expr->arguments();
3897 DCHECK_EQ(args->length(), 1);
3899 // Load the argument into rax and call the stub.
3900 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(rax);
3908 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3909 ZoneList<Expression*>* args = expr->arguments();
3910 DCHECK_EQ(1, args->length());
3912 // Load the argument into rax and convert it.
3913 VisitForAccumulatorValue(args->at(0));
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(rax);
3921 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3922 ZoneList<Expression*>* args = expr->arguments();
3923 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
3940 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3941 ZoneList<Expression*>* args = expr->arguments();
3942 DCHECK(args->length() == 2);
3944 VisitForStackValue(args->at(0));
3945 VisitForAccumulatorValue(args->at(1));
  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
3986 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3987 ZoneList<Expression*>* args = expr->arguments();
3988 DCHECK(args->length() == 2);
3990 VisitForStackValue(args->at(0));
3991 VisitForAccumulatorValue(args->at(1));
  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object, index, scratch, result,
                                  &need_conversion, &need_conversion,
                                  &index_out_of_range, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
4034 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4035 ZoneList<Expression*>* args = expr->arguments();
4036 DCHECK_EQ(2, args->length());
4037 VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ Pop(rdx);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(rax);
4047 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4048 ZoneList<Expression*>* args = expr->arguments();
4049 DCHECK(args->length() >= 2);
4051 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4052 for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.
4057 Label runtime, done;
4058 // Check for non-function argument (including proxy).
4059 __ JumpIfSmi(rax, &runtime);
4060 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
4061 __ j(not_equal, &runtime);
  // InvokeFunction requires the function in rdi. Move it in there.
  __ movp(rdi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ Push(rax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(rax);
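// Implements the implicit super() call of a default derived constructor: the
// parent constructor (the [[Prototype]] of the current function) is invoked
// with the arguments the current invocation received, copied from the
// arguments adaptor frame if one is present.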
4079 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4080 ZoneList<Expression*>* args = expr->arguments();
4081 DCHECK(args->length() == 2);
4084 VisitForStackValue(args->at(0));
4087 VisitForStackValue(args->at(1));
4088 __ CallRuntime(Runtime::kGetPrototype, 1);
4089 __ Push(result_register());
4091 // Load original constructor into rcx.
4092 __ movp(rcx, Operand(rsp, 1 * kPointerSize));
4094 // Check if the calling frame is an arguments adaptor frame.
4095 Label adaptor_frame, args_set_up, runtime;
4096 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
4097 __ movp(rbx, Operand(rdx, StandardFrameConstants::kContextOffset));
4098 __ Cmp(rbx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4099 __ j(equal, &adaptor_frame);
4100 // default constructor has no arguments, so no adaptor frame means no args.
4101 __ movp(rax, Immediate(0));
4102 __ jmp(&args_set_up);
  // Copy arguments from the adaptor frame.
  __ bind(&adaptor_frame);
  __ movp(rbx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiToInteger64(rbx, rbx);
  __ movp(rax, rbx);
  __ leap(rdx, Operand(rdx, rbx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  Label loop;
  __ bind(&loop);
  __ Push(Operand(rdx, -1 * kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  __ decp(rbx);
  __ j(not_zero, &loop);
4121 __ bind(&args_set_up);
4122 __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0));
4123 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
4125 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4126 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4130 context()->Plug(result_register());
4134 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4135 RegExpConstructResultStub stub(isolate());
4136 ZoneList<Expression*>* args = expr->arguments();
4137 DCHECK(args->length() == 3);
4138 VisitForStackValue(args->at(0));
4139 VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(rbx);
  __ Pop(rcx);
  __ CallStub(&stub);
  context()->Plug(rax);
4148 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4149 ZoneList<Expression*>* args = expr->arguments();
4150 DCHECK(args->length() == 1);
4152 VisitForAccumulatorValue(args->at(0));
4154 Label materialize_true, materialize_false;
4155 Label* if_true = NULL;
4156 Label* if_false = NULL;
4157 Label* fall_through = NULL;
4158 context()->PrepareTest(&materialize_true, &materialize_false,
4159 &if_true, &if_false, &fall_through);
4161 __ testl(FieldOperand(rax, String::kHashFieldOffset),
4162 Immediate(String::kContainsCachedArrayIndexMask));
4163 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
4171 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4172 ZoneList<Expression*>* args = expr->arguments();
4173 DCHECK(args->length() == 1);
4174 VisitForAccumulatorValue(args->at(0));
4176 __ AssertString(rax);
4178 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
4179 DCHECK(String::kHashShift >= kSmiTagSize);
4180 __ IndexFromHash(rax, rax);
4182 context()->Plug(rax);
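// Inline fast path for joining an array of sequential one-byte strings with a
// sequential one-byte separator (empty, single-character, and longer
// separators each get a dedicated loop). Any unexpected input bails out and
// produces undefined instead.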
4186 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4187 Label bailout, return_result, done, one_char_separator, long_separator,
4188 non_trivial_array, not_size_one_array, loop,
4189 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4190 ZoneList<Expression*>* args = expr->arguments();
4191 DCHECK(args->length() == 2);
4192 // We will leave the separator on the stack until the end of the function.
4193 VisitForStackValue(args->at(1));
4194 // Load this to rax (= array)
4195 VisitForAccumulatorValue(args->at(0));
4196 // All aliases of the same register have disjoint lifetimes.
4197 Register array = rax;
4198 Register elements = no_reg; // Will be rax.
4200 Register index = rdx;
4202 Register string_length = rcx;
4204 Register string = rsi;
4206 Register scratch = rbx;
4208 Register array_length = rdi;
4209 Register result_pos = no_reg; // Will be rdi.
4211 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
4212 Operand result_operand = Operand(rsp, 1 * kPointerSize);
4213 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
4214 // Separator operand is already pushed. Make room for the two
4215 // other stack fields, and clear the direction flag in anticipation
4216 // of calling CopyBytes.
4217 __ subp(rsp, Immediate(2 * kPointerSize));
4219 // Check that the array is a JSArray
4220 __ JumpIfSmi(array, &bailout);
4221 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4222 __ j(not_equal, &bailout);
4224 // Check that the array has fast elements.
4225 __ CheckFastElements(scratch, &bailout);
4227 // Array has fast elements, so its length must be a smi.
4228 // If the array has length zero, return the empty string.
4229 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
4230 __ SmiCompare(array_length, Smi::FromInt(0));
4231 __ j(not_zero, &non_trivial_array);
4232 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
4233 __ jmp(&return_result);
4235 // Save the array length on the stack.
4236 __ bind(&non_trivial_array);
4237 __ SmiToInteger32(array_length, array_length);
4238 __ movl(array_length_operand, array_length);
4240 // Save the FixedArray containing array's elements.
4241 // End of array's live range.
4243 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
4247 // Check that all array elements are sequential one-byte strings, and
4248 // accumulate the sum of their lengths, as a smi-encoded value.
4250 __ Set(string_length, 0);
4251 // Loop condition: while (index < array_length).
4252 // Live loop registers: index(int32), array_length(int32), string(String*),
4253 // scratch, string_length(int32), elements(FixedArray*).
4254 if (generate_debug_code_) {
4255 __ cmpp(index, array_length);
4256 __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4259 __ movp(string, FieldOperand(elements,
4262 FixedArray::kHeaderSize));
4263 __ JumpIfSmi(string, &bailout);
4264 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4265 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4266 __ andb(scratch, Immediate(
4267 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4268 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4269 __ j(not_equal, &bailout);
4270 __ AddSmiField(string_length,
4271 FieldOperand(string, SeqOneByteString::kLengthOffset));
4272 __ j(overflow, &bailout);
4274 __ cmpl(index, array_length);
4278 // string_length: Sum of string lengths.
4279 // elements: FixedArray of strings.
4280 // index: Array length.
4281 // array_length: Array length.
4283 // If array_length is 1, return elements[0], a string.
4284 __ cmpl(array_length, Immediate(1));
  __ j(not_equal, &not_size_one_array);
4286 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
4287 __ jmp(&return_result);
  __ bind(&not_size_one_array);
4291 // End of array_length live range.
4292 result_pos = array_length;
4293 array_length = no_reg;
4296 // string_length: Sum of string lengths.
4297 // elements: FixedArray of strings.
4298 // index: Array length.
4300 // Check that the separator is a sequential one-byte string.
4301 __ movp(string, separator_operand);
4302 __ JumpIfSmi(string, &bailout);
4303 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
4304 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4305 __ andb(scratch, Immediate(
4306 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4307 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
4308 __ j(not_equal, &bailout);
4311 // string_length: Sum of string lengths.
4312 // elements: FixedArray of strings.
4313 // index: Array length.
4314 // string: Separator string.
4316 // Add (separator length times (array_length - 1)) to string_length.
4317 __ SmiToInteger32(scratch,
4318 FieldOperand(string, SeqOneByteString::kLengthOffset));
4320 __ imull(scratch, index);
4321 __ j(overflow, &bailout);
4322 __ addl(string_length, scratch);
4323 __ j(overflow, &bailout);
4325 // Live registers and stack values:
4326 // string_length: Total length of result string.
4327 // elements: FixedArray of strings.
4328 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4330 __ movp(result_operand, result_pos);
4331 __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4333 __ movp(string, separator_operand);
4334 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4336 __ j(equal, &one_char_separator);
4337 __ j(greater, &long_separator);
4340 // Empty separator case:
4342 __ movl(scratch, array_length_operand);
4343 __ jmp(&loop_1_condition);
4344 // Loop condition: while (index < array_length).
4346 // Each iteration of the loop concatenates one string to the result.
4347 // Live values in registers:
4348 // index: which element of the elements array we are adding to the result.
4349 // result_pos: the position to which we are currently copying characters.
4350 // elements: the FixedArray of strings we are joining.
4351 // scratch: array length.
4353 // Get string = array[index].
4354 __ movp(string, FieldOperand(elements, index,
4356 FixedArray::kHeaderSize));
4357 __ SmiToInteger32(string_length,
4358 FieldOperand(string, String::kLengthOffset));
4360 FieldOperand(string, SeqOneByteString::kHeaderSize));
4361 __ CopyBytes(result_pos, string, string_length);
4363 __ bind(&loop_1_condition);
4364 __ cmpl(index, scratch);
4365 __ j(less, &loop_1); // Loop while (index < array_length).
4368 // Generic bailout code used from several places.
4370 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4371 __ jmp(&return_result);
4374 // One-character separator case
4375 __ bind(&one_char_separator);
4376 // Get the separator one-byte character value.
4377 // Register "string" holds the separator.
4378 __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4380 // Jump into the loop after the code that copies the separator, so the first
4381 // element is not preceded by a separator
4382 __ jmp(&loop_2_entry);
4383 // Loop condition: while (index < length).
4385 // Each iteration of the loop concatenates one string to the result.
4386 // Live values in registers:
4387 // elements: The FixedArray of strings we are joining.
4388 // index: which element of the elements array we are adding to the result.
4389 // result_pos: the position to which we are currently copying characters.
4390 // scratch: Separator character.
4392 // Copy the separator character to the result.
4393 __ movb(Operand(result_pos, 0), scratch);
4394 __ incp(result_pos);
4396 __ bind(&loop_2_entry);
4397 // Get string = array[index].
4398 __ movp(string, FieldOperand(elements, index,
4400 FixedArray::kHeaderSize));
4401 __ SmiToInteger32(string_length,
4402 FieldOperand(string, String::kLengthOffset));
4404 FieldOperand(string, SeqOneByteString::kHeaderSize));
4405 __ CopyBytes(result_pos, string, string_length);
4407 __ cmpl(index, array_length_operand);
4408 __ j(less, &loop_2); // End while (index < length).
4412 // Long separator case (separator is more than one character).
4413 __ bind(&long_separator);
4415 // Make elements point to end of elements array, and index
4416 // count from -array_length to zero, so we don't need to maintain
4418 __ movl(index, array_length_operand);
4419 __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4420 FixedArray::kHeaderSize));
4423 // Replace separator string with pointer to its first character, and
4424 // make scratch be its length.
4425 __ movp(string, separator_operand);
4426 __ SmiToInteger32(scratch,
4427 FieldOperand(string, String::kLengthOffset));
4429 FieldOperand(string, SeqOneByteString::kHeaderSize));
4430 __ movp(separator_operand, string);
4432 // Jump into the loop after the code that copies the separator, so the first
4433 // element is not preceded by a separator
4434 __ jmp(&loop_3_entry);
4435 // Loop condition: while (index < length).
4437 // Each iteration of the loop concatenates one string to the result.
4438 // Live values in registers:
4439 // index: which element of the elements array we are adding to the result.
4440 // result_pos: the position to which we are currently copying characters.
4441 // scratch: Separator length.
4442 // separator_operand (rsp[0x10]): Address of first char of separator.
4444 // Copy the separator to the result.
4445 __ movp(string, separator_operand);
4446 __ movl(string_length, scratch);
4447 __ CopyBytes(result_pos, string, string_length, 2);
4449 __ bind(&loop_3_entry);
4450 // Get string = array[index].
4451 __ movp(string, Operand(elements, index, times_pointer_size, 0));
4452 __ SmiToInteger32(string_length,
4453 FieldOperand(string, String::kLengthOffset));
4455 FieldOperand(string, SeqOneByteString::kHeaderSize));
4456 __ CopyBytes(result_pos, string, string_length);
4458 __ j(not_equal, &loop_3); // Loop while (index < 0).
4461 __ movp(rax, result_operand);
4463 __ bind(&return_result);
4464 // Drop temp values from the stack, and restore context register.
4465 __ addp(rsp, Immediate(3 * kPointerSize));
4466 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4467 context()->Plug(rax);
4471 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4472 DCHECK(expr->arguments()->length() == 0);
4473 ExternalReference debug_is_active =
4474 ExternalReference::debug_is_active_address(isolate());
4475 __ Move(kScratchRegister, debug_is_active);
4476 __ movzxbp(rax, Operand(kScratchRegister, 0));
4477 __ Integer32ToSmi(rax, rax);
4478 context()->Plug(rax);
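// JS runtime calls (expr->is_jsruntime()) load the callee by name from the
// builtins object and invoke it like an ordinary JS function call; all other
// intrinsics either have an inline implementation or go to the C++ runtime.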
4482 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4483 // Push the builtins object as receiver.
4484 __ movp(rax, GlobalObjectOperand());
4485 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4487 // Load the function from the receiver.
4488 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4489 __ Move(LoadDescriptor::NameRegister(), expr->name());
4490 __ Move(LoadDescriptor::SlotRegister(),
4491 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4492 CallLoadIC(NOT_INSIDE_TYPEOF);
4496 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4497 ZoneList<Expression*>* args = expr->arguments();
4498 int arg_count = args->length();
4500 SetCallPosition(expr, arg_count);
4501 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}
4507 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4508 ZoneList<Expression*>* args = expr->arguments();
4509 int arg_count = args->length();
4511 if (expr->is_jsruntime()) {
4512 Comment cmnt(masm_, "[ CallRuntime");
4514 EmitLoadJSRuntimeFunction(expr);
4516 // Push the target function under the receiver.
4517 __ Push(Operand(rsp, 0));
4518 __ movp(Operand(rsp, kPointerSize), rax);
4520 // Push the arguments ("left-to-right").
4521 for (int i = 0; i < arg_count; i++) {
4522 VisitForStackValue(args->at(i));
4525 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4526 EmitCallJSRuntimeFunction(expr);
4528 // Restore context register.
4529 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, rax);

  } else {
    const Runtime::Function* function = expr->function();
4534 switch (function->function_id) {
4535 #define CALL_INTRINSIC_GENERATOR(Name) \
4536 case Runtime::kInline##Name: { \
4537 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4544 // Push the arguments ("left-to-right").
4545 for (int i = 0; i < arg_count; i++) {
4546 VisitForStackValue(args->at(i));
4549 // Call the C runtime.
4550 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4551 __ CallRuntime(function, arg_count);
4552 context()->Plug(rax);
4559 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4560 switch (expr->op()) {
4561 case Token::DELETE: {
4562 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4563 Property* property = expr->expression()->AsProperty();
4564 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4566 if (property != NULL) {
4567 VisitForStackValue(property->obj());
4568 VisitForStackValue(property->key());
4569 __ CallRuntime(is_strict(language_mode())
4570 ? Runtime::kDeleteProperty_Strict
4571 : Runtime::kDeleteProperty_Sloppy,
4573 context()->Plug(rax);
4574 } else if (proxy != NULL) {
4575 Variable* var = proxy->var();
4576 // Delete of an unqualified identifier is disallowed in strict mode but
4577 // "delete this" is allowed.
4578 bool is_this = var->HasThisName(isolate());
4579 DCHECK(is_sloppy(language_mode()) || is_this);
4580 if (var->IsUnallocatedOrGlobalSlot()) {
4581 __ Push(GlobalObjectOperand());
4582 __ Push(var->name());
4583 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4584 context()->Plug(rax);
4585 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4586 // Result of deleting non-global variables is false. 'this' is
4587 // not really a variable, though we implement it as one. The
4588 // subexpression does not have side effects.
4589 context()->Plug(is_this);
4591 // Non-global variable. Call the runtime to try to delete from the
4592 // context where the variable was introduced.
4593 __ Push(context_register());
4594 __ Push(var->name());
4595 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4596 context()->Plug(rax);
4599 // Result of deleting non-property, non-variable reference is true.
4600 // The subexpression may have side effects.
4601 VisitForEffect(expr->expression());
4602 context()->Plug(true);
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4609 VisitForEffect(expr->expression());
4610 context()->Plug(Heap::kUndefinedValueRootIndex);
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4616 if (context()->IsEffect()) {
4617 // Unary NOT has no side effects so it's only necessary to visit the
4618 // subexpression. Match the optimizing compiler by not branching.
4619 VisitForEffect(expr->expression());
4620 } else if (context()->IsTest()) {
4621 const TestContext* test = TestContext::cast(context());
4622 // The labels are swapped for the recursive call.
4623 VisitForControl(expr->expression(),
4624 test->false_label(),
4626 test->fall_through());
4627 context()->Plug(test->true_label(), test->false_label());
4629 // We handle value contexts explicitly rather than simply visiting
4630 // for control and plugging the control flow into the context,
4631 // because we need to prepare a pair of extra administrative AST ids
4632 // for the optimizing compiler.
4633 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4634 Label materialize_true, materialize_false, done;
4635 VisitForControl(expr->expression(),
4639 __ bind(&materialize_true);
4640 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4641 if (context()->IsAccumulatorValue()) {
4642 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4644 __ PushRoot(Heap::kTrueValueRootIndex);
4646 __ jmp(&done, Label::kNear);
4647 __ bind(&materialize_false);
4648 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4649 if (context()->IsAccumulatorValue()) {
4650 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4652 __ PushRoot(Heap::kFalseValueRootIndex);
4659 case Token::TYPEOF: {
4660 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4662 AccumulatorValueContext context(this);
4663 VisitForTypeofValue(expr->expression());
4666 TypeofStub typeof_stub(isolate());
4667 __ CallStub(&typeof_stub);
4668 context()->Plug(rax);
4678 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4679 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4681 Comment cmnt(masm_, "[ CountOperation");
4683 Property* prop = expr->expression()->AsProperty();
4684 LhsKind assign_type = Property::GetAssignType(prop);
4686 // Evaluate expression and get value.
4687 if (assign_type == VARIABLE) {
4688 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4689 AccumulatorValueContext context(this);
4690 EmitVariableLoad(expr->expression()->AsVariableProxy());
4692 // Reserve space for result of postfix operation.
4693 if (expr->is_postfix() && !context()->IsEffect()) {
4694 __ Push(Smi::FromInt(0));
4696 switch (assign_type) {
4697 case NAMED_PROPERTY: {
4698 VisitForStackValue(prop->obj());
4699 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4700 EmitNamedPropertyLoad(prop);
4704 case NAMED_SUPER_PROPERTY: {
4705 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4706 VisitForAccumulatorValue(
4707 prop->obj()->AsSuperPropertyReference()->home_object());
4708 __ Push(result_register());
4709 __ Push(MemOperand(rsp, kPointerSize));
4710 __ Push(result_register());
4711 EmitNamedSuperPropertyLoad(prop);
4715 case KEYED_SUPER_PROPERTY: {
4716 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4718 prop->obj()->AsSuperPropertyReference()->home_object());
4719 VisitForAccumulatorValue(prop->key());
4720 __ Push(result_register());
4721 __ Push(MemOperand(rsp, 2 * kPointerSize));
4722 __ Push(MemOperand(rsp, 2 * kPointerSize));
4723 __ Push(result_register());
4724 EmitKeyedSuperPropertyLoad(prop);
4728 case KEYED_PROPERTY: {
4729 VisitForStackValue(prop->obj());
4730 VisitForStackValue(prop->key());
4731 // Leave receiver on stack
4732 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4733 // Copy of key, needed for later store.
4734 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4735 EmitKeyedPropertyLoad(prop);
4744 // We need a second deoptimization point after loading the value
4745 // in case evaluating the property load my have a side effect.
4746 if (assign_type == VARIABLE) {
4747 PrepareForBailout(expr->expression(), TOS_REG);
4749 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4752 // Inline smi case if we are in a loop.
4753 Label done, stub_call;
4754 JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4759 // Save result for postfix expressions.
4760 if (expr->is_postfix()) {
4761 if (!context()->IsEffect()) {
4762 // Save the result on the stack. If we have a named or keyed property
4763 // we store the result under the receiver that is currently on top
4765 switch (assign_type) {
4769 case NAMED_PROPERTY:
4770 __ movp(Operand(rsp, kPointerSize), rax);
4772 case NAMED_SUPER_PROPERTY:
4773 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4775 case KEYED_PROPERTY:
4776 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4778 case KEYED_SUPER_PROPERTY:
4779 __ movp(Operand(rsp, 3 * kPointerSize), rax);
    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  if (!is_strong(language_mode())) {
4799 ToNumberStub convert_stub(isolate());
4800 __ CallStub(&convert_stub);
4801 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4804 // Save result for postfix expressions.
4805 if (expr->is_postfix()) {
4806 if (!context()->IsEffect()) {
4807 // Save the result on the stack. If we have a named or keyed property
4808 // we store the result under the receiver that is currently on top
4810 switch (assign_type) {
4814 case NAMED_PROPERTY:
4815 __ movp(Operand(rsp, kPointerSize), rax);
4817 case NAMED_SUPER_PROPERTY:
4818 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4820 case KEYED_PROPERTY:
4821 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4823 case KEYED_SUPER_PROPERTY:
4824 __ movp(Operand(rsp, 3 * kPointerSize), rax);
4830 SetExpressionPosition(expr);
4832 // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
4836 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4837 strength(language_mode())).code();
4838 CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
4843 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4845 // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
4849 // Perform the assignment as if via '='.
4850 { EffectContext context(this);
4851 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4852 Token::ASSIGN, expr->CountSlot());
4853 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4856 // For all contexts except kEffect: We have the result on
4857 // top of the stack.
4858 if (!context()->IsEffect()) {
4859 context()->PlugTOS();
4862 // Perform the assignment as if via '='.
4863 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4864 Token::ASSIGN, expr->CountSlot());
4865 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4866 context()->Plug(rax);
4869 case NAMED_PROPERTY: {
4870 __ Move(StoreDescriptor::NameRegister(),
4871 prop->key()->AsLiteral()->value());
4872 __ Pop(StoreDescriptor::ReceiverRegister());
4873 if (FLAG_vector_stores) {
4874 EmitLoadStoreICSlot(expr->CountSlot());
4877 CallStoreIC(expr->CountStoreFeedbackId());
4879 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4880 if (expr->is_postfix()) {
4881 if (!context()->IsEffect()) {
4882 context()->PlugTOS();
4885 context()->Plug(rax);
4889 case NAMED_SUPER_PROPERTY: {
4890 EmitNamedSuperPropertyStore(prop);
4891 if (expr->is_postfix()) {
4892 if (!context()->IsEffect()) {
4893 context()->PlugTOS();
4896 context()->Plug(rax);
4900 case KEYED_SUPER_PROPERTY: {
4901 EmitKeyedSuperPropertyStore(prop);
4902 if (expr->is_postfix()) {
4903 if (!context()->IsEffect()) {
4904 context()->PlugTOS();
4907 context()->Plug(rax);
4911 case KEYED_PROPERTY: {
4912 __ Pop(StoreDescriptor::NameRegister());
4913 __ Pop(StoreDescriptor::ReceiverRegister());
4915 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4916 if (FLAG_vector_stores) {
4917 EmitLoadStoreICSlot(expr->CountSlot());
4920 CallIC(ic, expr->CountStoreFeedbackId());
4922 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4923 if (expr->is_postfix()) {
4924 if (!context()->IsEffect()) {
4925 context()->PlugTOS();
4928 context()->Plug(rax);
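// Emits an inline typeof comparison against a string literal (e.g.
// typeof x == "number") without materializing the typeof result.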
4936 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4937 Expression* sub_expr,
4938 Handle<String> check) {
4939 Label materialize_true, materialize_false;
4940 Label* if_true = NULL;
4941 Label* if_false = NULL;
4942 Label* fall_through = NULL;
4943 context()->PrepareTest(&materialize_true, &materialize_false,
4944 &if_true, &if_false, &fall_through);
4946 { AccumulatorValueContext context(this);
4947 VisitForTypeofValue(sub_expr);
4949 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4951 Factory* factory = isolate()->factory();
4952 if (String::Equals(check, factory->number_string())) {
4953 __ JumpIfSmi(rax, if_true);
4954 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4955 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4956 Split(equal, if_true, if_false, fall_through);
4957 } else if (String::Equals(check, factory->string_string())) {
4958 __ JumpIfSmi(rax, if_false);
4959 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4960 Split(below, if_true, if_false, fall_through);
4961 } else if (String::Equals(check, factory->symbol_string())) {
4962 __ JumpIfSmi(rax, if_false);
4963 __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4964 Split(equal, if_true, if_false, fall_through);
4965 } else if (String::Equals(check, factory->boolean_string())) {
4966 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4967 __ j(equal, if_true);
4968 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4969 Split(equal, if_true, if_false, fall_through);
4970 } else if (String::Equals(check, factory->undefined_string())) {
4971 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4972 __ j(equal, if_true);
4973 __ JumpIfSmi(rax, if_false);
4974 // Check for undetectable objects => true.
4975 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4976 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4977 Immediate(1 << Map::kIsUndetectable));
4978 Split(not_zero, if_true, if_false, fall_through);
4979 } else if (String::Equals(check, factory->function_string())) {
4980 __ JumpIfSmi(rax, if_false);
4981 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4982 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4983 __ j(equal, if_true);
4984 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4985 Split(equal, if_true, if_false, fall_through);
4986 } else if (String::Equals(check, factory->object_string())) {
4987 __ JumpIfSmi(rax, if_false);
4988 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4989 __ j(equal, if_true);
4990 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4991 __ j(below, if_false);
4992 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4993 __ j(above, if_false);
4994 // Check for undetectable objects => false.
4995 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4996 Immediate(1 << Map::kIsUndetectable));
4997 Split(zero, if_true, if_false, fall_through);
4999 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
5000 } else if (String::Equals(check, factory->type##_string())) { \
5001 __ JumpIfSmi(rax, if_false); \
5002 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); \
5003 __ CompareRoot(rax, Heap::k##Type##MapRootIndex); \
5004 Split(equal, if_true, if_false, fall_through);
5005 SIMD128_TYPES(SIMD128_TYPE)
5009 if (if_false != fall_through) __ jmp(if_false);
5011 context()->Plug(if_true, if_false);
5015 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5016 Comment cmnt(masm_, "[ CompareOperation");
5017 SetExpressionPosition(expr);
5019 // First we try a fast inlined version of the compare when one of
5020 // the operands is a literal.
5021 if (TryLiteralCompare(expr)) return;
5023 // Always perform the comparison for its control flow. Pack the result
5024 // into the expression's context after the comparison is performed.
5025 Label materialize_true, materialize_false;
5026 Label* if_true = NULL;
5027 Label* if_false = NULL;
5028 Label* fall_through = NULL;
5029 context()->PrepareTest(&materialize_true, &materialize_false,
5030 &if_true, &if_false, &fall_through);
5032 Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
5037 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5038 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5039 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
5040 Split(equal, if_true, if_false, fall_through);
5043 case Token::INSTANCEOF: {
5044 VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      // The stub returns 0 for true.
5050 Split(zero, if_true, if_false, fall_through);
    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ Pop(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
5060 JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movp(rcx, rdx);
        __ orp(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpp(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
5072 isolate(), op, strength(language_mode())).code();
5073 CallIC(ic, expr->CompareOperationFeedbackId());
5074 patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      Split(cc, if_true, if_false, fall_through);
5082 // Convert the result of the comparison into one expected for this
5083 // expression's context.
5084 context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
5092 Label* if_true = NULL;
5093 Label* if_false = NULL;
5094 Label* fall_through = NULL;
5095 context()->PrepareTest(&materialize_true, &materialize_false,
5096 &if_true, &if_false, &fall_through);
5098 VisitForAccumulatorValue(sub_expr);
5099 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5100 if (expr->op() == Token::EQ_STRICT) {
5101 Heap::RootListIndex nil_value = nil == kNullValue ?
5102 Heap::kNullValueRootIndex :
5103 Heap::kUndefinedValueRootIndex;
5104 __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ testp(rax, rax);
    Split(not_zero, if_true, if_false, fall_through);
  }
5112 context()->Plug(if_true, if_false);
5116 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5117 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5118 context()->Plug(rax);
Register FullCodeGenerator::result_register() {
  return rax;
}
Register FullCodeGenerator::context_register() {
  return rsi;
}
5132 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5133 DCHECK(IsAligned(frame_offset, kPointerSize));
5134 __ movp(Operand(rbp, frame_offset), value);
5138 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5139 __ movp(dst, ContextOperand(rsi, context_index));
5143 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5144 Scope* closure_scope = scope()->ClosureScope();
5145 if (closure_scope->is_script_scope() ||
5146 closure_scope->is_module_scope()) {
5147 // Contexts nested in the native context have a canonical empty function
5148 // as their closure, not the anonymous closure containing the global
5149 // code. Pass a smi sentinel and let the runtime look up the empty
5151 __ Push(Smi::FromInt(0));
5152 } else if (closure_scope->is_eval_scope()) {
5153 // Contexts created by a call to eval have the same closure as the
5154 // context calling eval, not the anonymous closure containing the eval
5155 // code. Fetch it from the context.
5156 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
5158 DCHECK(closure_scope->is_function_scope());
5159 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
5164 // ----------------------------------------------------------------------------
5165 // Non-local control flow support.
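// The return address is "cooked" below: it is pushed as a smi-encoded offset
// from the code object rather than as a raw address, so the value sitting on
// the expression stack stays GC-safe while the finally block runs, and it is
// "uncooked" again on exit.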
5168 void FullCodeGenerator::EnterFinallyBlock() {
5169 DCHECK(!result_register().is(rdx));
5170 DCHECK(!result_register().is(rcx));
5171 // Cook return address on top of stack (smi encoded Code* delta)
5172 __ PopReturnAddressTo(rdx);
  __ Move(rcx, masm_->CodeObject());
  __ subp(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Store result register while executing finally block.
5179 __ Push(result_register());
5181 // Store pending message while executing finally block.
5182 ExternalReference pending_message_obj =
5183 ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ Push(rdx);

  ClearPendingMessage();
}
5191 void FullCodeGenerator::ExitFinallyBlock() {
5192 DCHECK(!result_register().is(rdx));
5193 DCHECK(!result_register().is(rcx));
  // Restore pending message from stack.
  __ Pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);
  // Restore result register from stack.
  __ Pop(result_register());

  // Uncook return address.
  __ Pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addp(rdx, rcx);
  __ PushReturnAddressFrom(rdx);
  __ ret(0);
}
5212 void FullCodeGenerator::ClearPendingMessage() {
5213 DCHECK(!result_register().is(rdx));
5214 ExternalReference pending_message_obj =
5215 ExternalReference::address_of_pending_message_obj(isolate());
5216 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
5217 __ Store(pending_message_obj, rdx);
5221 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5222 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5223 __ Move(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
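// Constants describing the instruction bytes of the back-edge sequence
//   sub <profiling_counter>, <delta>; jns ok; call <stub>; ok:
// which BackEdgeTable::PatchAt() below rewrites to switch between the
// interrupt check and the on-stack-replacement entry points.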
5230 static const byte kJnsInstruction = 0x79;
5231 static const byte kNopByteOne = 0x66;
5232 static const byte kNopByteTwo = 0x90;
5234 static const byte kCallInstruction = 0xe8;
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
5242 Address call_target_address = pc - kIntSize;
5243 Address jns_instr_address = call_target_address - 3;
5244 Address jns_offset_address = call_target_address - 2;
  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }
  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
5270 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5271 unoptimized_code, call_target_address, replacement_code);
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
5280 Address jns_instr_address = call_target_address - 3;
5281 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
5292 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
  if (Assembler::target_address_at(call_target_address,
                                   unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64