1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #if V8_TARGET_ARCH_ARM64
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/codegen.h"
10 #include "src/compiler.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parser.h"
15 #include "src/scopes.h"
17 #include "src/arm64/code-stubs-arm64.h"
18 #include "src/arm64/frames-arm64.h"
19 #include "src/arm64/macro-assembler-arm64.h"
24 #define __ ACCESS_MASM(masm_)
26 class JumpPatchSite BASE_EMBEDDED {
28 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
30 info_emitted_ = false;
35 if (patch_site_.is_bound()) {
36 DCHECK(info_emitted_);
38 DCHECK(reg_.IsNone());
42 void EmitJumpIfNotSmi(Register reg, Label* target) {
43 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
44 InstructionAccurateScope scope(masm_, 1);
45 DCHECK(!info_emitted_);
46 DCHECK(reg.Is64Bits());
49 __ bind(&patch_site_);
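// Bit 0 of xzr is always clear, so the branch below is always taken until
// PatchInlinedSmiCode rewrites it to test the smi tag bit of the checked
// register.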
50 __ tbz(xzr, 0, target); // Always taken before patched.
53 void EmitJumpIfSmi(Register reg, Label* target) {
54 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
55 InstructionAccurateScope scope(masm_, 1);
56 DCHECK(!info_emitted_);
57 DCHECK(reg.Is64Bits());
60 __ bind(&patch_site_);
61 __ tbnz(xzr, 0, target); // Never taken before patched.
64 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
65 UseScratchRegisterScope temps(masm_);
66 Register temp = temps.AcquireX();
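// ORing the two values gives a result whose smi tag bit (bit 0) is clear
// only if both inputs are smis, so a single not-smi check covers both
// registers.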
67 __ Orr(temp, reg1, reg2);
68 EmitJumpIfNotSmi(temp, target);
71 void EmitPatchInfo() {
72 Assembler::BlockPoolsScope scope(masm_);
73 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
80 MacroAssembler* masm_;
89 // Generate code for a JS function. On entry to the function the receiver
90 // and arguments have been pushed on the stack left to right. The actual
91 // argument count matches the formal parameter count expected by the function.
94 // The live registers are:
95 // - x1: the JS function object being called (i.e. ourselves).
97 // - fp: our caller's frame pointer.
98 // - jssp: stack pointer.
99 // - lr: return address.
101 // The function builds a JS frame. See JavaScriptFrameConstants in
102 // frames-arm64.h for its layout.
103 void FullCodeGenerator::Generate() {
104 CompilationInfo* info = info_;
105 profiling_counter_ = isolate()->factory()->NewCell(
106 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
107 SetFunctionPosition(literal());
108 Comment cmnt(masm_, "[ Function compiled by full code generator");
110 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
113 if (strlen(FLAG_stop_at) > 0 &&
114 info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
115 __ Debug("stop-at", __LINE__, BREAK);
119 // Sloppy mode functions and builtins need to replace the receiver with the
120 // global proxy when called as functions (without an explicit receiver object).
122 if (info->MustReplaceUndefinedReceiverWithGlobalProxy()) {
124 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
125 __ Peek(x10, receiver_offset);
126 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
128 __ Ldr(x10, GlobalObjectMemOperand());
129 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
130 __ Poke(x10, receiver_offset);
136 // Open a frame scope to indicate that there is a frame on the stack.
137 // The MANUAL indicates that the scope shouldn't actually generate code
138 // to set up the frame because we do it manually below.
139 FrameScope frame_scope(masm_, StackFrame::MANUAL);
141 // This call emits the following sequence in a way that can be patched for
142 // code ageing support:
143 // Push(lr, fp, cp, x1);
144 // Add(fp, jssp, 2 * kPointerSize);
145 info->set_prologue_offset(masm_->pc_offset());
146 __ Prologue(info->IsCodePreAgingActive());
147 info->AddNoFrameRange(0, masm_->pc_offset());
149 // Reserve space on the stack for locals.
150 { Comment cmnt(masm_, "[ Allocate locals");
151 int locals_count = info->scope()->num_stack_slots();
152 // Generators allocate locals, if any, in context slots.
153 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
155 if (locals_count > 0) {
156 if (locals_count >= 128) {
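// For larger frames, check that reserving the locals keeps jssp above the
// real stack limit; if it would not, a stack overflow is thrown via the
// runtime call below.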
158 DCHECK(jssp.Is(__ StackPointer()));
159 __ Sub(x10, jssp, locals_count * kPointerSize);
160 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
162 __ CallRuntime(Runtime::kThrowStackOverflow, 0);
165 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
166 if (FLAG_optimize_for_size) {
167 __ PushMultipleTimes(x10, locals_count);
169 const int kMaxPushes = 32;
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ Mov(x3, loop_iterations);
174 __ Bind(&loop_header);
176 __ PushMultipleTimes(x10, kMaxPushes);
178 __ B(ne, &loop_header);
180 int remaining = locals_count % kMaxPushes;
181 // Emit the remaining pushes.
182 __ PushMultipleTimes(x10, remaining);
187 bool function_in_register_x1 = true;
189 if (info->scope()->num_heap_slots() > 0) {
190 // Argument to NewContext is the function, which is still in x1.
191 Comment cmnt(masm_, "[ Allocate context");
192 bool need_write_barrier = true;
193 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194 if (info->scope()->is_script_scope()) {
195 __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
197 __ CallRuntime(Runtime::kNewScriptContext, 2);
198 } else if (slots <= FastNewContextStub::kMaximumSlots) {
199 FastNewContextStub stub(isolate(), slots);
201 // Result of FastNewContextStub is always in new space.
202 need_write_barrier = false;
205 __ CallRuntime(Runtime::kNewFunctionContext, 1);
207 function_in_register_x1 = false;
208 // Context is returned in x0. It replaces the context passed to us.
209 // It's saved on the stack and kept live in cp.
211 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
212 // Copy any necessary parameters into the context.
213 int num_parameters = info->scope()->num_parameters();
214 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
215 for (int i = first_parameter; i < num_parameters; i++) {
216 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
217 if (var->IsContextSlot()) {
218 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
219 (num_parameters - 1 - i) * kPointerSize;
220 // Load parameter from stack.
221 __ Ldr(x10, MemOperand(fp, parameter_offset));
222 // Store it in the context.
223 MemOperand target = ContextMemOperand(cp, var->index());
226 // Update the write barrier.
227 if (need_write_barrier) {
228 __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
229 x11, kLRHasBeenSaved, kDontSaveFPRegs);
230 } else if (FLAG_debug_code) {
232 __ JumpIfInNewSpace(cp, &done);
233 __ Abort(kExpectedNewSpaceObject);
240 PrepareForBailoutForId(BailoutId::Prologue(), NO_REGISTERS);
241 // The function register is trashed in case we bail out here. But since that
242 // could happen only when we allocate a context, the value of
243 // |function_in_register_x1| is correct.
245 // Possibly set up a local binding to the this function, which is used in
246 // derived constructors with super calls.
247 Variable* this_function_var = scope()->this_function_var();
248 if (this_function_var != nullptr) {
249 Comment cmnt(masm_, "[ This function");
250 if (!function_in_register_x1) {
251 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
252 // The write barrier clobbers the register again; keep it marked as such.
254 SetVar(this_function_var, x1, x0, x2);
257 Variable* new_target_var = scope()->new_target_var();
258 if (new_target_var != nullptr) {
259 Comment cmnt(masm_, "[ new.target");
260 // Get the frame pointer for the calling frame.
261 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
263 Label check_frame_marker;
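// If the calling frame is an arguments adaptor frame, skip over it so that
// the marker check below inspects the real caller's frame.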
264 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
265 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
266 __ B(ne, &check_frame_marker);
267 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
268 __ Bind(&check_frame_marker);
269 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
270 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
271 function_in_register_x1 = false;
273 Label non_construct_frame, done;
275 __ B(ne, &non_construct_frame);
277 MemOperand(x2, ConstructFrameConstants::kOriginalConstructorOffset));
280 __ Bind(&non_construct_frame);
281 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
285 SetVar(new_target_var, x0, x2, x3);
288 Variable* arguments = scope()->arguments();
289 if (arguments != NULL) {
290 // Function uses arguments object.
291 Comment cmnt(masm_, "[ Allocate arguments object");
292 if (!function_in_register_x1) {
293 // Load this again, if it's used by the local context below.
294 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
298 // Receiver is just before the parameters on the caller's stack.
299 int num_parameters = info->scope()->num_parameters();
300 int offset = num_parameters * kPointerSize;
301 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
302 __ Mov(x1, Smi::FromInt(num_parameters));
305 // Arguments to ArgumentsAccessStub:
306 // function, receiver address, parameter count.
307 // The stub will rewrite receiver and parameter count if the previous
308 // stack frame was an arguments adapter frame.
309 ArgumentsAccessStub::Type type;
310 if (is_strict(language_mode()) || !has_simple_parameters()) {
311 type = ArgumentsAccessStub::NEW_STRICT;
312 } else if (literal()->has_duplicate_parameters()) {
313 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
315 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
317 ArgumentsAccessStub stub(isolate(), type);
320 SetVar(arguments, x0, x1, x2);
324 __ CallRuntime(Runtime::kTraceEnter, 0);
327 // Visit the declarations and body unless there is an illegal redeclaration.
329 if (scope()->HasIllegalRedeclaration()) {
330 Comment cmnt(masm_, "[ Declarations");
331 VisitForEffect(scope()->GetIllegalRedeclaration());
334 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
335 { Comment cmnt(masm_, "[ Declarations");
336 VisitDeclarations(scope()->declarations());
339 // Assert that the declarations do not use ICs. Otherwise the debugger
340 // won't be able to redirect a PC at an IC to the correct IC in newly recompiled code.
342 DCHECK_EQ(0, ic_total_count_);
345 Comment cmnt(masm_, "[ Stack check");
346 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
348 DCHECK(jssp.Is(__ StackPointer()));
349 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
351 PredictableCodeSizeScope predictable(masm_,
352 Assembler::kCallSizeWithRelocation);
353 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
358 Comment cmnt(masm_, "[ Body");
359 DCHECK(loop_depth() == 0);
360 VisitStatements(literal()->body());
361 DCHECK(loop_depth() == 0);
365 // Always emit a 'return undefined' in case control fell off the end of the body.
367 { Comment cmnt(masm_, "[ return <undefined>;");
368 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
370 EmitReturnSequence();
372 // Force emission of the pools, so they don't get emitted in the middle
373 // of the back edge table.
374 masm()->CheckVeneerPool(true, false);
375 masm()->CheckConstPool(true, false);
379 void FullCodeGenerator::ClearAccumulator() {
380 __ Mov(x0, Smi::FromInt(0));
384 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
385 __ Mov(x2, Operand(profiling_counter_));
386 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
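// Subs both updates the counter and sets the condition flags, so the caller
// can branch directly on whether the interrupt budget is exhausted.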
387 __ Subs(x3, x3, Smi::FromInt(delta));
388 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
392 void FullCodeGenerator::EmitProfilingCounterReset() {
393 int reset_value = FLAG_interrupt_budget;
394 __ Mov(x2, Operand(profiling_counter_));
395 __ Mov(x3, Smi::FromInt(reset_value));
396 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
400 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
401 Label* back_edge_target) {
402 DCHECK(jssp.Is(__ StackPointer()));
403 Comment cmnt(masm_, "[ Back edge bookkeeping");
404 // Block literal pools whilst emitting back edge code.
405 Assembler::BlockPoolsScope block_const_pool(masm_);
408 DCHECK(back_edge_target->is_bound());
409 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
410 // to reduce the absolute error due to the integer division. To do that,
411 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to the result).
414 int distance = static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
415 kCodeSizeMultiplier / 2);
416 int weight = Min(kMaxBackEdgeWeight,
417 Max(1, distance / kCodeSizeMultiplier));
418 EmitProfilingCounterDecrement(weight);
420 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
422 // Record a mapping of this PC offset to the OSR id. This is used to find
423 // the AST id from the unoptimized code in order to use it as a key into
424 // the deoptimization input data found in the optimized code.
425 RecordBackEdge(stmt->OsrEntryId());
427 EmitProfilingCounterReset();
430 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
431 // Record a mapping of the OSR id to this PC. This is used if the OSR
432 // entry becomes the target of a bailout. We don't expect it to be, but
433 // we want it to work if it is.
434 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
438 void FullCodeGenerator::EmitReturnSequence() {
439 Comment cmnt(masm_, "[ Return sequence");
441 if (return_label_.is_bound()) {
442 __ B(&return_label_);
445 __ Bind(&return_label_);
447 // Push the return value on the stack as the parameter.
448 // Runtime::TraceExit returns its parameter in x0.
449 __ Push(result_register());
450 __ CallRuntime(Runtime::kTraceExit, 1);
451 DCHECK(x0.Is(result_register()));
453 // Pretend that the exit is a backwards jump to the entry.
455 if (info_->ShouldSelfOptimize()) {
456 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
458 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
459 weight = Min(kMaxBackEdgeWeight,
460 Max(1, distance / kCodeSizeMultiplier));
462 EmitProfilingCounterDecrement(weight);
466 __ Call(isolate()->builtins()->InterruptCheck(),
467 RelocInfo::CODE_TARGET);
469 EmitProfilingCounterReset();
472 SetReturnPosition(literal());
473 const Register& current_sp = __ StackPointer();
474 // Nothing ensures 16-byte alignment here.
475 DCHECK(!current_sp.Is(csp));
476 __ Mov(current_sp, fp);
477 int no_frame_start = masm_->pc_offset();
478 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
479 // Drop the arguments and receiver and return.
480 // TODO(all): This implementation is overkill as it supports 2**31+1
481 // arguments; consider how to improve it without creating a security hole.
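// The number of bytes to drop is emitted below as a 64-bit literal (dc64);
// ldr_pcrel loads that literal into ip0, and adding it to the stack pointer
// drops the arguments and the receiver in one step.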
483 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
484 __ Add(current_sp, current_sp, ip0);
486 int32_t arg_count = info_->scope()->num_parameters() + 1;
487 __ dc64(kXRegSize * arg_count);
488 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
493 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
494 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
495 codegen()->GetVar(result_register(), var);
496 __ Push(result_register());
500 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
501 // Root values have no side effects.
505 void FullCodeGenerator::AccumulatorValueContext::Plug(
506 Heap::RootListIndex index) const {
507 __ LoadRoot(result_register(), index);
511 void FullCodeGenerator::StackValueContext::Plug(
512 Heap::RootListIndex index) const {
513 __ LoadRoot(result_register(), index);
514 __ Push(result_register());
518 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
519 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
521 if (index == Heap::kUndefinedValueRootIndex ||
522 index == Heap::kNullValueRootIndex ||
523 index == Heap::kFalseValueRootIndex) {
524 if (false_label_ != fall_through_) __ B(false_label_);
525 } else if (index == Heap::kTrueValueRootIndex) {
526 if (true_label_ != fall_through_) __ B(true_label_);
528 __ LoadRoot(result_register(), index);
529 codegen()->DoTest(this);
534 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
538 void FullCodeGenerator::AccumulatorValueContext::Plug(
539 Handle<Object> lit) const {
540 __ Mov(result_register(), Operand(lit));
544 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
545 // Immediates cannot be pushed directly.
546 __ Mov(result_register(), Operand(lit));
547 __ Push(result_register());
551 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
552 codegen()->PrepareForBailoutBeforeSplit(condition(),
556 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
557 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
558 if (false_label_ != fall_through_) __ B(false_label_);
559 } else if (lit->IsTrue() || lit->IsJSObject()) {
560 if (true_label_ != fall_through_) __ B(true_label_);
561 } else if (lit->IsString()) {
562 if (String::cast(*lit)->length() == 0) {
563 if (false_label_ != fall_through_) __ B(false_label_);
565 if (true_label_ != fall_through_) __ B(true_label_);
567 } else if (lit->IsSmi()) {
568 if (Smi::cast(*lit)->value() == 0) {
569 if (false_label_ != fall_through_) __ B(false_label_);
571 if (true_label_ != fall_through_) __ B(true_label_);
574 // For simplicity we always test the accumulator register.
575 __ Mov(result_register(), Operand(lit));
576 codegen()->DoTest(this);
581 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
582 Register reg) const {
588 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
590 Register reg) const {
593 __ Move(result_register(), reg);
597 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
598 Register reg) const {
600 if (count > 1) __ Drop(count - 1);
605 void FullCodeGenerator::TestContext::DropAndPlug(int count,
606 Register reg) const {
608 // For simplicity we always test the accumulator register.
610 __ Mov(result_register(), reg);
611 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
612 codegen()->DoTest(this);
616 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
617 Label* materialize_false) const {
618 DCHECK(materialize_true == materialize_false);
619 __ Bind(materialize_true);
623 void FullCodeGenerator::AccumulatorValueContext::Plug(
624 Label* materialize_true,
625 Label* materialize_false) const {
627 __ Bind(materialize_true);
628 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
630 __ Bind(materialize_false);
631 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
636 void FullCodeGenerator::StackValueContext::Plug(
637 Label* materialize_true,
638 Label* materialize_false) const {
640 __ Bind(materialize_true);
641 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
643 __ Bind(materialize_false);
644 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
650 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
651 Label* materialize_false) const {
652 DCHECK(materialize_true == true_label_);
653 DCHECK(materialize_false == false_label_);
657 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
658 Heap::RootListIndex value_root_index =
659 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
660 __ LoadRoot(result_register(), value_root_index);
664 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
665 Heap::RootListIndex value_root_index =
666 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
667 __ LoadRoot(x10, value_root_index);
672 void FullCodeGenerator::TestContext::Plug(bool flag) const {
673 codegen()->PrepareForBailoutBeforeSplit(condition(),
678 if (true_label_ != fall_through_) {
682 if (false_label_ != fall_through_) {
689 void FullCodeGenerator::DoTest(Expression* condition,
692 Label* fall_through) {
693 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
694 CallIC(ic, condition->test_id());
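// The ToBoolean stub leaves zero in the result register for false and a
// non-zero value for true, so split on the result being non-zero.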
695 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
699 // If (cond), branch to if_true.
700 // If (!cond), branch to if_false.
701 // fall_through is used as an optimization in cases where only one branch
702 // instruction is necessary.
703 void FullCodeGenerator::Split(Condition cond,
706 Label* fall_through) {
707 if (if_false == fall_through) {
709 } else if (if_true == fall_through) {
710 DCHECK(if_false != fall_through);
711 __ B(NegateCondition(cond), if_false);
719 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
720 // Offset is negative because higher indexes are at lower addresses.
721 int offset = -var->index() * kXRegSize;
722 // Adjust by a (parameter or local) base offset.
723 if (var->IsParameter()) {
724 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
726 offset += JavaScriptFrameConstants::kLocal0Offset;
728 return MemOperand(fp, offset);
732 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
733 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
734 if (var->IsContextSlot()) {
735 int context_chain_length = scope()->ContextChainLength(var->scope());
736 __ LoadContext(scratch, context_chain_length);
737 return ContextMemOperand(scratch, var->index());
739 return StackOperand(var);
744 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
745 // Use destination as scratch.
746 MemOperand location = VarOperand(var, dest);
747 __ Ldr(dest, location);
751 void FullCodeGenerator::SetVar(Variable* var,
755 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
756 DCHECK(!AreAliased(src, scratch0, scratch1));
757 MemOperand location = VarOperand(var, scratch0);
758 __ Str(src, location);
760 // Emit the write barrier code if the location is in the heap.
761 if (var->IsContextSlot()) {
762 // scratch0 contains the correct context.
763 __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
764 src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
769 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
770 bool should_normalize,
773 // Only prepare for bailouts before splits if we're in a test
774 // context. Otherwise, we let the Visit function deal with the
775 // preparation to avoid preparing with the same AST id twice.
776 if (!context()->IsTest()) return;
778 // TODO(all): Investigate to see if there is something to work on here.
780 if (should_normalize) {
783 PrepareForBailout(expr, TOS_REG);
784 if (should_normalize) {
785 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
786 Split(eq, if_true, if_false, NULL);
792 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
793 // The variable in the declaration always resides in the current function context.
795 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
796 if (generate_debug_code_) {
797 // Check that we're not inside a with or catch context.
798 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
799 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
800 __ Check(ne, kDeclarationInWithContext);
801 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
802 __ Check(ne, kDeclarationInCatchContext);
807 void FullCodeGenerator::VisitVariableDeclaration(
808 VariableDeclaration* declaration) {
809 // If it was not possible to allocate the variable at compile time, we
810 // need to "declare" it at runtime to make sure it actually exists in the local context.
812 VariableProxy* proxy = declaration->proxy();
813 VariableMode mode = declaration->mode();
814 Variable* variable = proxy->var();
815 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
817 switch (variable->location()) {
818 case VariableLocation::GLOBAL:
819 case VariableLocation::UNALLOCATED:
820 globals_->Add(variable->name(), zone());
821 globals_->Add(variable->binding_needs_init()
822 ? isolate()->factory()->the_hole_value()
823 : isolate()->factory()->undefined_value(),
827 case VariableLocation::PARAMETER:
828 case VariableLocation::LOCAL:
830 Comment cmnt(masm_, "[ VariableDeclaration");
831 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
832 __ Str(x10, StackOperand(variable));
836 case VariableLocation::CONTEXT:
838 Comment cmnt(masm_, "[ VariableDeclaration");
839 EmitDebugCheckDeclarationContext(variable);
840 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
841 __ Str(x10, ContextMemOperand(cp, variable->index()));
842 // No write barrier since the_hole_value is in old space.
843 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
847 case VariableLocation::LOOKUP: {
848 Comment cmnt(masm_, "[ VariableDeclaration");
849 __ Mov(x2, Operand(variable->name()));
850 // Declaration nodes are always introduced in one of four modes.
851 DCHECK(IsDeclaredVariableMode(mode));
852 // Push initial value, if any.
853 // Note: For variables we must not push an initial value (such as
854 // 'undefined') because we may have a (legal) redeclaration and we
855 // must not destroy the current value.
857 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
860 // Pushing 0 (xzr) indicates no initial value.
863 __ CallRuntime(IsImmutableVariableMode(mode)
864 ? Runtime::kDeclareReadOnlyLookupSlot
865 : Runtime::kDeclareLookupSlot,
873 void FullCodeGenerator::VisitFunctionDeclaration(
874 FunctionDeclaration* declaration) {
875 VariableProxy* proxy = declaration->proxy();
876 Variable* variable = proxy->var();
877 switch (variable->location()) {
878 case VariableLocation::GLOBAL:
879 case VariableLocation::UNALLOCATED: {
880 globals_->Add(variable->name(), zone());
881 Handle<SharedFunctionInfo> function =
882 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
883 // Check for stack overflow exception.
884 if (function.is_null()) return SetStackOverflow();
885 globals_->Add(function, zone());
889 case VariableLocation::PARAMETER:
890 case VariableLocation::LOCAL: {
891 Comment cmnt(masm_, "[ Function Declaration");
892 VisitForAccumulatorValue(declaration->fun());
893 __ Str(result_register(), StackOperand(variable));
897 case VariableLocation::CONTEXT: {
898 Comment cmnt(masm_, "[ Function Declaration");
899 EmitDebugCheckDeclarationContext(variable);
900 VisitForAccumulatorValue(declaration->fun());
901 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
902 int offset = Context::SlotOffset(variable->index());
903 // We know that we have written a function, which is not a smi.
904 __ RecordWriteContextSlot(cp,
912 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
916 case VariableLocation::LOOKUP: {
917 Comment cmnt(masm_, "[ Function Declaration");
918 __ Mov(x2, Operand(variable->name()));
920 // Push initial value for function declaration.
921 VisitForStackValue(declaration->fun());
922 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
929 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
930 // Call the runtime to declare the globals.
931 __ Mov(x11, Operand(pairs));
932 Register flags = xzr;
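// xzr reads as zero, so when DeclareGlobalsFlags() is zero a zero smi can be
// pushed without materializing it in a scratch register.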
933 if (Smi::FromInt(DeclareGlobalsFlags())) {
935 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
938 __ CallRuntime(Runtime::kDeclareGlobals, 2);
939 // Return value is ignored.
943 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
944 // Call the runtime to declare the modules.
945 __ Push(descriptions);
946 __ CallRuntime(Runtime::kDeclareModules, 1);
947 // Return value is ignored.
951 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
952 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
953 Comment cmnt(masm_, "[ SwitchStatement");
954 Breakable nested_statement(this, stmt);
955 SetStatementPosition(stmt);
957 // Keep the switch value on the stack until a case matches.
958 VisitForStackValue(stmt->tag());
959 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
961 ZoneList<CaseClause*>* clauses = stmt->cases();
962 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
964 Label next_test; // Recycled for each test.
965 // Compile all the tests with branches to their bodies.
966 for (int i = 0; i < clauses->length(); i++) {
967 CaseClause* clause = clauses->at(i);
968 clause->body_target()->Unuse();
970 // The default is not a test, but remember it as the final fall-through.
971 if (clause->is_default()) {
972 default_clause = clause;
976 Comment cmnt(masm_, "[ Case comparison");
980 // Compile the label expression.
981 VisitForAccumulatorValue(clause->label());
983 // Perform the comparison as if via '==='.
984 __ Peek(x1, 0); // Switch value.
986 JumpPatchSite patch_site(masm_);
987 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
989 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
991 __ B(ne, &next_test);
992 __ Drop(1); // Switch value is no longer needed.
993 __ B(clause->body_target());
997 // Record position before stub call for type feedback.
998 SetExpressionPosition(clause);
999 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1000 strength(language_mode())).code();
1001 CallIC(ic, clause->CompareId());
1002 patch_site.EmitPatchInfo();
1006 PrepareForBailout(clause, TOS_REG);
1007 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1009 __ B(clause->body_target());
1012 __ Cbnz(x0, &next_test);
1013 __ Drop(1); // Switch value is no longer needed.
1014 __ B(clause->body_target());
1017 // Discard the test value and jump to the default if present, otherwise to
1018 // the end of the statement.
1019 __ Bind(&next_test);
1020 __ Drop(1); // Switch value is no longer needed.
1021 if (default_clause == NULL) {
1022 __ B(nested_statement.break_label());
1024 __ B(default_clause->body_target());
1027 // Compile all the case bodies.
1028 for (int i = 0; i < clauses->length(); i++) {
1029 Comment cmnt(masm_, "[ Case body");
1030 CaseClause* clause = clauses->at(i);
1031 __ Bind(clause->body_target());
1032 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1033 VisitStatements(clause->statements());
1036 __ Bind(nested_statement.break_label());
1037 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1041 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1042 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1043 Comment cmnt(masm_, "[ ForInStatement");
1044 SetStatementPosition(stmt, SKIP_BREAK);
1046 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1048 // TODO(all): This visitor probably needs better comments and a revisit.
1051 ForIn loop_statement(this, stmt);
1052 increment_loop_depth();
1054 // Get the object to enumerate over. If the object is null or undefined, skip
1055 // over the loop. See ECMA-262 version 5, section 12.6.4.
1056 SetExpressionAsStatementPosition(stmt->enumerable());
1057 VisitForAccumulatorValue(stmt->enumerable());
1058 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1059 Register null_value = x15;
1060 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1061 __ Cmp(x0, null_value);
1064 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1066 // Convert the object to a JS object.
1067 Label convert, done_convert;
1068 __ JumpIfSmi(x0, &convert);
1069 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1071 ToObjectStub stub(isolate());
1073 __ Bind(&done_convert);
1074 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1077 // Check for proxies.
1079 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1080 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1082 // Check cache validity in generated code. This is a fast case for
1083 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1084 // guarantee cache validity, call the runtime system to check cache
1085 // validity or get the property names in a fixed array.
1086 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1088 // The enum cache is valid. Load the map of the object being
1089 // iterated over and use the cache for the iteration.
1091 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1094 // Get the set of properties to enumerate.
1095 __ Bind(&call_runtime);
1096 __ Push(x0); // Duplicate the enumerable object on the stack.
1097 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1098 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1100 // If we got a map from the runtime call, we can do a fast
1101 // modification check. Otherwise, we got a fixed array, and we have
1102 // to do a slow check.
1103 Label fixed_array, no_descriptors;
1104 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1105 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1107 // We got a map in register x0. Get the enumeration cache from it.
1108 __ Bind(&use_cache);
1110 __ EnumLengthUntagged(x1, x0);
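// An enum cache length of zero means there are no enumerable own properties
// to iterate over, so skip the cache setup.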
1111 __ Cbz(x1, &no_descriptors);
1113 __ LoadInstanceDescriptors(x0, x2);
1114 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1116 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1118 // Set up the four remaining stack slots.
1120 // Map, enumeration cache, enum cache length, zero (the last two as smis).
1121 __ Push(x0, x2, x1, xzr);
1124 __ Bind(&no_descriptors);
1128 // We got a fixed array in register x0. Iterate through that.
1129 __ Bind(&fixed_array);
1131 __ LoadObject(x1, FeedbackVector());
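// Mark this for-in's feedback vector slot with the megamorphic sentinel,
// presumably because the generic (fixed-array) path yields no useful type
// feedback.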
1132 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1133 int vector_index = FeedbackVector()->GetIndex(slot);
1134 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
1136 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1137 __ Peek(x10, 0); // Get enumerated object.
1138 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1139 // TODO(all): A similar check was done already. Can we avoid it here?
1140 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1141 DCHECK(Smi::FromInt(0) == 0);
1142 __ CzeroX(x1, le); // Zero indicates proxy.
1143 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1144 // Smi and array, fixed array length (as smi) and initial index.
1145 __ Push(x1, x0, x2, xzr);
1147 // Generate code for doing the condition check.
1148 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1150 SetExpressionAsStatementPosition(stmt->each());
1152 // Load the current count to x0, load the length to x1.
1153 __ PeekPair(x0, x1, 0);
1154 __ Cmp(x0, x1); // Compare to the array length.
1155 __ B(hs, loop_statement.break_label());
1157 // Get the current entry of the array into register x3.
1158 __ Peek(x10, 2 * kXRegSize);
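// The index in x0 is a smi; untag it and scale it by the pointer size in a
// single operand to form the byte offset of the current element.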
1159 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1160 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1162 // Get the expected map from the stack, or a smi (in the permanent slow
1163 // case), into register x2.
1164 __ Peek(x2, 3 * kXRegSize);
1166 // Check if the expected map still matches that of the enumerable.
1167 // If not, we may have to filter the key.
1169 __ Peek(x1, 4 * kXRegSize);
1170 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1172 __ B(eq, &update_each);
1174 // For proxies, no filtering is done.
1175 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1176 STATIC_ASSERT(kSmiTag == 0);
1177 __ Cbz(x2, &update_each);
1179 // Convert the entry to a string or (smi) 0 if it isn't a property
1180 // any more. If the property has been removed while iterating, we just skip it.
1183 __ CallRuntime(Runtime::kForInFilter, 2);
1184 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1186 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
1187 loop_statement.continue_label());
1189 // Update the 'each' property or variable from the possibly filtered
1190 // entry in register x3.
1191 __ Bind(&update_each);
1192 __ Mov(result_register(), x3);
1193 // Perform the assignment as if via '='.
1194 { EffectContext context(this);
1195 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1196 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1199 // Generate code for the body of the loop.
1200 Visit(stmt->body());
1202 // Generate code for going to the next element by incrementing
1203 // the index (smi) stored on top of the stack.
1204 __ Bind(loop_statement.continue_label());
1205 // TODO(all): We could use a callee-saved register to avoid popping.
1207 __ Add(x0, x0, Smi::FromInt(1));
1210 EmitBackEdgeBookkeeping(stmt, &loop);
1213 // Remove the pointers stored on the stack.
1214 __ Bind(loop_statement.break_label());
1217 // Exit and decrement the loop depth.
1218 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1220 decrement_loop_depth();
1224 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1226 // Use the fast case closure allocation code that allocates in new space for
1227 // nested functions that don't need literal cloning. If we're running with
1228 // the --always-opt or the --prepare-always-opt flag, we need to use the
1229 // runtime function so that the new function we are creating here gets a
1230 // chance to have its code optimized and doesn't just get a copy of the
1231 // existing unoptimized code.
1232 if (!FLAG_always_opt &&
1233 !FLAG_prepare_always_opt &&
1235 scope()->is_function_scope() &&
1236 info->num_literals() == 0) {
1237 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1238 __ Mov(x2, Operand(info));
1243 pretenure ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure, 1);
1245 context()->Plug(x0);
1249 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1250 FeedbackVectorICSlot slot) {
1251 DCHECK(NeedsHomeObject(initializer));
1252 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1253 __ Mov(StoreDescriptor::NameRegister(),
1254 Operand(isolate()->factory()->home_object_symbol()));
1255 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1256 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1261 void FullCodeGenerator::EmitSetHomeObjectAccumulator(
1262 Expression* initializer, int offset, FeedbackVectorICSlot slot) {
1263 DCHECK(NeedsHomeObject(initializer));
1264 __ Move(StoreDescriptor::ReceiverRegister(), x0);
1265 __ Mov(StoreDescriptor::NameRegister(),
1266 Operand(isolate()->factory()->home_object_symbol()));
1267 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1268 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1273 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1274 TypeofMode typeof_mode,
1276 Register current = cp;
1277 Register next = x10;
1278 Register temp = x11;
1282 if (s->num_heap_slots() > 0) {
1283 if (s->calls_sloppy_eval()) {
1284 // Check that extension is NULL.
1285 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1286 __ Cbnz(temp, slow);
1288 // Load next context in chain.
1289 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1290 // Walk the rest of the chain without clobbering cp.
1293 // If no outer scope calls eval, we do not need to check more
1294 // context extensions.
1295 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1296 s = s->outer_scope();
1299 if (s->is_eval_scope()) {
1301 __ Mov(next, current);
1304 // Terminate at native context.
1305 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1306 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1307 // Check that extension is NULL.
1308 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1309 __ Cbnz(temp, slow);
1310 // Load next context in chain.
1311 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1316 // All extension objects were empty and it is safe to use the normal global load machinery.
1318 EmitGlobalVariableLoad(proxy, typeof_mode);
1322 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1324 DCHECK(var->IsContextSlot());
1325 Register context = cp;
1326 Register next = x10;
1327 Register temp = x11;
1329 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1330 if (s->num_heap_slots() > 0) {
1331 if (s->calls_sloppy_eval()) {
1332 // Check that extension is NULL.
1333 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1334 __ Cbnz(temp, slow);
1336 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1337 // Walk the rest of the chain without clobbering cp.
1341 // Check that last extension is NULL.
1342 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1343 __ Cbnz(temp, slow);
1345 // This function is used only for loads, not stores, so it's safe to
1346 // return a cp-based operand (the write barrier cannot be allowed to
1347 // destroy the cp register).
1348 return ContextMemOperand(context, var->index());
1352 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1353 TypeofMode typeof_mode,
1354 Label* slow, Label* done) {
1355 // Generate fast-case code for variables that might be shadowed by
1356 // eval-introduced variables. Eval is used a lot without
1357 // introducing variables. In those cases, we do not want to
1358 // perform a runtime call for all variables in the scope
1359 // containing the eval.
1360 Variable* var = proxy->var();
1361 if (var->mode() == DYNAMIC_GLOBAL) {
1362 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1364 } else if (var->mode() == DYNAMIC_LOCAL) {
1365 Variable* local = var->local_if_not_shadowed();
1366 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1367 if (local->mode() == LET || local->mode() == CONST ||
1368 local->mode() == CONST_LEGACY) {
1369 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1370 if (local->mode() == CONST_LEGACY) {
1371 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1372 } else { // LET || CONST
1373 __ Mov(x0, Operand(var->name()));
1375 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1383 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1384 TypeofMode typeof_mode) {
1385 Variable* var = proxy->var();
1386 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1387 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1388 if (var->IsGlobalSlot()) {
1389 DCHECK(var->index() > 0);
1390 DCHECK(var->IsStaticGlobalObjectProperty());
1391 int const slot = var->index();
1392 int const depth = scope()->ContextChainLength(var->scope());
1393 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1394 __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
1395 LoadGlobalViaContextStub stub(isolate(), depth);
1398 __ Push(Smi::FromInt(slot));
1399 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1402 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1403 __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1404 __ Mov(LoadDescriptor::SlotRegister(),
1405 SmiFromSlot(proxy->VariableFeedbackSlot()));
1406 CallLoadIC(typeof_mode);
1411 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1412 TypeofMode typeof_mode) {
1413 // Record position before possible IC call.
1414 SetExpressionPosition(proxy);
1415 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1416 Variable* var = proxy->var();
1418 // Three cases: global variables, lookup variables, and all other types of variables.
1420 switch (var->location()) {
1421 case VariableLocation::GLOBAL:
1422 case VariableLocation::UNALLOCATED: {
1423 Comment cmnt(masm_, "Global variable");
1424 EmitGlobalVariableLoad(proxy, typeof_mode);
1425 context()->Plug(x0);
1429 case VariableLocation::PARAMETER:
1430 case VariableLocation::LOCAL:
1431 case VariableLocation::CONTEXT: {
1432 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1433 Comment cmnt(masm_, var->IsContextSlot()
1434 ? "Context variable"
1435 : "Stack variable");
1436 if (NeedsHoleCheckForLoad(proxy)) {
1437 // Let and const need a read barrier.
1440 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1441 if (var->mode() == LET || var->mode() == CONST) {
1442 // Throw a reference error when using an uninitialized let/const
1443 // binding in harmony mode.
1444 __ Mov(x0, Operand(var->name()));
1446 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1449 // Uninitialized legacy const bindings are unholed.
1450 DCHECK(var->mode() == CONST_LEGACY);
1451 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1454 context()->Plug(x0);
1457 context()->Plug(var);
1461 case VariableLocation::LOOKUP: {
1463 // Generate code for loading from variables potentially shadowed by
1464 // eval-introduced variables.
1465 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1467 Comment cmnt(masm_, "Lookup variable");
1468 __ Mov(x1, Operand(var->name()));
1469 __ Push(cp, x1); // Context and name.
1470 Runtime::FunctionId function_id =
1471 typeof_mode == NOT_INSIDE_TYPEOF
1472 ? Runtime::kLoadLookupSlot
1473 : Runtime::kLoadLookupSlotNoReferenceError;
1474 __ CallRuntime(function_id, 2);
1476 context()->Plug(x0);
1483 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1484 Comment cmnt(masm_, "[ RegExpLiteral");
1486 // Registers will be used as follows:
1487 // x5 = materialized value (RegExp literal)
1488 // x4 = JS function, literals array
1489 // x3 = literal index
1490 // x2 = RegExp pattern
1491 // x1 = RegExp flags
1492 // x0 = RegExp literal clone
1493 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1494 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1495 int literal_offset =
1496 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1497 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1498 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1500 // Create regexp literal using runtime function.
1501 // Result will be in x0.
1502 __ Mov(x3, Smi::FromInt(expr->literal_index()));
1503 __ Mov(x2, Operand(expr->pattern()));
1504 __ Mov(x1, Operand(expr->flags()));
1505 __ Push(x4, x3, x2, x1);
1506 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1509 __ Bind(&materialized);
1510 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1511 Label allocated, runtime_allocate;
1512 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1515 __ Bind(&runtime_allocate);
1516 __ Mov(x10, Smi::FromInt(size));
1518 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1521 __ Bind(&allocated);
1522 // After this, registers are used as follows:
1523 // x0: Newly allocated regexp.
1524 // x5: Materialized regexp.
1525 // x10, x11, x12: temps.
1526 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1527 context()->Plug(x0);
1531 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1532 Expression* expression = (property == NULL) ? NULL : property->value();
1533 if (expression == NULL) {
1534 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1537 VisitForStackValue(expression);
1538 if (NeedsHomeObject(expression)) {
1539 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1540 property->kind() == ObjectLiteral::Property::SETTER);
1541 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1542 EmitSetHomeObject(expression, offset, property->GetSlot());
1548 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1549 Comment cmnt(masm_, "[ ObjectLiteral");
1551 Handle<FixedArray> constant_properties = expr->constant_properties();
1552 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1553 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1554 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1555 __ Mov(x1, Operand(constant_properties));
1556 int flags = expr->ComputeFlags();
1557 __ Mov(x0, Smi::FromInt(flags));
1558 if (MustCreateObjectLiteralWithRuntime(expr)) {
1559 __ Push(x3, x2, x1, x0);
1560 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1562 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1565 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1567 // If result_saved is true the result is on top of the stack. If
1568 // result_saved is false the result is in x0.
1569 bool result_saved = false;
1571 AccessorTable accessor_table(zone());
1572 int property_index = 0;
1573 for (; property_index < expr->properties()->length(); property_index++) {
1574 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1575 if (property->is_computed_name()) break;
1576 if (property->IsCompileTimeValue()) continue;
1578 Literal* key = property->key()->AsLiteral();
1579 Expression* value = property->value();
1580 if (!result_saved) {
1581 __ Push(x0); // Save result on stack
1582 result_saved = true;
1584 switch (property->kind()) {
1585 case ObjectLiteral::Property::CONSTANT:
1587 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1588 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1590 case ObjectLiteral::Property::COMPUTED:
1591 // It is safe to use [[Put]] here because the boilerplate already
1592 // contains computed properties with an uninitialized value.
1593 if (key->value()->IsInternalizedString()) {
1594 if (property->emit_store()) {
1595 VisitForAccumulatorValue(value);
1596 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1597 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1598 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1599 if (FLAG_vector_stores) {
1600 EmitLoadStoreICSlot(property->GetSlot(0));
1603 CallStoreIC(key->LiteralFeedbackId());
1605 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1607 if (NeedsHomeObject(value)) {
1608 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1611 VisitForEffect(value);
1617 VisitForStackValue(key);
1618 VisitForStackValue(value);
1619 if (property->emit_store()) {
1620 if (NeedsHomeObject(value)) {
1621 EmitSetHomeObject(value, 2, property->GetSlot());
1623 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1625 __ CallRuntime(Runtime::kSetProperty, 4);
1630 case ObjectLiteral::Property::PROTOTYPE:
1631 DCHECK(property->emit_store());
1632 // Duplicate receiver on stack.
1635 VisitForStackValue(value);
1636 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1638 case ObjectLiteral::Property::GETTER:
1639 if (property->emit_store()) {
1640 accessor_table.lookup(key)->second->getter = property;
1643 case ObjectLiteral::Property::SETTER:
1644 if (property->emit_store()) {
1645 accessor_table.lookup(key)->second->setter = property;
1651 // Emit code to define accessors, using only a single call to the runtime for
1652 // each pair of corresponding getters and setters.
1653 for (AccessorTable::Iterator it = accessor_table.begin();
1654 it != accessor_table.end();
1656 __ Peek(x10, 0); // Duplicate receiver.
1658 VisitForStackValue(it->first);
1659 EmitAccessor(it->second->getter);
1660 EmitAccessor(it->second->setter);
1661 __ Mov(x10, Smi::FromInt(NONE));
1663 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1666 // Object literals have two parts. The "static" part on the left contains no
1667 // computed property names, and so we can compute its map ahead of time; see
1668 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1669 // starts with the first computed property name, and continues with all
1670 // properties to its right. All the code from above initializes the static
1671 // component of the object literal, and arranges for the map of the result to
1672 // reflect the static order in which the keys appear. For the dynamic
1673 // properties, we compile them into a series of "SetOwnProperty" runtime
1674 // calls. This will preserve insertion order.
1675 for (; property_index < expr->properties()->length(); property_index++) {
1676 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1678 Expression* value = property->value();
1679 if (!result_saved) {
1680 __ Push(x0); // Save result on stack
1681 result_saved = true;
1684 __ Peek(x10, 0); // Duplicate receiver.
1687 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1688 DCHECK(!property->is_computed_name());
1689 VisitForStackValue(value);
1690 DCHECK(property->emit_store());
1691 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1693 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1694 VisitForStackValue(value);
1695 if (NeedsHomeObject(value)) {
1696 EmitSetHomeObject(value, 2, property->GetSlot());
1699 switch (property->kind()) {
1700 case ObjectLiteral::Property::CONSTANT:
1701 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1702 case ObjectLiteral::Property::COMPUTED:
1703 if (property->emit_store()) {
1704 __ Mov(x0, Smi::FromInt(NONE));
1706 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1712 case ObjectLiteral::Property::PROTOTYPE:
1716 case ObjectLiteral::Property::GETTER:
1717 __ Mov(x0, Smi::FromInt(NONE));
1719 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1722 case ObjectLiteral::Property::SETTER:
1723 __ Mov(x0, Smi::FromInt(NONE));
1725 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1731 if (expr->has_function()) {
1732 DCHECK(result_saved);
1735 __ CallRuntime(Runtime::kToFastProperties, 1);
1739 context()->PlugTOS();
1741 context()->Plug(x0);
1746 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1747 Comment cmnt(masm_, "[ ArrayLiteral");
1749 expr->BuildConstantElements(isolate());
1750 Handle<FixedArray> constant_elements = expr->constant_elements();
1751 bool has_fast_elements =
1752 IsFastObjectElementsKind(expr->constant_elements_kind());
1754 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1755 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1756 // If the only customer of allocation sites is transitioning, then
1757 // we can turn it off if we don't have anywhere else to transition to.
1758 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1761 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1762 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1763 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1764 __ Mov(x1, Operand(constant_elements));
1765 if (MustCreateArrayLiteralWithRuntime(expr)) {
1766 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1767 __ Push(x3, x2, x1, x0);
1768 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1770 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1773 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1775 bool result_saved = false; // Is the result saved to the stack?
1776 ZoneList<Expression*>* subexprs = expr->values();
1777 int length = subexprs->length();
1779 // Emit code to evaluate all the non-constant subexpressions and to store
1780 // them into the newly cloned array.
1781 int array_index = 0;
1782 for (; array_index < length; array_index++) {
1783 Expression* subexpr = subexprs->at(array_index);
1784 if (subexpr->IsSpread()) break;
1786 // If the subexpression is a literal or a simple materialized literal it
1787 // is already set in the cloned array.
1788 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1790 if (!result_saved) {
1791 __ Mov(x1, Smi::FromInt(expr->literal_index()));
1793 result_saved = true;
1795 VisitForAccumulatorValue(subexpr);
1797 if (has_fast_elements) {
1798 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1799 __ Peek(x6, kPointerSize); // Copy of array literal.
1800 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1801 __ Str(result_register(), FieldMemOperand(x1, offset));
1802 // Update the write barrier for the array store.
1803 __ RecordWriteField(x1, offset, result_register(), x10,
1804 kLRHasBeenSaved, kDontSaveFPRegs,
1805 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1807 __ Mov(x3, Smi::FromInt(array_index));
1808 StoreArrayLiteralElementStub stub(isolate());
1812 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1815 // In case the array literal contains spread expressions it has two parts. The
1816 // first part is the "static" array which has a literal index and is handled
1817 // above. The second part is the part after the first spread expression
1818 // (inclusive), and these elements get appended to the array. Note that the
1819 // number of elements an iterable produces is unknown ahead of time.
1820 if (array_index < length && result_saved) {
1821 __ Drop(1); // literal index
1823 result_saved = false;
1825 for (; array_index < length; array_index++) {
1826 Expression* subexpr = subexprs->at(array_index);
1829 if (subexpr->IsSpread()) {
1830 VisitForStackValue(subexpr->AsSpread()->expression());
1831 __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1834 VisitForStackValue(subexpr);
1835 __ CallRuntime(Runtime::kAppendElement, 2);
1838 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1842 __ Drop(1); // literal index
1843 context()->PlugTOS();
1845 context()->Plug(x0);
1850 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1851 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1853 Comment cmnt(masm_, "[ Assignment");
1854 SetExpressionPosition(expr, INSERT_BREAK);
1856 Property* property = expr->target()->AsProperty();
1857 LhsKind assign_type = Property::GetAssignType(property);
1859 // Evaluate LHS expression.
1860 switch (assign_type) {
1862 // Nothing to do here.
1864 case NAMED_PROPERTY:
1865 if (expr->is_compound()) {
1866 // We need the receiver both on the stack and in the register.
1867 VisitForStackValue(property->obj());
1868 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1870 VisitForStackValue(property->obj());
1873 case NAMED_SUPER_PROPERTY:
1875 property->obj()->AsSuperPropertyReference()->this_var());
1876 VisitForAccumulatorValue(
1877 property->obj()->AsSuperPropertyReference()->home_object());
1878 __ Push(result_register());
1879 if (expr->is_compound()) {
1880 const Register scratch = x10;
1881 __ Peek(scratch, kPointerSize);
1882 __ Push(scratch, result_register());
1885 case KEYED_SUPER_PROPERTY:
1887 property->obj()->AsSuperPropertyReference()->this_var());
1889 property->obj()->AsSuperPropertyReference()->home_object());
1890 VisitForAccumulatorValue(property->key());
1891 __ Push(result_register());
1892 if (expr->is_compound()) {
1893 const Register scratch1 = x10;
1894 const Register scratch2 = x11;
1895 __ Peek(scratch1, 2 * kPointerSize);
1896 __ Peek(scratch2, kPointerSize);
1897 __ Push(scratch1, scratch2, result_register());
1900 case KEYED_PROPERTY:
1901 if (expr->is_compound()) {
1902 VisitForStackValue(property->obj());
1903 VisitForStackValue(property->key());
1904 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1905 __ Peek(LoadDescriptor::NameRegister(), 0);
1907 VisitForStackValue(property->obj());
1908 VisitForStackValue(property->key());
1913 // For compound assignments we need another deoptimization point after the
1914 // variable/property load.
1915 if (expr->is_compound()) {
1916 { AccumulatorValueContext context(this);
1917 switch (assign_type) {
1919 EmitVariableLoad(expr->target()->AsVariableProxy());
1920 PrepareForBailout(expr->target(), TOS_REG);
1922 case NAMED_PROPERTY:
1923 EmitNamedPropertyLoad(property);
1924 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1926 case NAMED_SUPER_PROPERTY:
1927 EmitNamedSuperPropertyLoad(property);
1928 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1930 case KEYED_SUPER_PROPERTY:
1931 EmitKeyedSuperPropertyLoad(property);
1932 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1934 case KEYED_PROPERTY:
1935 EmitKeyedPropertyLoad(property);
1936 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1941 Token::Value op = expr->binary_op();
1942 __ Push(x0); // Left operand goes on the stack.
1943 VisitForAccumulatorValue(expr->value());
1945 AccumulatorValueContext context(this);
1946 if (ShouldInlineSmiCase(op)) {
1947 EmitInlineSmiBinaryOp(expr->binary_operation(),
1952 EmitBinaryOp(expr->binary_operation(), op);
1955 // Deoptimization point in case the binary operation may have side effects.
1956 PrepareForBailout(expr->binary_operation(), TOS_REG);
1958 VisitForAccumulatorValue(expr->value());
1961 SetExpressionPosition(expr);
1964 switch (assign_type) {
1966 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1967 expr->op(), expr->AssignmentSlot());
1968 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1969 context()->Plug(x0);
1971 case NAMED_PROPERTY:
1972 EmitNamedPropertyAssignment(expr);
1974 case NAMED_SUPER_PROPERTY:
1975 EmitNamedSuperPropertyStore(property);
1976 context()->Plug(x0);
1978 case KEYED_SUPER_PROPERTY:
1979 EmitKeyedSuperPropertyStore(property);
1980 context()->Plug(x0);
1982 case KEYED_PROPERTY:
1983 EmitKeyedPropertyAssignment(expr);
1989 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1990 SetExpressionPosition(prop);
1991 Literal* key = prop->key()->AsLiteral();
1992 DCHECK(!prop->IsSuperAccess());
1994 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
1995 __ Mov(LoadDescriptor::SlotRegister(),
1996 SmiFromSlot(prop->PropertyFeedbackSlot()));
1997 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2001 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2002 // Stack: receiver, home_object.
2003 SetExpressionPosition(prop);
2004 Literal* key = prop->key()->AsLiteral();
2005 DCHECK(!key->value()->IsSmi());
2006 DCHECK(prop->IsSuperAccess());
2008 __ Push(key->value());
2009 __ Push(Smi::FromInt(language_mode()));
2010 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2014 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2015 SetExpressionPosition(prop);
2016 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2017 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2018 __ Mov(LoadDescriptor::SlotRegister(),
2019 SmiFromSlot(prop->PropertyFeedbackSlot()));
2024 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2025 // Stack: receiver, home_object, key.
2026 SetExpressionPosition(prop);
2027 __ Push(Smi::FromInt(language_mode()));
2028 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2032 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2034 Expression* left_expr,
2035 Expression* right_expr) {
2036 Label done, both_smis, stub_call;
2038 // Get the arguments.
2040 Register right = x0;
2041 Register result = x0;
2044 // Perform combined smi check on both operands.
2045 __ Orr(x10, left, right);
2046 JumpPatchSite patch_site(masm_);
2047 patch_site.EmitJumpIfSmi(x10, &both_smis);
2049 __ Bind(&stub_call);
2052 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2054 Assembler::BlockPoolsScope scope(masm_);
2055 CallIC(code, expr->BinaryOperationFeedbackId());
2056 patch_site.EmitPatchInfo();
2060 __ Bind(&both_smis);
2061 // Smi case. This code works in the same way as the smi-smi case in the type
2062 // recording binary operation stub, see
2063 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2064 // TODO(all): That doesn't exist any more. Where are the comments?
2066 // The set of operations that needs to be supported here is controlled by
2067 // FullCodeGenerator::ShouldInlineSmiCase().
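// Note: on arm64 a smi carries its 32-bit payload in the upper word
// (kSmiShift is 32, see the STATIC_ASSERT below), e.g. the smi 5 is encoded
// as 5 << 32. The shift cases below therefore extract the untagged shift
// amount as the 5 bits starting at kSmiShift.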
2070 __ Ubfx(right, right, kSmiShift, 5);
2071 __ Asr(result, left, right);
2072 __ Bic(result, result, kSmiShiftMask);
2075 __ Ubfx(right, right, kSmiShift, 5);
2076 __ Lsl(result, left, right);
2079 // If `left >>> right` >= 0x80000000, the result is not representable in a
2080 // signed 32-bit smi.
2081 __ Ubfx(right, right, kSmiShift, 5);
2082 __ Lsr(x10, left, right);
2083 __ Tbnz(x10, kXSignBit, &stub_call);
2084 __ Bic(result, x10, kSmiShiftMask);
2087 __ Adds(x10, left, right);
2088 __ B(vs, &stub_call);
2089 __ Mov(result, x10);
2092 __ Subs(x10, left, right);
2093 __ B(vs, &stub_call);
2094 __ Mov(result, x10);
2097 Label not_minus_zero, done;
2098 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2099 STATIC_ASSERT(kSmiTag == 0);
2100 __ Smulh(x10, left, right);
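// Smulh yields the high 64 bits of the 128-bit product. Since both operands
// carry their payloads shifted up by 32 bits, this is exactly the untagged
// product of the two smis.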
2101 __ Cbnz(x10, &not_minus_zero);
2102 __ Eor(x11, left, right);
2103 __ Tbnz(x11, kXSignBit, &stub_call);
2104 __ Mov(result, x10);
2106 __ Bind(&not_minus_zero);
2108 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2109 __ B(lt, &stub_call);
2110 __ SmiTag(result, x10);
2115 __ Orr(result, left, right);
2117 case Token::BIT_AND:
2118 __ And(result, left, right);
2120 case Token::BIT_XOR:
2121 __ Eor(result, left, right);
2128 context()->Plug(x0);
2132 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2135 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2136 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2138 Assembler::BlockPoolsScope scope(masm_);
2139 CallIC(code, expr->BinaryOperationFeedbackId());
2140 patch_site.EmitPatchInfo();
2142 context()->Plug(x0);
2146 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2147 // Constructor is in x0.
2148 DCHECK(lit != NULL);
2151 // No access check is needed here since the constructor is created by the
2153 Register scratch = x1;
2155 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2158 for (int i = 0; i < lit->properties()->length(); i++) {
2159 ObjectLiteral::Property* property = lit->properties()->at(i);
2160 Expression* value = property->value();
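// Select the home object for this property: the constructor for static
// properties, the prototype otherwise; both are already on the stack, with
// the prototype on top.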
2162 if (property->is_static()) {
2163 __ Peek(scratch, kPointerSize); // constructor
2165 __ Peek(scratch, 0); // prototype
2168 EmitPropertyKey(property, lit->GetIdForProperty(i));
2170 // The static 'prototype' property is read only. The non-computed
2171 // property name case is handled in the parser. Since this is the only case
2172 // where we need to check for an own read-only property, we special-case it
2173 // here so we do not need to perform the check for every property.
2174 if (property->is_static() && property->is_computed_name()) {
2175 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2179 VisitForStackValue(value);
2180 if (NeedsHomeObject(value)) {
2181 EmitSetHomeObject(value, 2, property->GetSlot());
2184 switch (property->kind()) {
2185 case ObjectLiteral::Property::CONSTANT:
2186 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2187 case ObjectLiteral::Property::PROTOTYPE:
2189 case ObjectLiteral::Property::COMPUTED:
2190 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2193 case ObjectLiteral::Property::GETTER:
2194 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2196 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2199 case ObjectLiteral::Property::SETTER:
2200 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2202 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2210 // Set both the prototype and constructor to have fast properties, and also
2211 // freeze them in strong mode.
2212 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2216 void FullCodeGenerator::EmitAssignment(Expression* expr,
2217 FeedbackVectorICSlot slot) {
2218 DCHECK(expr->IsValidReferenceExpressionOrThis());
2220 Property* prop = expr->AsProperty();
2221 LhsKind assign_type = Property::GetAssignType(prop);
2223 switch (assign_type) {
2225 Variable* var = expr->AsVariableProxy()->var();
2226 EffectContext context(this);
2227 EmitVariableAssignment(var, Token::ASSIGN, slot);
2230 case NAMED_PROPERTY: {
2231 __ Push(x0); // Preserve value.
2232 VisitForAccumulatorValue(prop->obj());
2233 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2235 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2236 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2237 __ Mov(StoreDescriptor::NameRegister(),
2238 Operand(prop->key()->AsLiteral()->value()));
2239 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2243 case NAMED_SUPER_PROPERTY: {
2245 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2246 VisitForAccumulatorValue(
2247 prop->obj()->AsSuperPropertyReference()->home_object());
2248 // stack: value, this; x0: home_object
2249 Register scratch = x10;
2250 Register scratch2 = x11;
2251 __ mov(scratch, result_register()); // home_object
2252 __ Peek(x0, kPointerSize); // value
2253 __ Peek(scratch2, 0); // this
2254 __ Poke(scratch2, kPointerSize); // this
2255 __ Poke(scratch, 0); // home_object
2256 // stack: this, home_object; x0: value
2257 EmitNamedSuperPropertyStore(prop);
2260 case KEYED_SUPER_PROPERTY: {
2262 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2264 prop->obj()->AsSuperPropertyReference()->home_object());
2265 VisitForAccumulatorValue(prop->key());
2266 Register scratch = x10;
2267 Register scratch2 = x11;
2268 __ Peek(scratch2, 2 * kPointerSize); // value
2269 // stack: value, this, home_object; x0: key, x11: value
2270 __ Peek(scratch, kPointerSize); // this
2271 __ Poke(scratch, 2 * kPointerSize);
2272 __ Peek(scratch, 0); // home_object
2273 __ Poke(scratch, kPointerSize);
2275 __ Move(x0, scratch2);
2276 // stack: this, home_object, key; x0: value.
2277 EmitKeyedSuperPropertyStore(prop);
2280 case KEYED_PROPERTY: {
2281 __ Push(x0); // Preserve value.
2282 VisitForStackValue(prop->obj());
2283 VisitForAccumulatorValue(prop->key());
2284 __ Mov(StoreDescriptor::NameRegister(), x0);
2285 __ Pop(StoreDescriptor::ReceiverRegister(),
2286 StoreDescriptor::ValueRegister());
2287 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2289 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2294 context()->Plug(x0);
2298 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2299 Variable* var, MemOperand location) {
2300 __ Str(result_register(), location);
2301 if (var->IsContextSlot()) {
2302 // RecordWrite may destroy all its register arguments.
2303 __ Mov(x10, result_register());
2304 int offset = Context::SlotOffset(var->index());
2305 __ RecordWriteContextSlot(
2306 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2311 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2312 FeedbackVectorICSlot slot) {
2313 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2314 if (var->IsUnallocated()) {
2315 // Global var, const, or let.
2316 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2317 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2318 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2321 } else if (var->IsGlobalSlot()) {
2322 // Global var, const, or let.
2323 DCHECK(var->index() > 0);
2324 DCHECK(var->IsStaticGlobalObjectProperty());
2325 int const slot = var->index();
2326 int const depth = scope()->ContextChainLength(var->scope());
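// The stub can only walk a statically bounded context chain; for deeper
// chains we fall back to the runtime call below.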
2327 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2328 __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
2329 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
2330 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2333 __ Push(Smi::FromInt(slot));
2335 __ CallRuntime(is_strict(language_mode())
2336 ? Runtime::kStoreGlobalViaContext_Strict
2337 : Runtime::kStoreGlobalViaContext_Sloppy,
2340 } else if (var->mode() == LET && op != Token::INIT_LET) {
2341 // Non-initializing assignment to let variable needs a write barrier.
2342 DCHECK(!var->IsLookupSlot());
2343 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2345 MemOperand location = VarOperand(var, x1);
2346 __ Ldr(x10, location);
2347 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2348 __ Mov(x10, Operand(var->name()));
2350 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2351 // Perform the assignment.
2353 EmitStoreToStackLocalOrContextSlot(var, location);
2355 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2356 // Assignment to const variable needs a write barrier.
2357 DCHECK(!var->IsLookupSlot());
2358 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2360 MemOperand location = VarOperand(var, x1);
2361 __ Ldr(x10, location);
2362 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2363 __ Mov(x10, Operand(var->name()));
2365 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2366 __ Bind(&const_error);
2367 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2369 } else if (var->is_this() && op == Token::INIT_CONST) {
2370 // Initializing assignment to const {this} needs a write barrier.
2371 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2372 Label uninitialized_this;
2373 MemOperand location = VarOperand(var, x1);
2374 __ Ldr(x10, location);
2375 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2376 __ Mov(x0, Operand(var->name()));
2378 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2379 __ bind(&uninitialized_this);
2380 EmitStoreToStackLocalOrContextSlot(var, location);
2382 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2383 if (var->IsLookupSlot()) {
2384 // Assignment to var.
2385 __ Mov(x11, Operand(var->name()));
2386 __ Mov(x10, Smi::FromInt(language_mode()));
2389 // jssp[16] : context.
2390 // jssp[24] : value.
2391 __ Push(x0, cp, x11, x10);
2392 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2394 // Assignment to var or initializing assignment to let/const in harmony mode.
2396 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2397 MemOperand location = VarOperand(var, x1);
2398 if (FLAG_debug_code && op == Token::INIT_LET) {
2399 __ Ldr(x10, location);
2400 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2401 __ Check(eq, kLetBindingReInitialization);
2403 EmitStoreToStackLocalOrContextSlot(var, location);
2406 } else if (op == Token::INIT_CONST_LEGACY) {
2407 // Const initializers need a write barrier.
2408 DCHECK(var->mode() == CONST_LEGACY);
2409 DCHECK(!var->IsParameter()); // No const parameters.
2410 if (var->IsLookupSlot()) {
2411 __ Mov(x1, Operand(var->name()));
2412 __ Push(x0, cp, x1);
2413 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2415 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2417 MemOperand location = VarOperand(var, x1);
2418 __ Ldr(x10, location);
2419 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2420 EmitStoreToStackLocalOrContextSlot(var, location);
2425 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2426 if (is_strict(language_mode())) {
2427 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2429 // Silently ignore store in sloppy mode.
2434 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2435 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2436 // Assignment to a property, using a named store IC.
2437 Property* prop = expr->target()->AsProperty();
2438 DCHECK(prop != NULL);
2439 DCHECK(prop->key()->IsLiteral());
2441 __ Mov(StoreDescriptor::NameRegister(),
2442 Operand(prop->key()->AsLiteral()->value()));
2443 __ Pop(StoreDescriptor::ReceiverRegister());
2444 if (FLAG_vector_stores) {
2445 EmitLoadStoreICSlot(expr->AssignmentSlot());
2448 CallStoreIC(expr->AssignmentFeedbackId());
2451 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2452 context()->Plug(x0);
2456 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2457 // Assignment to named property of super.
2459 // stack : receiver ('this'), home_object
2460 DCHECK(prop != NULL);
2461 Literal* key = prop->key()->AsLiteral();
2462 DCHECK(key != NULL);
2464 __ Push(key->value());
2466 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2467 : Runtime::kStoreToSuper_Sloppy),
2472 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2473 // Assignment to keyed property of super.
2475 // stack : receiver ('this'), home_object, key
2476 DCHECK(prop != NULL);
2480 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2481 : Runtime::kStoreKeyedToSuper_Sloppy),
2486 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2487 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2488 // Assignment to a property, using a keyed store IC.
2490 // TODO(all): Could we pass this in registers rather than on the stack?
2491 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2492 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2495 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2496 if (FLAG_vector_stores) {
2497 EmitLoadStoreICSlot(expr->AssignmentSlot());
2500 CallIC(ic, expr->AssignmentFeedbackId());
2503 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2504 context()->Plug(x0);
2508 void FullCodeGenerator::VisitProperty(Property* expr) {
2509 Comment cmnt(masm_, "[ Property");
2510 SetExpressionPosition(expr);
2511 Expression* key = expr->key();
2513 if (key->IsPropertyName()) {
2514 if (!expr->IsSuperAccess()) {
2515 VisitForAccumulatorValue(expr->obj());
2516 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2517 EmitNamedPropertyLoad(expr);
2519 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2521 expr->obj()->AsSuperPropertyReference()->home_object());
2522 EmitNamedSuperPropertyLoad(expr);
2525 if (!expr->IsSuperAccess()) {
2526 VisitForStackValue(expr->obj());
2527 VisitForAccumulatorValue(expr->key());
2528 __ Move(LoadDescriptor::NameRegister(), x0);
2529 __ Pop(LoadDescriptor::ReceiverRegister());
2530 EmitKeyedPropertyLoad(expr);
2532 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2534 expr->obj()->AsSuperPropertyReference()->home_object());
2535 VisitForStackValue(expr->key());
2536 EmitKeyedSuperPropertyLoad(expr);
2539 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2540 context()->Plug(x0);
2544 void FullCodeGenerator::CallIC(Handle<Code> code,
2545 TypeFeedbackId ast_id) {
2547 // All calls must have a predictable size in full-codegen code to ensure that
2548 // the debugger can patch them correctly.
2549 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2553 // Code common for calls using the IC.
2554 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2555 Expression* callee = expr->expression();
2557 CallICState::CallType call_type =
2558 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2560 // Get the target function.
2561 if (call_type == CallICState::FUNCTION) {
2562 { StackValueContext context(this);
2563 EmitVariableLoad(callee->AsVariableProxy());
2564 PrepareForBailout(callee, NO_REGISTERS);
2566 // Push undefined as receiver. This is patched in the method prologue if it
2567 // is a sloppy mode method.
2569 UseScratchRegisterScope temps(masm_);
2570 Register temp = temps.AcquireX();
2571 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
2575 // Load the function from the receiver.
2576 DCHECK(callee->IsProperty());
2577 DCHECK(!callee->AsProperty()->IsSuperAccess());
2578 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2579 EmitNamedPropertyLoad(callee->AsProperty());
2580 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2581 // Push the target function under the receiver.
2586 EmitCall(expr, call_type);
2590 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2591 Expression* callee = expr->expression();
2592 DCHECK(callee->IsProperty());
2593 Property* prop = callee->AsProperty();
2594 DCHECK(prop->IsSuperAccess());
2595 SetExpressionPosition(prop);
2597 Literal* key = prop->key()->AsLiteral();
2598 DCHECK(!key->value()->IsSmi());
2600 // Load the function from the receiver.
2601 const Register scratch = x10;
2602 SuperPropertyReference* super_ref =
2603 callee->AsProperty()->obj()->AsSuperPropertyReference();
2604 VisitForStackValue(super_ref->home_object());
2605 VisitForAccumulatorValue(super_ref->this_var());
2607 __ Peek(scratch, kPointerSize);
2608 __ Push(x0, scratch);
2609 __ Push(key->value());
2610 __ Push(Smi::FromInt(language_mode()));
2614 // - this (receiver)
2615 // - this (receiver) <-- LoadFromSuper will pop here and below.
2618 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2620 // Replace home_object with target function.
2621 __ Poke(x0, kPointerSize);
2624 // - target function
2625 // - this (receiver)
2626 EmitCall(expr, CallICState::METHOD);
2630 // Code common for calls using the IC.
2631 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2634 VisitForAccumulatorValue(key);
2636 Expression* callee = expr->expression();
2638 // Load the function from the receiver.
2639 DCHECK(callee->IsProperty());
2640 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2641 __ Move(LoadDescriptor::NameRegister(), x0);
2642 EmitKeyedPropertyLoad(callee->AsProperty());
2643 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2645 // Push the target function under the receiver.
2649 EmitCall(expr, CallICState::METHOD);
2653 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2654 Expression* callee = expr->expression();
2655 DCHECK(callee->IsProperty());
2656 Property* prop = callee->AsProperty();
2657 DCHECK(prop->IsSuperAccess());
2658 SetExpressionPosition(prop);
2660 // Load the function from the receiver.
2661 const Register scratch = x10;
2662 SuperPropertyReference* super_ref =
2663 callee->AsProperty()->obj()->AsSuperPropertyReference();
2664 VisitForStackValue(super_ref->home_object());
2665 VisitForAccumulatorValue(super_ref->this_var());
2667 __ Peek(scratch, kPointerSize);
2668 __ Push(x0, scratch);
2669 VisitForStackValue(prop->key());
2670 __ Push(Smi::FromInt(language_mode()));
2674 // - this (receiver)
2675 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2679 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2681 // Replace home_object with target function.
2682 __ Poke(x0, kPointerSize);
2685 // - target function
2686 // - this (receiver)
2687 EmitCall(expr, CallICState::METHOD);
2691 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2692 // Load the arguments.
2693 ZoneList<Expression*>* args = expr->arguments();
2694 int arg_count = args->length();
2695 for (int i = 0; i < arg_count; i++) {
2696 VisitForStackValue(args->at(i));
2699 SetCallPosition(expr, arg_count);
2701 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2702 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2703 __ Peek(x1, (arg_count + 1) * kXRegSize);
2704 // Don't assign a type feedback id to the IC, since type feedback is provided
2705 // by the vector above.
2708 RecordJSReturnSite(expr);
2709 // Restore context register.
2710 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2711 context()->DropAndPlug(1, x0);
2715 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2716 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2717 // Prepare to push a copy of the first argument, or undefined if it doesn't exist.
2719 if (arg_count > 0) {
2720 __ Peek(x9, arg_count * kXRegSize);
2722 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2725 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2727 // Prepare to push the language mode.
2728 __ Mov(x11, Smi::FromInt(language_mode()));
2729 // Prepare to push the start position of the scope the call resides in.
2730 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2733 __ Push(x9, x10, x11, x12);
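// Together with the copy of the callee pushed by the caller, these make up
// the five arguments of Runtime::kResolvePossiblyDirectEval.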
2735 // Do the runtime call.
2736 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2740 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2741 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2742 VariableProxy* callee = expr->expression()->AsVariableProxy();
2743 if (callee->var()->IsLookupSlot()) {
2745 SetExpressionPosition(callee);
2746 // Generate code for loading from variables potentially shadowed
2747 // by eval-introduced variables.
2748 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2751 // Call the runtime to find the function to call (returned in x0)
2752 // and the object holding it (returned in x1).
2753 __ Mov(x10, Operand(callee->name()));
2754 __ Push(context_register(), x10);
2755 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2756 __ Push(x0, x1); // Receiver, function.
2757 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2759 // If fast case code has been generated, emit code to push the
2760 // function and receiver and have the slow path jump around this
2762 if (done.is_linked()) {
2767 // The receiver is implicitly the global receiver. Indicate this
2768 // by passing undefined to the call function stub.
2769 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2774 VisitForStackValue(callee);
2775 // refEnv.WithBaseObject()
2776 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2777 __ Push(x10); // Reserved receiver slot.
2782 void FullCodeGenerator::VisitCall(Call* expr) {
2784 // We want to verify that RecordJSReturnSite gets called on all paths
2785 // through this function. Avoid early returns.
2786 expr->return_is_recorded_ = false;
2789 Comment cmnt(masm_, "[ Call");
2790 Expression* callee = expr->expression();
2791 Call::CallType call_type = expr->GetCallType(isolate());
2793 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2794 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2795 // to resolve the function we need to call. Then we call the resolved
2796 // function using the given arguments.
2797 ZoneList<Expression*>* args = expr->arguments();
2798 int arg_count = args->length();
2800 PushCalleeAndWithBaseObject(expr);
2802 // Push the arguments.
2803 for (int i = 0; i < arg_count; i++) {
2804 VisitForStackValue(args->at(i));
2807 // Push a copy of the function (found below the arguments) and resolve eval.
2809 __ Peek(x10, (arg_count + 1) * kPointerSize);
2811 EmitResolvePossiblyDirectEval(arg_count);
2813 // Touch up the stack with the resolved function.
2814 __ Poke(x0, (arg_count + 1) * kPointerSize);
2816 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2818 // Record source position for debugger.
2819 SetCallPosition(expr, arg_count);
2821 // Call the evaluated function.
2822 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2823 __ Peek(x1, (arg_count + 1) * kXRegSize);
2825 RecordJSReturnSite(expr);
2826 // Restore context register.
2827 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2828 context()->DropAndPlug(1, x0);
2830 } else if (call_type == Call::GLOBAL_CALL) {
2831 EmitCallWithLoadIC(expr);
2833 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2834 // Call to a lookup slot (dynamically introduced variable).
2835 PushCalleeAndWithBaseObject(expr);
2837 } else if (call_type == Call::PROPERTY_CALL) {
2838 Property* property = callee->AsProperty();
2839 bool is_named_call = property->key()->IsPropertyName();
2840 if (property->IsSuperAccess()) {
2841 if (is_named_call) {
2842 EmitSuperCallWithLoadIC(expr);
2844 EmitKeyedSuperCallWithLoadIC(expr);
2847 VisitForStackValue(property->obj());
2848 if (is_named_call) {
2849 EmitCallWithLoadIC(expr);
2851 EmitKeyedCallWithLoadIC(expr, property->key());
2854 } else if (call_type == Call::SUPER_CALL) {
2855 EmitSuperConstructorCall(expr);
2857 DCHECK(call_type == Call::OTHER_CALL);
2858 // Call to an arbitrary expression not handled specially above.
2859 VisitForStackValue(callee);
2860 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2862 // Emit function call.
2867 // RecordJSReturnSite should have been called.
2868 DCHECK(expr->return_is_recorded_);
2873 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2874 Comment cmnt(masm_, "[ CallNew");
2875 // According to ECMA-262, section 11.2.2, page 44, the function
2876 // expression in new calls must be evaluated before the arguments.
2879 // Push constructor on the stack. If it's not a function it's used as
2880 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2882 DCHECK(!expr->expression()->IsSuperPropertyReference());
2883 VisitForStackValue(expr->expression());
2885 // Push the arguments ("left-to-right") on the stack.
2886 ZoneList<Expression*>* args = expr->arguments();
2887 int arg_count = args->length();
2888 for (int i = 0; i < arg_count; i++) {
2889 VisitForStackValue(args->at(i));
2892 // Call the construct call builtin that handles allocation and
2893 // constructor invocation.
2894 SetConstructCallPosition(expr);
2896 // Load function and argument count into x1 and x0.
2897 __ Mov(x0, arg_count);
2898 __ Peek(x1, arg_count * kXRegSize);
2900 // Record call targets in unoptimized code.
2901 __ LoadObject(x2, FeedbackVector());
2902 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2904 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2905 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2906 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2907 // Restore context register.
2908 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2909 context()->Plug(x0);
2913 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2914 SuperCallReference* super_call_ref =
2915 expr->expression()->AsSuperCallReference();
2916 DCHECK_NOT_NULL(super_call_ref);
2918 EmitLoadSuperConstructor(super_call_ref);
2919 __ push(result_register());
2921 // Push the arguments ("left-to-right") on the stack.
2922 ZoneList<Expression*>* args = expr->arguments();
2923 int arg_count = args->length();
2924 for (int i = 0; i < arg_count; i++) {
2925 VisitForStackValue(args->at(i));
2928 // Call the construct call builtin that handles allocation and
2929 // constructor invocation.
2930 SetConstructCallPosition(expr);
2932 // Load original constructor into x4.
2933 VisitForAccumulatorValue(super_call_ref->new_target_var());
2934 __ Mov(x4, result_register());
2936 // Load function and argument count into x1 and x0.
2937 __ Mov(x0, arg_count);
2938 __ Peek(x1, arg_count * kXRegSize);
2940 // Record call targets in unoptimized code.
2941 __ LoadObject(x2, FeedbackVector());
2942 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
2944 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
2945 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2947 RecordJSReturnSite(expr);
2949 // Restore context register.
2950 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2951 context()->Plug(x0);
2955 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2956 ZoneList<Expression*>* args = expr->arguments();
2957 DCHECK(args->length() == 1);
2959 VisitForAccumulatorValue(args->at(0));
2961 Label materialize_true, materialize_false;
2962 Label* if_true = NULL;
2963 Label* if_false = NULL;
2964 Label* fall_through = NULL;
2965 context()->PrepareTest(&materialize_true, &materialize_false,
2966 &if_true, &if_false, &fall_through);
2968 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
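// A smi has its tag bit clear (kSmiTag is 0), which is what the TestAndSplit
// below checks for.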
2969 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2971 context()->Plug(if_true, if_false);
2975 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2976 ZoneList<Expression*>* args = expr->arguments();
2977 DCHECK(args->length() == 1);
2979 VisitForAccumulatorValue(args->at(0));
2981 Label materialize_true, materialize_false;
2982 Label* if_true = NULL;
2983 Label* if_false = NULL;
2984 Label* fall_through = NULL;
2985 context()->PrepareTest(&materialize_true, &materialize_false,
2986 &if_true, &if_false, &fall_through);
2988 __ JumpIfSmi(x0, if_false);
2989 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2990 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2991 Split(ge, if_true, if_false, fall_through);
2993 context()->Plug(if_true, if_false);
2997 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
2998 ZoneList<Expression*>* args = expr->arguments();
2999 DCHECK(args->length() == 1);
3001 VisitForAccumulatorValue(args->at(0));
3003 Label materialize_true, materialize_false;
3004 Label* if_true = NULL;
3005 Label* if_false = NULL;
3006 Label* fall_through = NULL;
3007 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3008 &if_false, &fall_through);
3010 __ JumpIfSmi(x0, if_false);
3011 __ CompareObjectType(x0, x10, x11, SIMD128_VALUE_TYPE);
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(eq, if_true, if_false, fall_through);
3015 context()->Plug(if_true, if_false);
3019 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3020 ZoneList<Expression*>* args = expr->arguments();
3021 DCHECK(args->length() == 1);
3023 VisitForAccumulatorValue(args->at(0));
3025 Label materialize_true, materialize_false;
3026 Label* if_true = NULL;
3027 Label* if_false = NULL;
3028 Label* fall_through = NULL;
3029 context()->PrepareTest(&materialize_true, &materialize_false,
3030 &if_true, &if_false, &fall_through);
3032 __ JumpIfSmi(x0, if_false);
3033 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(eq, if_true, if_false, fall_through);
3037 context()->Plug(if_true, if_false);
3041 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 DCHECK(args->length() == 1);
3045 VisitForAccumulatorValue(args->at(0));
3047 Label materialize_true, materialize_false;
3048 Label* if_true = NULL;
3049 Label* if_false = NULL;
3050 Label* fall_through = NULL;
3051 context()->PrepareTest(&materialize_true, &materialize_false,
3052 &if_true, &if_false, &fall_through);
3054 // Only a HeapNumber can be -0.0, so return false if we have something else.
3055 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3057 // Test the bit pattern.
3058 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3059 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
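// 0x8000000000000000 is the bit pattern of -0.0 and is the only value for
// which subtracting 1 overflows, so the V flag identifies -0.0 exactly.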
3061 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3062 Split(vs, if_true, if_false, fall_through);
3064 context()->Plug(if_true, if_false);
3068 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3069 ZoneList<Expression*>* args = expr->arguments();
3070 DCHECK(args->length() == 1);
3072 VisitForAccumulatorValue(args->at(0));
3074 Label materialize_true, materialize_false;
3075 Label* if_true = NULL;
3076 Label* if_false = NULL;
3077 Label* fall_through = NULL;
3078 context()->PrepareTest(&materialize_true, &materialize_false,
3079 &if_true, &if_false, &fall_through);
3081 __ JumpIfSmi(x0, if_false);
3082 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3083 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3084 Split(eq, if_true, if_false, fall_through);
3086 context()->Plug(if_true, if_false);
3090 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3091 ZoneList<Expression*>* args = expr->arguments();
3092 DCHECK(args->length() == 1);
3094 VisitForAccumulatorValue(args->at(0));
3096 Label materialize_true, materialize_false;
3097 Label* if_true = NULL;
3098 Label* if_false = NULL;
3099 Label* fall_through = NULL;
3100 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3101 &if_false, &fall_through);
3103 __ JumpIfSmi(x0, if_false);
3104 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
3105 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3106 Split(eq, if_true, if_false, fall_through);
3108 context()->Plug(if_true, if_false);
3112 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3113 ZoneList<Expression*>* args = expr->arguments();
3114 DCHECK(args->length() == 1);
3116 VisitForAccumulatorValue(args->at(0));
3118 Label materialize_true, materialize_false;
3119 Label* if_true = NULL;
3120 Label* if_false = NULL;
3121 Label* fall_through = NULL;
3122 context()->PrepareTest(&materialize_true, &materialize_false,
3123 &if_true, &if_false, &fall_through);
3125 __ JumpIfSmi(x0, if_false);
3126 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3127 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3128 Split(eq, if_true, if_false, fall_through);
3130 context()->Plug(if_true, if_false);
3134 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3135 ZoneList<Expression*>* args = expr->arguments();
3136 DCHECK(args->length() == 1);
3138 VisitForAccumulatorValue(args->at(0));
3140 Label materialize_true, materialize_false;
3141 Label* if_true = NULL;
3142 Label* if_false = NULL;
3143 Label* fall_through = NULL;
3144 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3145 &if_false, &fall_through);
3147 __ JumpIfSmi(x0, if_false);
3149 Register type_reg = x11;
3150 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3151 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3152 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3153 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
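// After biasing by FIRST_JS_PROXY_TYPE, a single unsigned comparison ('ls'
// in the split below) checks the whole proxy type range in one go.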
3154 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3155 Split(ls, if_true, if_false, fall_through);
3157 context()->Plug(if_true, if_false);
3161 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3162 DCHECK(expr->arguments()->length() == 0);
3164 Label materialize_true, materialize_false;
3165 Label* if_true = NULL;
3166 Label* if_false = NULL;
3167 Label* fall_through = NULL;
3168 context()->PrepareTest(&materialize_true, &materialize_false,
3169 &if_true, &if_false, &fall_through);
3171 // Get the frame pointer for the calling frame.
3172 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3174 // Skip the arguments adaptor frame if it exists.
3175 Label check_frame_marker;
3176 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3177 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3178 __ B(ne, &check_frame_marker);
3179 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3181 // Check the marker in the calling frame.
3182 __ Bind(&check_frame_marker);
3183 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3184 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3185 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3186 Split(eq, if_true, if_false, fall_through);
3188 context()->Plug(if_true, if_false);
3192 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3193 ZoneList<Expression*>* args = expr->arguments();
3194 DCHECK(args->length() == 2);
3196 // Load the two objects into registers and perform the comparison.
3197 VisitForStackValue(args->at(0));
3198 VisitForAccumulatorValue(args->at(1));
3200 Label materialize_true, materialize_false;
3201 Label* if_true = NULL;
3202 Label* if_false = NULL;
3203 Label* fall_through = NULL;
3204 context()->PrepareTest(&materialize_true, &materialize_false,
3205 &if_true, &if_false, &fall_through);
3209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210 Split(eq, if_true, if_false, fall_through);
3212 context()->Plug(if_true, if_false);
3216 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3217 ZoneList<Expression*>* args = expr->arguments();
3218 DCHECK(args->length() == 1);
3220 // ArgumentsAccessStub expects the key in x1.
3221 VisitForAccumulatorValue(args->at(0));
3223 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3224 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3226 context()->Plug(x0);
3230 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3231 DCHECK(expr->arguments()->length() == 0);
3233 // Get the number of formal parameters.
3234 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3236 // Check if the calling frame is an arguments adaptor frame.
3237 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3238 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3239 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3242 // Arguments adaptor case: Read the arguments length from the
3244 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3247 context()->Plug(x0);
3251 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3252 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3253 ZoneList<Expression*>* args = expr->arguments();
3254 DCHECK(args->length() == 1);
3255 Label done, null, function, non_function_constructor;
3257 VisitForAccumulatorValue(args->at(0));
3259 // If the object is a smi, we return null.
3260 __ JumpIfSmi(x0, &null);
3262 // Check that the object is a JS object but take special care of JS
3263 // functions to make sure they have 'Function' as their class.
3264 // Assume that there are only two callable types, and one of them is at
3265 // either end of the type range for JS object types. Saves extra comparisons.
3266 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3267 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3268 // x10: object's map.
3269 // x11: object's type.
3271 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3272 FIRST_SPEC_OBJECT_TYPE + 1);
3273 __ B(eq, &function);
3275 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3276 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3277 LAST_SPEC_OBJECT_TYPE - 1);
3278 __ B(eq, &function);
3279 // Assume that there is no larger type.
3280 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3282 // Check if the constructor in the map is a JS function.
3283 Register instance_type = x14;
3284 __ GetMapConstructor(x12, x10, x13, instance_type);
3285 __ Cmp(instance_type, JS_FUNCTION_TYPE);
3286 __ B(ne, &non_function_constructor);
3288 // x12 now contains the constructor function. Grab the
3289 // instance class name from there.
3290 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3292 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3295 // Functions have class 'Function'.
3297 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3300 // Objects with a non-function constructor have class 'Object'.
3301 __ Bind(&non_function_constructor);
3302 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3305 // Non-JS objects have class null.
3307 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3312 context()->Plug(x0);
3316 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3317 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3318 ZoneList<Expression*>* args = expr->arguments();
3319 DCHECK(args->length() == 1);
3320 VisitForAccumulatorValue(args->at(0)); // Load the object.
3323 // If the object is a smi return the object.
3324 __ JumpIfSmi(x0, &done);
3325 // If the object is not a value type, return the object.
3326 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3327 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3330 context()->Plug(x0);
3334 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3335 ZoneList<Expression*>* args = expr->arguments();
3336 DCHECK_EQ(1, args->length());
3338 VisitForAccumulatorValue(args->at(0));
3340 Label materialize_true, materialize_false;
3341 Label* if_true = nullptr;
3342 Label* if_false = nullptr;
3343 Label* fall_through = nullptr;
3344 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3345 &if_false, &fall_through);
3347 __ JumpIfSmi(x0, if_false);
3348 __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
3349 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3350 Split(eq, if_true, if_false, fall_through);
3352 context()->Plug(if_true, if_false);
3356 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3357 ZoneList<Expression*>* args = expr->arguments();
3358 DCHECK(args->length() == 2);
3359 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3360 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3362 VisitForAccumulatorValue(args->at(0)); // Load the object.
3364 Register object = x0;
3365 Register result = x0;
3366 Register stamp_addr = x10;
3367 Register stamp_cache = x11;
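// Fields other than the date value itself are only valid while the object's
// cache stamp matches the isolate's date cache stamp; otherwise the field is
// recomputed via the C function call below.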
3369 if (index->value() == 0) {
3370 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3372 Label runtime, done;
3373 if (index->value() < JSDate::kFirstUncachedField) {
3374 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3375 __ Mov(stamp_addr, stamp);
3376 __ Ldr(stamp_addr, MemOperand(stamp_addr));
3377 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3378 __ Cmp(stamp_addr, stamp_cache);
3380 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3381 kPointerSize * index->value()));
3387 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3391 context()->Plug(result);
3395 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3396 ZoneList<Expression*>* args = expr->arguments();
3397 DCHECK_EQ(3, args->length());
3399 Register string = x0;
3400 Register index = x1;
3401 Register value = x2;
3402 Register scratch = x10;
3404 VisitForStackValue(args->at(0)); // index
3405 VisitForStackValue(args->at(1)); // value
3406 VisitForAccumulatorValue(args->at(2)); // string
3407 __ Pop(value, index);
3409 if (FLAG_debug_code) {
3410 __ AssertSmi(value, kNonSmiValue);
3411 __ AssertSmi(index, kNonSmiIndex);
3412 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3413 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3417 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3420 __ Strb(value, MemOperand(scratch, index));
3421 context()->Plug(string);
3425 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3426 ZoneList<Expression*>* args = expr->arguments();
3427 DCHECK_EQ(3, args->length());
3429 Register string = x0;
3430 Register index = x1;
3431 Register value = x2;
3432 Register scratch = x10;
3434 VisitForStackValue(args->at(0)); // index
3435 VisitForStackValue(args->at(1)); // value
3436 VisitForAccumulatorValue(args->at(2)); // string
3437 __ Pop(value, index);
3439 if (FLAG_debug_code) {
3440 __ AssertSmi(value, kNonSmiValue);
3441 __ AssertSmi(index, kNonSmiIndex);
3442 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3443 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3447 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3450 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3451 context()->Plug(string);
3455 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3456 ZoneList<Expression*>* args = expr->arguments();
3457 DCHECK(args->length() == 2);
3458 VisitForStackValue(args->at(0)); // Load the object.
3459 VisitForAccumulatorValue(args->at(1)); // Load the value.
3465 // If the object is a smi, return the value.
3466 __ JumpIfSmi(x1, &done);
3468 // If the object is not a value type, return the value.
3469 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3472 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3473 // Update the write barrier. Save the value as it will be
3474 // overwritten by the write barrier code and is needed afterward.
3476 __ RecordWriteField(
3477 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3480 context()->Plug(x0);
3484 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3485 ZoneList<Expression*>* args = expr->arguments();
3486 DCHECK_EQ(args->length(), 1);
3488 // Load the argument into x0 and call the stub.
3489 VisitForAccumulatorValue(args->at(0));
3491 NumberToStringStub stub(isolate());
3493 context()->Plug(x0);
3497 void FullCodeGenerator::EmitToString(CallRuntime* expr) {
3498 ZoneList<Expression*>* args = expr->arguments();
3499 DCHECK_EQ(1, args->length());
3501 // Load the argument into x0 and convert it.
3502 VisitForAccumulatorValue(args->at(0));
3504 ToStringStub stub(isolate());
3506 context()->Plug(x0);
3510 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3511 ZoneList<Expression*>* args = expr->arguments();
3512 DCHECK_EQ(1, args->length());
3514 // Load the argument into x0 and convert it.
3515 VisitForAccumulatorValue(args->at(0));
3517 Label convert, done_convert;
3518 __ JumpIfSmi(x0, &convert);
3519 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3520 __ JumpIfObjectType(x0, x1, x1, LAST_NAME_TYPE, &done_convert, ls);
3522 ToStringStub stub(isolate());
3524 __ Bind(&done_convert);
3525 context()->Plug(x0);
3529 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3530 ZoneList<Expression*>* args = expr->arguments();
3531 DCHECK_EQ(1, args->length());
3533 // Load the argument into x0 and convert it.
3534 VisitForAccumulatorValue(args->at(0));
3536 ToObjectStub stub(isolate());
3538 context()->Plug(x0);
3542 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3543 ZoneList<Expression*>* args = expr->arguments();
3544 DCHECK(args->length() == 1);
3546 VisitForAccumulatorValue(args->at(0));
3550 Register result = x1;
3552 StringCharFromCodeGenerator generator(code, result);
3553 generator.GenerateFast(masm_);
3556 NopRuntimeCallHelper call_helper;
3557 generator.GenerateSlow(masm_, call_helper);
3560 context()->Plug(result);
3564 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3565 ZoneList<Expression*>* args = expr->arguments();
3566 DCHECK(args->length() == 2);
3568 VisitForStackValue(args->at(0));
3569 VisitForAccumulatorValue(args->at(1));
3571 Register object = x1;
3572 Register index = x0;
3573 Register result = x3;
3577 Label need_conversion;
3578 Label index_out_of_range;
3580 StringCharCodeAtGenerator generator(object,
3585 &index_out_of_range,
3586 STRING_INDEX_IS_NUMBER);
3587 generator.GenerateFast(masm_);
3590 __ Bind(&index_out_of_range);
3591 // When the index is out of range, the spec requires us to return NaN.
3592 __ LoadRoot(result, Heap::kNanValueRootIndex);
3595 __ Bind(&need_conversion);
3596 // Load the undefined value into the result register, which will
3597 // trigger conversion.
3598 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3601 NopRuntimeCallHelper call_helper;
3602 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3605 context()->Plug(result);
3609 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3610 ZoneList<Expression*>* args = expr->arguments();
3611 DCHECK(args->length() == 2);
3613 VisitForStackValue(args->at(0));
3614 VisitForAccumulatorValue(args->at(1));
3616 Register object = x1;
3617 Register index = x0;
3618 Register result = x0;
3622 Label need_conversion;
3623 Label index_out_of_range;
3625 StringCharAtGenerator generator(object,
3631 &index_out_of_range,
3632 STRING_INDEX_IS_NUMBER);
3633 generator.GenerateFast(masm_);
3636 __ Bind(&index_out_of_range);
3637 // When the index is out of range, the spec requires us to return
3638 // the empty string.
3639 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3642 __ Bind(&need_conversion);
3643 // Move smi zero into the result register, which will trigger conversion.
3644 __ Mov(result, Smi::FromInt(0));
3647 NopRuntimeCallHelper call_helper;
3648 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3651 context()->Plug(result);
3655 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3656 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3657 ZoneList<Expression*>* args = expr->arguments();
3658 DCHECK_EQ(2, args->length());
3660 VisitForStackValue(args->at(0));
3661 VisitForAccumulatorValue(args->at(1));
3664 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3667 context()->Plug(x0);
3671 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3672 ASM_LOCATION("FullCodeGenerator::EmitCall");
3673 ZoneList<Expression*>* args = expr->arguments();
3674 DCHECK_LE(2, args->length());
3675 // Push target, receiver and arguments onto the stack.
3676 for (Expression* const arg : *args) {
3677 VisitForStackValue(arg);
3679 // Move target to x1.
3680 int const argc = args->length() - 2;
3681 __ Peek(x1, (argc + 1) * kXRegSize);
3684 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3685 // Restore context register.
3686 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3687 // Discard the function left on TOS.
3688 context()->DropAndPlug(1, x0);
3692 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3693 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3694 ZoneList<Expression*>* args = expr->arguments();
3695 DCHECK(args->length() >= 2);
3697 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3698 for (int i = 0; i < arg_count + 1; i++) {
3699 VisitForStackValue(args->at(i));
3701 VisitForAccumulatorValue(args->last()); // Function.
3703 Label runtime, done;
3704 // Check for non-function argument (including proxy).
3705 __ JumpIfSmi(x0, &runtime);
3706 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3708 // InvokeFunction requires the function in x1. Move it in there.
3710 ParameterCount count(arg_count);
3711 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3712 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3717 __ CallRuntime(Runtime::kCallFunction, args->length());
3720 context()->Plug(x0);
3724 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3725 ZoneList<Expression*>* args = expr->arguments();
3726 DCHECK(args->length() == 2);
3728 // Evaluate new.target and super constructor.
3729 VisitForStackValue(args->at(0));
3730 VisitForStackValue(args->at(1));
3732 // Load original constructor into x4.
3733 __ Peek(x4, 1 * kPointerSize);
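// new.target was pushed first (args->at(0) above), so it sits one slot below
// the super constructor on the stack.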
3735 // Check if the calling frame is an arguments adaptor frame.
3736 Label adaptor_frame, args_set_up, runtime;
3737 __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3738 __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
3739 __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3740 __ B(eq, &adaptor_frame);
3741 // default constructor has no arguments, so no adaptor frame means no args.
3742 __ Mov(x0, Operand(0));
3745 // Copy arguments from adaptor frame.
3747 __ bind(&adaptor_frame);
3748 __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
3749 __ SmiUntag(x1, x1);
3753 // Get arguments pointer in x11.
3754 __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
3755 __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
Label loop;
__ Bind(&loop);
3758 // Pre-decrement x11 with kPointerSize on each iteration.
3759 // Pre-decrement in order to skip receiver.
3760 __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
__ Push(x10);
3762 __ Sub(x1, x1, Operand(1));
__ Cbnz(x1, &loop);
3766 __ bind(&args_set_up);
3767 __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
3768 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
3770 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
3771 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3773 // Restore context register.
3774 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3776 context()->DropAndPlug(1, x0);
3780 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3781 RegExpConstructResultStub stub(isolate());
3782 ZoneList<Expression*>* args = expr->arguments();
3783 DCHECK(args->length() == 3);
3784 VisitForStackValue(args->at(0));
3785 VisitForStackValue(args->at(1));
3786 VisitForAccumulatorValue(args->at(2));
__ CallStub(&stub);
3789 context()->Plug(x0);
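// EmitHasCachedArrayIndex: tests the string's hash field for a cached array
// index (see String::kContainsCachedArrayIndexMask) without materializing a
// number.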
3793 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3794 ZoneList<Expression*>* args = expr->arguments();
3795 VisitForAccumulatorValue(args->at(0));
3797 Label materialize_true, materialize_false;
3798 Label* if_true = NULL;
3799 Label* if_false = NULL;
3800 Label* fall_through = NULL;
3801 context()->PrepareTest(&materialize_true, &materialize_false,
3802 &if_true, &if_false, &fall_through);
3804 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3805 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3806 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3807 Split(eq, if_true, if_false, fall_through);
3809 context()->Plug(if_true, if_false);
3813 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3814 ZoneList<Expression*>* args = expr->arguments();
3815 DCHECK(args->length() == 1);
3816 VisitForAccumulatorValue(args->at(0));
3818 __ AssertString(x0);
3820 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3821 __ IndexFromHash(x10, x0);
3823 context()->Plug(x0);
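// EmitFastOneByteArrayJoin: fast path for Array.prototype.join on a JSArray of
// sequential one-byte strings with a one-byte separator, e.g. (roughly)
//   ["a", "b", "c"].join("-")  ->  "a-b-c"
// Anything that does not fit this shape branches to the bailout, which returns
// undefined and lets the generic (slower) code take over.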
3827 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3828 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
3830 ZoneList<Expression*>* args = expr->arguments();
3831 DCHECK(args->length() == 2);
3832 VisitForStackValue(args->at(1));
3833 VisitForAccumulatorValue(args->at(0));
3835 Register array = x0;
3836 Register result = x0;
3837 Register elements = x1;
3838 Register element = x2;
3839 Register separator = x3;
3840 Register array_length = x4;
3841 Register result_pos = x5;
Register map = x6;
3843 Register string_length = x10;
3844 Register elements_end = x11;
3845 Register string = x12;
3846 Register scratch1 = x13;
3847 Register scratch2 = x14;
3848 Register scratch3 = x7;
3849 Register separator_length = x15;
3851 Label bailout, done, one_char_separator, long_separator,
3852 non_trivial_array, not_size_one_array, loop,
3853 empty_separator_loop, one_char_separator_loop,
3854 one_char_separator_loop_entry, long_separator_loop;
3856 // The separator operand is on the stack.
__ Pop(separator);
3859 // Check that the array is a JSArray.
3860 __ JumpIfSmi(array, &bailout);
3861 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3863 // Check that the array has fast elements.
3864 __ CheckFastElements(map, scratch1, &bailout);
3866 // If the array has length zero, return the empty string.
3867 // Load and untag the length of the array.
3868 // It is an unsigned value, so we can skip sign extension.
3869 // We assume little endianness.
3870 __ Ldrsw(array_length,
3871 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3872 __ Cbnz(array_length, &non_trivial_array);
3873 __ LoadRoot(result, Heap::kempty_stringRootIndex);
__ B(&done);
3876 __ Bind(&non_trivial_array);
3877 // Get the FixedArray containing array's elements.
3878 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3880 // Check that all array elements are sequential one-byte strings, and
3881 // accumulate the sum of their lengths.
3882 __ Mov(string_length, 0);
3883 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3884 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3885 // Loop condition: while (element < elements_end).
3886 // Live values in registers:
3887 // elements: Fixed array of strings.
3888 // array_length: Length of the fixed array of strings (not smi)
3889 // separator: Separator string
3890 // string_length: Accumulated sum of string lengths (not smi).
3891 // element: Current array element.
3892 // elements_end: Array end.
3893 if (FLAG_debug_code) {
3894 __ Cmp(array_length, 0);
3895 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
}
__ Bind(&loop);
3898 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3899 __ JumpIfSmi(string, &bailout);
3900 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3901 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3902 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3904 __ Ldrsw(scratch1, UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3905 __ Adds(string_length, string_length, scratch1);
__ B(vs, &bailout);
3907 __ Cmp(element, elements_end);
__ B(lt, &loop);  // End while (element < elements_end).
3910 // If array_length is 1, return elements[0], a string.
3911 __ Cmp(array_length, 1);
3912 __ B(ne, &not_size_one_array);
3913 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ B(&done);
3916 __ Bind(&not_size_one_array);
3918 // Live values in registers:
3919 // separator: Separator string
3920 // array_length: Length of the array (not smi).
3921 // string_length: Sum of string lengths (not smi).
3922 // elements: FixedArray of strings.
3924 // Check that the separator is a flat one-byte string.
3925 __ JumpIfSmi(separator, &bailout);
3926 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3927 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3928 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3930 // Add (separator length times array_length) - separator length to the
3931 // string_length to get the length of the result string.
3932 // Load the separator length as untagged.
3933 // We assume little endianness, and that the length is positive.
3934 __ Ldrsw(separator_length,
3935 UntagSmiFieldMemOperand(separator,
3936 SeqOneByteString::kLengthOffset));
3937 __ Sub(string_length, string_length, separator_length);
3938 __ Umaddl(string_length, array_length.W(), separator_length.W(),
string_length);
3941 // Get first element in the array.
3942 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3943 // Live values in registers:
3944 // element: First array element
3945 // separator: Separator string
3946 // string_length: Length of result string (not smi)
3947 // array_length: Length of the array (not smi).
3948 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
&bailout);
3951 // Prepare for looping. Set up elements_end to end of the array. Set
3952 // result_pos to the position of the result where to write the first
// character.
3954 // TODO(all): useless unless AllocateOneByteString trashes the register.
3955 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3956 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3958 // Check the length of the separator.
3959 __ Cmp(separator_length, 1);
3960 __ B(eq, &one_char_separator);
3961 __ B(gt, &long_separator);
3963 // Empty separator case
3964 __ Bind(&empty_separator_loop);
3965 // Live values in registers:
3966 // result_pos: the position to which we are currently copying characters.
3967 // element: Current array element.
3968 // elements_end: Array end.
3970 // Copy next array element to the result.
3971 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3972 __ Ldrsw(string_length,
3973 UntagSmiFieldMemOperand(string, String::kLengthOffset));
3974 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3975 __ CopyBytes(result_pos, string, string_length, scratch1);
3976 __ Cmp(element, elements_end);
3977 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
__ B(&done);
3980 // One-character separator case
3981 __ Bind(&one_char_separator);
3982 // Replace separator with its one-byte character value.
3983 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3984 // Jump into the loop after the code that copies the separator, so the first
3985 // element is not preceded by a separator
3986 __ B(&one_char_separator_loop_entry);
3988 __ Bind(&one_char_separator_loop);
3989 // Live values in registers:
3990 // result_pos: the position to which we are currently copying characters.
3991 // element: Current array element.
3992 // elements_end: Array end.
3993 // separator: Single separator one-byte char (in lower byte).
3995 // Copy the separator character to the result.
3996 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
3998 // Copy next array element to the result.
3999 __ Bind(&one_char_separator_loop_entry);
4000 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4001 __ Ldrsw(string_length,
4002 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4003 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4004 __ CopyBytes(result_pos, string, string_length, scratch1);
4005 __ Cmp(element, elements_end);
4006 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
__ B(&done);
4009 // Long separator case (separator is more than one character). Entry is at the
4010 // label long_separator below.
4011 __ Bind(&long_separator_loop);
4012 // Live values in registers:
4013 // result_pos: the position to which we are currently copying characters.
4014 // element: Current array element.
4015 // elements_end: Array end.
4016 // separator: Separator string.
4018 // Copy the separator to the result.
4019 // TODO(all): hoist next two instructions.
4020 __ Ldrsw(string_length,
4021 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4022 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4023 __ CopyBytes(result_pos, string, string_length, scratch1);
4025 __ Bind(&long_separator);
4026 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4027 __ Ldrsw(string_length,
4028 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4029 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4030 __ CopyBytes(result_pos, string, string_length, scratch1);
4031 __ Cmp(element, elements_end);
4032 __ B(lt, &long_separator_loop); // End while (element < elements_end).
__ B(&done);

__ Bind(&bailout);
4036 // Returning undefined will force slower code to handle it.
4037 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
__ Bind(&done);
4039 context()->Plug(result);
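// EmitDebugIsActive: reads the isolate's debug_is_active flag byte and returns
// it (as a Smi) in x0.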
4043 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4044 DCHECK(expr->arguments()->length() == 0);
4045 ExternalReference debug_is_active =
4046 ExternalReference::debug_is_active_address(isolate());
4047 __ Mov(x10, debug_is_active);
4048 __ Ldrb(x0, MemOperand(x10));
__ SmiTag(x0);
4050 context()->Plug(x0);
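// EmitCreateIterResultObject: allocates an iterator result of the shape
// { value: <arg0>, done: <arg1> } inline, using the native context's iterator
// result map; if new-space allocation fails it falls back to
// Runtime::kCreateIterResultObject.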
4054 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
4055 ZoneList<Expression*>* args = expr->arguments();
4056 DCHECK_EQ(2, args->length());
4057 VisitForStackValue(args->at(0));
4058 VisitForStackValue(args->at(1));
4060 Label runtime, done;
4062 Register result = x0;
4063 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime, TAG_OBJECT);
4064 Register map_reg = x1;
4065 Register result_value = x2;
4066 Register boolean_done = x3;
4067 Register empty_fixed_array = x4;
4068 Register untagged_result = x5;
4069 __ Ldr(map_reg, GlobalObjectMemOperand());
4070 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
4072 __ Ldr(map_reg, ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
4073 __ Pop(boolean_done);
4074 __ Pop(result_value);
4075 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
4076 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
4077 JSObject::kElementsOffset);
4078 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
4079 JSIteratorResult::kDoneOffset);
4080 __ ObjectUntag(untagged_result, result);
4081 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
4082 __ Stp(empty_fixed_array, empty_fixed_array,
4083 MemOperand(untagged_result, JSObject::kPropertiesOffset));
4084 __ Stp(result_value, boolean_done,
4085 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
4086 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
__ B(&done);

__ Bind(&runtime);
4090 __ CallRuntime(Runtime::kCreateIterResultObject, 2);

__ Bind(&done);
4093 context()->Plug(x0);
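// EmitLoadJSRuntimeFunction: pushes undefined as the receiver and loads the
// requested JS builtin out of the native context into x0.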
4097 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4098 // Push undefined as the receiver.
4099 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
__ Push(x0);
4102 __ Ldr(x0, GlobalObjectMemOperand());
4103 __ Ldr(x0, FieldMemOperand(x0, GlobalObject::kNativeContextOffset));
4104 __ Ldr(x0, ContextMemOperand(x0, expr->context_index()));
4108 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4109 ZoneList<Expression*>* args = expr->arguments();
4110 int arg_count = args->length();
4112 SetCallPosition(expr, arg_count);
4113 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4114 __ Peek(x1, (arg_count + 1) * kPointerSize);
__ CallStub(&stub);
4119 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4120 ZoneList<Expression*>* args = expr->arguments();
4121 int arg_count = args->length();
4123 if (expr->is_jsruntime()) {
4124 Comment cmnt(masm_, "[ CallRuntime");
4125 EmitLoadJSRuntimeFunction(expr);
4127 // Push the target function under the receiver.
__ Pop(x10);
__ Push(x0, x10);
4131 for (int i = 0; i < arg_count; i++) {
4132 VisitForStackValue(args->at(i));
4135 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4136 EmitCallJSRuntimeFunction(expr);
4138 // Restore context register.
4139 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4141 context()->DropAndPlug(1, x0);
} else {
4144 const Runtime::Function* function = expr->function();
4145 switch (function->function_id) {
4146 #define CALL_INTRINSIC_GENERATOR(Name) \
4147 case Runtime::kInline##Name: { \
4148 Comment cmnt(masm_, "[ Inline" #Name); \
4149 return Emit##Name(expr); \
4151 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4152 #undef CALL_INTRINSIC_GENERATOR
default: {
4154 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4155 // Push the arguments ("left-to-right").
4156 for (int i = 0; i < arg_count; i++) {
4157 VisitForStackValue(args->at(i));
4160 // Call the C runtime function.
4161 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4162 __ CallRuntime(expr->function(), arg_count);
4163 context()->Plug(x0);
4170 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4171 switch (expr->op()) {
4172 case Token::DELETE: {
4173 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4174 Property* property = expr->expression()->AsProperty();
4175 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4177 if (property != NULL) {
4178 VisitForStackValue(property->obj());
4179 VisitForStackValue(property->key());
4180 __ CallRuntime(is_strict(language_mode())
4181 ? Runtime::kDeleteProperty_Strict
4182 : Runtime::kDeleteProperty_Sloppy,
2);
4184 context()->Plug(x0);
4185 } else if (proxy != NULL) {
4186 Variable* var = proxy->var();
4187 // Delete of an unqualified identifier is disallowed in strict mode but
4188 // "delete this" is allowed.
4189 bool is_this = var->HasThisName(isolate());
4190 DCHECK(is_sloppy(language_mode()) || is_this);
4191 if (var->IsUnallocatedOrGlobalSlot()) {
4192 __ Ldr(x12, GlobalObjectMemOperand());
4193 __ Mov(x11, Operand(var->name()));
__ Push(x12, x11);
4195 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4196 context()->Plug(x0);
4197 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4198 // Result of deleting non-global, non-dynamic variables is false.
4199 // The subexpression does not have side effects.
4200 context()->Plug(is_this);
} else {
4202 // Non-global variable. Call the runtime to try to delete from the
4203 // context where the variable was introduced.
4204 __ Mov(x2, Operand(var->name()));
4205 __ Push(context_register(), x2);
4206 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4207 context()->Plug(x0);
4210 // Result of deleting non-property, non-variable reference is true.
4211 // The subexpression may have side effects.
4212 VisitForEffect(expr->expression());
4213 context()->Plug(true);
}
break;
}

case Token::VOID: {
4219 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4220 VisitForEffect(expr->expression());
4221 context()->Plug(Heap::kUndefinedValueRootIndex);
break;
}

case Token::NOT: {
4225 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4226 if (context()->IsEffect()) {
4227 // Unary NOT has no side effects so it's only necessary to visit the
4228 // subexpression. Match the optimizing compiler by not branching.
4229 VisitForEffect(expr->expression());
4230 } else if (context()->IsTest()) {
4231 const TestContext* test = TestContext::cast(context());
4232 // The labels are swapped for the recursive call.
4233 VisitForControl(expr->expression(),
4234 test->false_label(),
test->true_label(),
4236 test->fall_through());
4237 context()->Plug(test->true_label(), test->false_label());
4239 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4240 // TODO(jbramley): This could be much more efficient using (for
4241 // example) the CSEL instruction.
4242 Label materialize_true, materialize_false, done;
4243 VisitForControl(expr->expression(),
&materialize_false,
&materialize_true,
&materialize_true);
4248 __ Bind(&materialize_true);
4249 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4250 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
__ B(&done);
4253 __ Bind(&materialize_false);
4254 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4255 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
__ Bind(&done);
4259 if (context()->IsStackValue()) {
4260 __ Push(result_register());
4265 case Token::TYPEOF: {
4266 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4268 AccumulatorValueContext context(this);
4269 VisitForTypeofValue(expr->expression());
4272 TypeofStub typeof_stub(isolate());
4273 __ CallStub(&typeof_stub);
4274 context()->Plug(x0);
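// Count operations (x++, ++x, x--, --x). The fast path adds or subtracts
// Smi 1 inline and can be patched via JumpPatchSite; the slow path converts
// the operand with ToNumberStub and uses the ADD BinaryOpIC. Postfix forms
// additionally keep the original value on the stack.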
4283 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4284 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4286 Comment cmnt(masm_, "[ CountOperation");
4288 Property* prop = expr->expression()->AsProperty();
4289 LhsKind assign_type = Property::GetAssignType(prop);
4291 // Evaluate expression and get value.
4292 if (assign_type == VARIABLE) {
4293 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4294 AccumulatorValueContext context(this);
4295 EmitVariableLoad(expr->expression()->AsVariableProxy());
4297 // Reserve space for result of postfix operation.
4298 if (expr->is_postfix() && !context()->IsEffect()) {
__ Push(xzr);
}
4301 switch (assign_type) {
4302 case NAMED_PROPERTY: {
4303 // Put the object both on the stack and in the register.
4304 VisitForStackValue(prop->obj());
4305 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4306 EmitNamedPropertyLoad(prop);
4310 case NAMED_SUPER_PROPERTY: {
4311 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4312 VisitForAccumulatorValue(
4313 prop->obj()->AsSuperPropertyReference()->home_object());
4314 __ Push(result_register());
4315 const Register scratch = x10;
4316 __ Peek(scratch, kPointerSize);
4317 __ Push(scratch, result_register());
4318 EmitNamedSuperPropertyLoad(prop);
4322 case KEYED_SUPER_PROPERTY: {
4323 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4325 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->home_object());
4326 VisitForAccumulatorValue(prop->key());
4327 __ Push(result_register());
4328 const Register scratch1 = x10;
4329 const Register scratch2 = x11;
4330 __ Peek(scratch1, 2 * kPointerSize);
4331 __ Peek(scratch2, kPointerSize);
4332 __ Push(scratch1, scratch2, result_register());
4333 EmitKeyedSuperPropertyLoad(prop);
4337 case KEYED_PROPERTY: {
4338 VisitForStackValue(prop->obj());
4339 VisitForStackValue(prop->key());
4340 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4341 __ Peek(LoadDescriptor::NameRegister(), 0);
4342 EmitKeyedPropertyLoad(prop);
4351 // We need a second deoptimization point after loading the value
4352 // in case evaluating the property load has a side effect.
4353 if (assign_type == VARIABLE) {
4354 PrepareForBailout(expr->expression(), TOS_REG);
} else {
4356 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4359 // Inline smi case if we are in a loop.
4360 Label stub_call, done;
4361 JumpPatchSite patch_site(masm_);
4363 int count_value = expr->op() == Token::INC ? 1 : -1;
4364 if (ShouldInlineSmiCase(expr->op())) {
Label slow;
4366 patch_site.EmitJumpIfNotSmi(x0, &slow);
4368 // Save result for postfix expressions.
4369 if (expr->is_postfix()) {
4370 if (!context()->IsEffect()) {
4371 // Save the result on the stack. If we have a named or keyed property we
4372 // store the result under the receiver that is currently on top of the
// the stack.
4374 switch (assign_type) {
case VARIABLE:
__ Push(x0);
break;
4378 case NAMED_PROPERTY:
4379 __ Poke(x0, kPointerSize);
4381 case NAMED_SUPER_PROPERTY:
4382 __ Poke(x0, kPointerSize * 2);
4384 case KEYED_PROPERTY:
4385 __ Poke(x0, kPointerSize * 2);
4387 case KEYED_SUPER_PROPERTY:
4388 __ Poke(x0, kPointerSize * 3);
4394 __ Adds(x0, x0, Smi::FromInt(count_value));
__ B(vc, &done);
4396 // Call stub. Undo operation first.
4397 __ Sub(x0, x0, Smi::FromInt(count_value));
__ Bind(&slow);
}
4401 if (!is_strong(language_mode())) {
4402 ToNumberStub convert_stub(isolate());
4403 __ CallStub(&convert_stub);
4404 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4407 // Save result for postfix expressions.
4408 if (expr->is_postfix()) {
4409 if (!context()->IsEffect()) {
4410 // Save the result on the stack. If we have a named or keyed property
4411 // we store the result under the receiver that is currently on top
// of the stack.
4413 switch (assign_type) {
case VARIABLE:
__ Push(x0);
break;
4417 case NAMED_PROPERTY:
4418 __ Poke(x0, kXRegSize);
4420 case NAMED_SUPER_PROPERTY:
4421 __ Poke(x0, 2 * kXRegSize);
4423 case KEYED_PROPERTY:
4424 __ Poke(x0, 2 * kXRegSize);
4426 case KEYED_SUPER_PROPERTY:
4427 __ Poke(x0, 3 * kXRegSize);
4433 __ Bind(&stub_call);
__ Mov(x1, x0);
4435 __ Mov(x0, Smi::FromInt(count_value));
4437 SetExpressionPosition(expr);
4440 Assembler::BlockPoolsScope scope(masm_);
4442 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4443 strength(language_mode())).code();
4444 CallIC(code, expr->CountBinOpFeedbackId());
4445 patch_site.EmitPatchInfo();
__ Bind(&done);
4449 if (is_strong(language_mode())) {
4450 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4452 // Store the value returned in x0.
4453 switch (assign_type) {
case VARIABLE:
4455 if (expr->is_postfix()) {
4456 { EffectContext context(this);
4457 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4458 Token::ASSIGN, expr->CountSlot());
4459 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4462 // For all contexts except EffectContext we have the result on
4463 // top of the stack.
4464 if (!context()->IsEffect()) {
4465 context()->PlugTOS();
4468 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4469 Token::ASSIGN, expr->CountSlot());
4470 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4471 context()->Plug(x0);
4474 case NAMED_PROPERTY: {
4475 __ Mov(StoreDescriptor::NameRegister(),
4476 Operand(prop->key()->AsLiteral()->value()));
4477 __ Pop(StoreDescriptor::ReceiverRegister());
4478 if (FLAG_vector_stores) {
4479 EmitLoadStoreICSlot(expr->CountSlot());
CallStoreIC();
} else {
4482 CallStoreIC(expr->CountStoreFeedbackId());
4484 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4485 if (expr->is_postfix()) {
4486 if (!context()->IsEffect()) {
4487 context()->PlugTOS();
4490 context()->Plug(x0);
4494 case NAMED_SUPER_PROPERTY: {
4495 EmitNamedSuperPropertyStore(prop);
4496 if (expr->is_postfix()) {
4497 if (!context()->IsEffect()) {
4498 context()->PlugTOS();
4501 context()->Plug(x0);
4505 case KEYED_SUPER_PROPERTY: {
4506 EmitKeyedSuperPropertyStore(prop);
4507 if (expr->is_postfix()) {
4508 if (!context()->IsEffect()) {
4509 context()->PlugTOS();
4512 context()->Plug(x0);
4516 case KEYED_PROPERTY: {
4517 __ Pop(StoreDescriptor::NameRegister());
4518 __ Pop(StoreDescriptor::ReceiverRegister());
4520 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4521 if (FLAG_vector_stores) {
4522 EmitLoadStoreICSlot(expr->CountSlot());
CallIC(ic);
} else {
4525 CallIC(ic, expr->CountStoreFeedbackId());
4527 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4528 if (expr->is_postfix()) {
4529 if (!context()->IsEffect()) {
4530 context()->PlugTOS();
4533 context()->Plug(x0);
4541 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4542 Expression* sub_expr,
4543 Handle<String> check) {
4544 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4545 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4546 Label materialize_true, materialize_false;
4547 Label* if_true = NULL;
4548 Label* if_false = NULL;
4549 Label* fall_through = NULL;
4550 context()->PrepareTest(&materialize_true, &materialize_false,
4551 &if_true, &if_false, &fall_through);
4553 { AccumulatorValueContext context(this);
4554 VisitForTypeofValue(sub_expr);
4556 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4558 Factory* factory = isolate()->factory();
4559 if (String::Equals(check, factory->number_string())) {
4560 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4561 __ JumpIfSmi(x0, if_true);
4562 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4563 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4564 Split(eq, if_true, if_false, fall_through);
4565 } else if (String::Equals(check, factory->string_string())) {
4566 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4567 __ JumpIfSmi(x0, if_false);
4568 __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
4569 Split(lt, if_true, if_false, fall_through);
4570 } else if (String::Equals(check, factory->symbol_string())) {
4571 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4572 __ JumpIfSmi(x0, if_false);
4573 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4574 Split(eq, if_true, if_false, fall_through);
4575 } else if (String::Equals(check, factory->boolean_string())) {
4576 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4577 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4578 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4579 Split(eq, if_true, if_false, fall_through);
4580 } else if (String::Equals(check, factory->undefined_string())) {
4582 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4583 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4584 __ JumpIfSmi(x0, if_false);
4585 // Check for undetectable objects => true.
4586 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4587 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4588 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
fall_through);
4590 } else if (String::Equals(check, factory->function_string())) {
4591 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4592 __ JumpIfSmi(x0, if_false);
4593 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4594 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4595 __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
4596 __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
4597 if_false, fall_through);
4598 } else if (String::Equals(check, factory->object_string())) {
4599 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4600 __ JumpIfSmi(x0, if_false);
4601 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4602 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
4603 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, if_false, lt);
4604 // Check for callable or undetectable objects => false.
4605 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
4606 __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
4607 if_true, if_false, fall_through);
4609 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
4610 } else if (String::Equals(check, factory->type##_string())) { \
4611 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
#type "_string");                                               \
4613 __ JumpIfSmi(x0, if_true); \
4614 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); \
4615 __ CompareRoot(x0, Heap::k##Type##MapRootIndex); \
4616 Split(eq, if_true, if_false, fall_through);
4617 SIMD128_TYPES(SIMD128_TYPE)
4621 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4622 if (if_false != fall_through) __ B(if_false);
4624 context()->Plug(if_true, if_false);
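// Compare operations. Literal comparisons (typeof x == "...", x == null) were
// handled above; 'in' and 'instanceof' go through the runtime or a stub, and
// ordinary comparisons use an inline Smi check plus the CompareIC.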
4628 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4629 Comment cmnt(masm_, "[ CompareOperation");
4630 SetExpressionPosition(expr);
4632 // Try to generate an optimized comparison with a literal value.
4633 // TODO(jbramley): This only checks common values like NaN or undefined.
4634 // Should it also handle ARM64 immediate operands?
4635 if (TryLiteralCompare(expr)) {
4639 // Assign labels according to context()->PrepareTest.
4640 Label materialize_true;
4641 Label materialize_false;
4642 Label* if_true = NULL;
4643 Label* if_false = NULL;
4644 Label* fall_through = NULL;
4645 context()->PrepareTest(&materialize_true, &materialize_false,
4646 &if_true, &if_false, &fall_through);
4648 Token::Value op = expr->op();
4649 VisitForStackValue(expr->left());
switch (op) {
case Token::IN:
4652 VisitForStackValue(expr->right());
4653 __ CallRuntime(Runtime::kHasProperty, 2);
4654 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4655 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4656 Split(eq, if_true, if_false, fall_through);
break;
4659 case Token::INSTANCEOF: {
4660 VisitForAccumulatorValue(expr->right());
4662 InstanceOfStub stub(isolate());
__ CallStub(&stub);
4664 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4665 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4666 Split(eq, if_true, if_false, fall_through);
break;
}

default: {
4671 VisitForAccumulatorValue(expr->right());
4672 Condition cond = CompareIC::ComputeCondition(op);
4674 // Pop the stack value.
__ Pop(x1);
4677 JumpPatchSite patch_site(masm_);
4678 if (ShouldInlineSmiCase(op)) {
Label slow_case;
4680 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
__ Cmp(x1, x0);
4682 Split(cond, if_true, if_false, NULL);
4683 __ Bind(&slow_case);
4686 Handle<Code> ic = CodeFactory::CompareIC(
4687 isolate(), op, strength(language_mode())).code();
4688 CallIC(ic, expr->CompareOperationFeedbackId());
4689 patch_site.EmitPatchInfo();
4690 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4691 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4695 // Convert the result of the comparison into one expected for this
4696 // expression's context.
4697 context()->Plug(if_true, if_false);
4701 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4702 Expression* sub_expr,
4704 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4705 Label materialize_true, materialize_false;
4706 Label* if_true = NULL;
4707 Label* if_false = NULL;
4708 Label* fall_through = NULL;
4709 context()->PrepareTest(&materialize_true, &materialize_false,
4710 &if_true, &if_false, &fall_through);
4712 VisitForAccumulatorValue(sub_expr);
4713 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4715 if (expr->op() == Token::EQ_STRICT) {
4716 Heap::RootListIndex nil_value = nil == kNullValue ?
4717 Heap::kNullValueRootIndex :
4718 Heap::kUndefinedValueRootIndex;
4719 __ CompareRoot(x0, nil_value);
4720 Split(eq, if_true, if_false, fall_through);
4722 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4723 CallIC(ic, expr->CompareOperationFeedbackId());
4724 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4727 context()->Plug(if_true, if_false);
4731 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4732 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4733 context()->Plug(x0);
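// Yield expressions. The yielded value is evaluated onto the stack, the
// generator object's continuation and context are saved, and control returns
// to the caller; 'yield*' (the delegating case) additionally drives the inner
// iterator's next/throw protocol in a loop.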
4737 void FullCodeGenerator::VisitYield(Yield* expr) {
4738 Comment cmnt(masm_, "[ Yield");
4739 SetExpressionPosition(expr);
4741 // Evaluate yielded value first; the initial iterator definition depends on
4742 // this. It stays on the stack while we update the iterator.
4743 VisitForStackValue(expr->expression());
4745 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4746 // and suchlike. The implementation changes a little by bleeding_edge so I
4747 // don't want to spend too much time on it now.
4749 switch (expr->yield_kind()) {
4750 case Yield::kSuspend:
4751 // Pop value from top-of-stack slot; box result into result register.
4752 EmitCreateIteratorResult(false);
4753 __ Push(result_register());
// Fall through.
4755 case Yield::kInitial: {
4756 Label suspend, continuation, post_runtime, resume;

__ B(&suspend);
4759 // TODO(jbramley): This label is bound here because the following code
4760 // looks at its pos(). Is it possible to do something more efficient here,
4761 // perhaps using Adr?
4762 __ Bind(&continuation);
4763 __ RecordGeneratorContinuation();
__ B(&resume);

__ Bind(&suspend);
4767 VisitForAccumulatorValue(expr->generator_object());
4768 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4769 __ Mov(x1, Smi::FromInt(continuation.pos()));
4770 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4771 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
__ Mov(x1, cp);
4773 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4774 kLRHasBeenSaved, kDontSaveFPRegs);
4775 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4776 __ Cmp(__ StackPointer(), x1);
4777 __ B(eq, &post_runtime);
4778 __ Push(x0); // generator object
4779 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4780 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4781 __ Bind(&post_runtime);
4782 __ Pop(result_register());
4783 EmitReturnSequence();

__ Bind(&resume);
4786 context()->Plug(result_register());
4790 case Yield::kFinal: {
4791 VisitForAccumulatorValue(expr->generator_object());
4792 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4793 __ Str(x1, FieldMemOperand(result_register(),
4794 JSGeneratorObject::kContinuationOffset));
4795 // Pop value from top-of-stack slot, box result into result register.
4796 EmitCreateIteratorResult(true);
4797 EmitUnwindBeforeReturn();
4798 EmitReturnSequence();
4802 case Yield::kDelegating: {
4803 VisitForStackValue(expr->generator_object());
4805 // Initial stack layout is as follows:
4806 // [sp + 1 * kPointerSize] iter
4807 // [sp + 0 * kPointerSize] g
4809 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4810 Label l_next, l_call, l_loop;
4811 Register load_receiver = LoadDescriptor::ReceiverRegister();
4812 Register load_name = LoadDescriptor::NameRegister();
4814 // Initial send value is undefined.
4815 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4818 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
__ Bind(&l_catch);
4820 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
4821 __ Peek(x3, 1 * kPointerSize); // iter
4822 __ Push(load_name, x3, x0); // "throw", iter, except
__ B(&l_call);
4825 // try { received = %yield result }
4826 // Shuffle the received result above a try handler and yield it without
// re-boxing.
__ Bind(&l_try);
4829 __ Pop(x0); // result
4830 int handler_index = NewHandlerTableEntry();
4831 EnterTryBlock(handler_index, &l_catch);
4832 const int try_block_size = TryCatch::kElementCount * kPointerSize;
4833 __ Push(x0); // result

__ B(&l_suspend);
4836 // TODO(jbramley): This label is bound here because the following code
4837 // looks at its pos(). Is it possible to do something more efficient here,
4838 // perhaps using Adr?
4839 __ Bind(&l_continuation);
4840 __ RecordGeneratorContinuation();
__ B(&l_resume);
4843 __ Bind(&l_suspend);
4844 const int generator_object_depth = kPointerSize + try_block_size;
4845 __ Peek(x0, generator_object_depth);
__ Push(x0); // g
4847 __ Push(Smi::FromInt(handler_index)); // handler-index
4848 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4849 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4850 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4851 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
__ Mov(x1, cp);
4853 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4854 kLRHasBeenSaved, kDontSaveFPRegs);
4855 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
4856 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4857 __ Pop(x0); // result
4858 EmitReturnSequence();
4859 __ Bind(&l_resume); // received in x0
4860 ExitTryBlock(handler_index);
4862 // receiver = iter; f = 'next'; arg = received;
__ Bind(&l_next);
4865 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
4866 __ Peek(x3, 1 * kPointerSize); // iter
4867 __ Push(load_name, x3, x0); // "next", iter, received
4869 // result = receiver[f](arg);
__ Bind(&l_call);
4871 __ Peek(load_receiver, 1 * kPointerSize);
4872 __ Peek(load_name, 2 * kPointerSize);
4873 __ Mov(LoadDescriptor::SlotRegister(),
4874 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
4875 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
4876 CallIC(ic, TypeFeedbackId::None());
__ Mov(x1, x0);
4878 __ Poke(x1, 2 * kPointerSize);
4879 SetCallPosition(expr, 1);
4880 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
4883 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4884 __ Drop(1); // The function is still on the stack; drop it.
4886 // if (!result.done) goto l_try;
__ Bind(&l_loop);
4888 __ Move(load_receiver, x0);
4890 __ Push(load_receiver); // save result
4891 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
4892 __ Mov(LoadDescriptor::SlotRegister(),
4893 SmiFromSlot(expr->DoneFeedbackSlot()));
4894 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.done
4895 // The ToBooleanStub argument (result.done) is in x0.
4896 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
CallIC(bool_ic);
__ CompareRoot(x0, Heap::kTrueValueRootIndex);
__ B(ne, &l_try);
4901 __ Pop(load_receiver); // result
4902 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
4903 __ Mov(LoadDescriptor::SlotRegister(),
4904 SmiFromSlot(expr->ValueFeedbackSlot()));
4905 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.value
4906 context()->DropAndPlug(2, x0); // drop iter and g
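// EmitGeneratorResume: re-enters a suspended generator. It rebuilds the
// generator's frame (receiver, hole-initialized arguments, saved operand
// stack) and either jumps straight back to the recorded continuation offset
// (the common 'next' case with an empty operand stack) or defers to
// Runtime::kResumeJSGeneratorObject.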
4913 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4915 JSGeneratorObject::ResumeMode resume_mode) {
4916 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4917 Register generator_object = x1;
4918 Register the_hole = x2;
4919 Register operand_stack_size = w3;
4920 Register function = x4;
4922 // The value stays in x0, and is ultimately read by the resumed generator, as
4923 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
4924 // is read to throw the value when the resumed generator is already closed. x1
4925 // will hold the generator object until the activation has been resumed.
4926 VisitForStackValue(generator);
4927 VisitForAccumulatorValue(value);
4928 __ Pop(generator_object);
4930 // Load suspended function and context.
4931 __ Ldr(cp, FieldMemOperand(generator_object,
4932 JSGeneratorObject::kContextOffset));
4933 __ Ldr(function, FieldMemOperand(generator_object,
4934 JSGeneratorObject::kFunctionOffset));
4936 // Load receiver and store as the first argument.
4937 __ Ldr(x10, FieldMemOperand(generator_object,
4938 JSGeneratorObject::kReceiverOffset));
__ Push(x10);
4941 // Push holes for the rest of the arguments to the generator function.
4942 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4944 // The number of arguments is stored as an int32_t, and -1 is a marker
4945 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
4946 // extension to correctly handle it. However, in this case, we operate on
4947 // 32-bit W registers, so extension isn't required.
4948 __ Ldr(w10, FieldMemOperand(x10,
4949 SharedFunctionInfo::kFormalParameterCountOffset));
4950 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4951 __ PushMultipleTimes(the_hole, w10);
4953 // Enter a new JavaScript frame, and initialize its slots as they were when
4954 // the generator was suspended.
4955 Label resume_frame, done;
4956 __ Bl(&resume_frame);
__ B(&done);
4959 __ Bind(&resume_frame);
4960 __ Push(lr, // Return address.
4961 fp, // Caller's frame pointer.
4962 cp, // Callee's context.
4963 function); // Callee's JS Function.
4964 __ Add(fp, __ StackPointer(), kPointerSize * 2);
4966 // Load and untag the operand stack size.
4967 __ Ldr(x10, FieldMemOperand(generator_object,
4968 JSGeneratorObject::kOperandStackOffset));
4969 __ Ldr(operand_stack_size,
4970 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4972 // If we are sending a value and there is no operand stack, we can jump back
4974 if (resume_mode == JSGeneratorObject::NEXT) {
Label slow_resume;
4976 __ Cbnz(operand_stack_size, &slow_resume);
4977 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4979 __ Ldrsw(x11, UntagSmiFieldMemOperand(generator_object,
4980 JSGeneratorObject::kContinuationOffset));
4981 __ Add(x10, x10, x11);
4982 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
4983 __ Str(x12, FieldMemOperand(generator_object,
4984 JSGeneratorObject::kContinuationOffset));
__ Br(x10);
4987 __ Bind(&slow_resume);
4990 // Otherwise, we push holes for the operand stack and call the runtime to fix
4991 // up the stack and the handlers.
4992 __ PushMultipleTimes(the_hole, operand_stack_size);
4994 __ Mov(x10, Smi::FromInt(resume_mode));
4995 __ Push(generator_object, result_register(), x10);
4996 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4997 // Not reached: the runtime call returns elsewhere.
__ Unreachable();

__ Bind(&done);
5001 context()->Plug(result_register());
5005 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5006 Label allocate, done_allocate;
5008 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5010 Register result = x0;
5011 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate, TAG_OBJECT);
5012 __ B(&done_allocate);
__ Bind(&allocate);
5015 __ Push(Smi::FromInt(JSIteratorResult::kSize));
5016 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5018 __ Bind(&done_allocate);
5019 Register map_reg = x1;
5020 Register result_value = x2;
5021 Register boolean_done = x3;
5022 Register empty_fixed_array = x4;
5023 Register untagged_result = x5;
5024 __ Ldr(map_reg, GlobalObjectMemOperand());
5025 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5027 __ Ldr(map_reg, ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5028 __ Pop(result_value);
5029 __ LoadRoot(boolean_done,
5030 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
5031 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
5032 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5033 JSObject::kElementsOffset);
5034 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
5035 JSIteratorResult::kDoneOffset);
5036 __ ObjectUntag(untagged_result, result);
5037 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5038 __ Stp(empty_fixed_array, empty_fixed_array,
5039 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5040 __ Stp(result_value, boolean_done,
5041 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
5042 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
5046 // TODO(all): I don't like this method.
5047 // It seems to me that in too many places x0 is used in place of this.
5048 // Also, this function is not suitable for all places where x0 should be
5049 // abstracted (eg. when used as an argument). But some places assume that the
5050 // first argument register is x0, and use this function instead.
5051 // Considering that most of the register allocation is hard-coded in the
5052 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5053 // that abstracting the allocation through functions would not yield any
5054 // performance benefit, I think the existence of this function is debatable.
5055 Register FullCodeGenerator::result_register() {
return x0;
}
5060 Register FullCodeGenerator::context_register() {
return cp;
}
5065 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5066 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5067 __ Str(value, MemOperand(fp, frame_offset));
5071 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5072 __ Ldr(dst, ContextMemOperand(cp, context_index));
5076 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5077 Scope* closure_scope = scope()->ClosureScope();
5078 if (closure_scope->is_script_scope() ||
5079 closure_scope->is_module_scope()) {
5080 // Contexts nested in the native context have a canonical empty function
5081 // as their closure, not the anonymous closure containing the global
5082 // code. Pass a smi sentinel and let the runtime look up the empty
// function.
5084 DCHECK(kSmiTag == 0);
__ Push(xzr);
5086 } else if (closure_scope->is_eval_scope()) {
5087 // Contexts created by a call to eval have the same closure as the
5088 // context calling eval, not the anonymous closure containing the eval
5089 // code. Fetch it from the context.
5090 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
__ Push(x10);
} else {
5093 DCHECK(closure_scope->is_function_scope());
5094 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(x10);
5100 void FullCodeGenerator::EnterFinallyBlock() {
5101 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5102 DCHECK(!result_register().is(x10));
5103 // Preserve the result register while executing finally block.
5104 // Also cook the return address in lr to the stack (smi encoded Code* delta).
5105 __ Sub(x10, lr, Operand(masm_->CodeObject()));
__ SmiTag(x10);
5107 __ Push(result_register(), x10);
5109 // Store pending message while executing finally block.
5110 ExternalReference pending_message_obj =
5111 ExternalReference::address_of_pending_message_obj(isolate());
5112 __ Mov(x10, pending_message_obj);
5113 __ Ldr(x10, MemOperand(x10));
__ Push(x10);
5116 ClearPendingMessage();
5120 void FullCodeGenerator::ExitFinallyBlock() {
5121 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5122 DCHECK(!result_register().is(x10));
5124 // Restore pending message from stack.
__ Pop(x10);
5126 ExternalReference pending_message_obj =
5127 ExternalReference::address_of_pending_message_obj(isolate());
5128 __ Mov(x13, pending_message_obj);
5129 __ Str(x10, MemOperand(x13));
5131 // Restore result register and cooked return address from the stack.
5132 __ Pop(x10, result_register());
5134 // Uncook the return address (see EnterFinallyBlock).
__ SmiUntag(x10);
5136 __ Add(x11, x10, Operand(masm_->CodeObject()));
__ Br(x11);
5141 void FullCodeGenerator::ClearPendingMessage() {
5142 DCHECK(!result_register().is(x10));
5143 ExternalReference pending_message_obj =
5144 ExternalReference::address_of_pending_message_obj(isolate());
5145 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
5146 __ Mov(x13, pending_message_obj);
5147 __ Str(x10, MemOperand(x13));
5151 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5152 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5153 __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
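// Back-edge patching. Each back edge ends with a three-instruction sequence
// (a conditional branch or nop, a pc-relative literal load of the target stub
// address into x16, and a blr); PatchAt flips the first instruction and
// rewrites the literal so the same site can call the interrupt or OSR builtins.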
5160 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5162 BackEdgeState target_state,
5163 Code* replacement_code) {
5164 // Turn the jump into a nop.
5165 Address branch_address = pc - 3 * kInstructionSize;
5166 PatchingAssembler patcher(branch_address, 1);
5168 DCHECK(Instruction::Cast(branch_address)
5169 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5170 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5171 Instruction::Cast(branch_address)->ImmPCOffset() ==
5172 6 * kInstructionSize));
5174 switch (target_state) {
case INTERRUPT:
5176 // <decrement profiling counter>
5177 // .. .. .. .. b.pl ok
5178 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5179 // .. .. .. .. blr x16
5180 // ... more instructions.
5182 // Jump offset is 6 instructions.
patcher.b(6, pl);
break;
5185 case ON_STACK_REPLACEMENT:
5186 case OSR_AFTER_STACK_CHECK:
5187 // <decrement profiling counter>
5188 // .. .. .. .. mov x0, x0 (NOP)
5189 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5190 // .. .. .. .. blr x16
5191 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
break;
5195 // Replace the call address.
5196 Instruction* load = Instruction::Cast(pc)->preceding(2);
5197 Address interrupt_address_pointer =
5198 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5199 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5200 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
->builtins()->OnStackReplacement()->entry())) ||
5204 (Memory::uint64_at(interrupt_address_pointer) ==
5205 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
->builtins()->InterruptCheck()->entry())) ||
5209 (Memory::uint64_at(interrupt_address_pointer) ==
5210 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
->builtins()->OsrAfterStackCheck()->entry())) ||
5214 (Memory::uint64_at(interrupt_address_pointer) ==
5215 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
->builtins()->OnStackReplacement()->entry())));
5219 Memory::uint64_at(interrupt_address_pointer) =
5220 reinterpret_cast<uint64_t>(replacement_code->entry());
5222 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5223 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5227 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
Isolate* isolate,
5229 Code* unoptimized_code,
Address pc) {
5231 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5232 // back-end), but this function is gone in bleeding_edge so it might not
5234 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5236 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5237 Instruction* load = Instruction::Cast(pc)->preceding(2);
5238 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5239 load->ImmPCOffset());
5240 if (entry == reinterpret_cast<uint64_t>(
5241 isolate->builtins()->OnStackReplacement()->entry())) {
5242 return ON_STACK_REPLACEMENT;
5243 } else if (entry == reinterpret_cast<uint64_t>(
5244 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5245 return OSR_AFTER_STACK_CHECK;
} else {
UNREACHABLE();
}
}

return INTERRUPT;
}
5255 } // namespace internal
} // namespace v8
5258 #endif // V8_TARGET_ARCH_ARM64