// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
    info_emitted_ = true;
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
  bool info_emitted_;
};
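// Illustrative note on the patching protocol above: before patching, the
// site is a single "tbz xzr, #0, target" (always taken) or
// "tbnz xzr, #0, target" (never taken), since bit 0 of xzr is always clear.
// Once type feedback warrants it, PatchInlinedSmiCode rewrites the
// instruction to test bit 0 (the smi tag) of the register recorded by
// EmitPatchInfo, turning the site into a real inlined smi check.

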
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //  Push(lr, fp, cp, x1);
  //  Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }
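  // Illustrative example of the push loop above: with 70 locals and
  // kMaxPushes == 32, the loop body runs 70 / 32 == 2 times (64 slots) and
  // the tail push emits the remaining 70 % 32 == 6 slots.
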
  bool function_in_register_x1 = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ Bind(&done);
        }
      }
    }
  }
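  // Note on the elided barrier above: the write barrier only needs to record
  // old-to-new pointers, so when the context was just allocated in new space
  // by FastNewContextStub, stores into it can never require a remembered-set
  // entry and the barrier can be skipped safely.
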
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
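// The profiling counter lives in a heap Cell holding a smi. Subs both
// decrements it and sets the condition flags, so a caller can follow the
// decrement with a single "B(pl, &ok)" that skips the InterruptCheck call
// until the budget underflows; the counter is then reset to the full budget
// (or a 16x smaller one in debug mode, so break requests are seen sooner).

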
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
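// Worked example of the rounding above (constants are illustrative, not
// necessarily this port's values): with kCodeSizeMultiplier == 12 and a back
// edge 103 bytes from its target, a plain floor would give 103 / 12 == 8,
// while (103 + 6) / 12 == 9 rounds to the nearest weight instead.

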
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);

    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSRetSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the size
      // of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16 bytes alignment here.
      DCHECK(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
      __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
      __ add(current_sp, current_sp, ip0);
      __ ret();
      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}
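// For reference, the fixed-size sequence emitted above disassembles roughly
// as follows (illustrative):
//   mov   jssp, fp
//   ldp   fp, lr, [jssp], #16
//   ldr   ip0, <literal>       ; the value planted by dc64 below it
//   add   jssp, jssp, ip0      ; drop receiver and arguments
//   ret
//   .quad (num_parameters + 1) * kXRegSize
// Keeping the instruction count exact is what allows the debugger to patch
// the sequence in place.

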
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}
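// Example: when compiling "if (a < b) ..." and the false block immediately
// follows the comparison, if_false == fall_through, so Split emits the
// single instruction "b.lt if_true" rather than a taken branch plus an
// unconditional jump.

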
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
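// The two bases differ because parameters live above fp (pushed by the
// caller, with the receiver accounting for the "+ 1") while locals live
// below it, starting at kLocal0Offset; scaling var->index() negatively
// walks down the frame in both cases.

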
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
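// The four values pushed for Runtime::kDeclareLookupSlot above are the
// current context, the variable name, the property attributes, and either
// the hole (for hole-initialized bindings) or 0 via xzr for "no initial
// value".

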
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));

  // Assign it.
  __ Str(x1, ContextMemOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            x1,
                            x3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse info body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
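// Each non-default clause gets its own JumpPatchSite: the inline smi compare
// starts out disabled (the tbz on xzr always branches to the CompareIC slow
// case), and PatchInlinedSmiCode enables it once type feedback indicates
// that both operands are likely smis.

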
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);
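
  // At this point the for-in state occupies five stack slots (from the top):
  //   [0] current index (smi, initially zero)
  //   [1] array length (smi)
  //   [2] the fixed array of keys (or the enum cache)
  //   [3] the enumerable's map, or Smi(1)/Smi(0) for the slow/proxy case
  //   [4] the enumerable object itself
  // which is why the break path at the end drops five slots.
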
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x10.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Mov(x3, x0);
  __ Cbz(x0, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ Bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ B(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(loop_statement.break_label());
  decrement_loop_depth();
}
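// Note that the parser has already desugared for-of: assign_iterator,
// next_result, result_done and assign_each arrive as ordinary expressions,
// so this visitor only stitches them together with a loop and the usual
// back-edge bookkeeping.

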
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ Ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));

  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           SmiFromSlot(expr->HomeObjectFeedbackSlot()));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
  __ Cmp(x0, x10);
  Label done;
  __ B(ne, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ Bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           SmiFromSlot(proxy->VariableFeedbackSlot()));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
      __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               SmiFromSlot(proxy->VariableFeedbackSlot()));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(x0, var);
          Label done;
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Mov(x0, Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(key);
          VisitForStackValue(value);
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode.
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          VisitForEffect(key);
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(value);
          __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        } else {
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ Mov(x0, Smi::FromInt(flags));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Mov(x1, Smi::FromInt(expr->literal_index()));
      __ Push(x0, x1);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);   // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


1874 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1875 DCHECK(expr->target()->IsValidReferenceExpression());
1877 Comment cmnt(masm_, "[ Assignment");
1879 Property* property = expr->target()->AsProperty();
1880 LhsKind assign_type = GetAssignType(property);
1882 // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
1888 if (expr->is_compound()) {
1889 // We need the receiver both on the stack and in the register.
1890 VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
1897 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1898 EmitLoadHomeObject(property->obj()->AsSuperReference());
1899 __ Push(result_register());
1900 if (expr->is_compound()) {
1901 const Register scratch = x10;
1902 __ Peek(scratch, kPointerSize);
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
1907 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1908 EmitLoadHomeObject(property->obj()->AsSuperReference());
1909 __ Push(result_register());
1910 VisitForAccumulatorValue(property->key());
1911 __ Push(result_register());
1912 if (expr->is_compound()) {
1913 const Register scratch1 = x10;
1914 const Register scratch2 = x11;
1915 __ Peek(scratch1, 2 * kPointerSize);
1916 __ Peek(scratch2, kPointerSize);
        __ Push(scratch1, scratch2, result_register());
      }
      break;
    case KEYED_PROPERTY:
1921 if (expr->is_compound()) {
1922 VisitForStackValue(property->obj());
1923 VisitForStackValue(property->key());
1924 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1925 __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }
1933 // For compound assignments we need another deoptimization point after the
1934 // variable/property load.
1935 if (expr->is_compound()) {
1936 { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }
1961 Token::Value op = expr->binary_op();
1962 __ Push(x0); // Left operand goes on the stack.
1963 VisitForAccumulatorValue(expr->value());
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
1980 // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
1986 // Record source position before possible IC call.
1987 SetSourcePosition(expr->position());
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
2015 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2016 SetSourcePosition(prop->position());
2017 Literal* key = prop->key()->AsLiteral();
2018 DCHECK(!prop->IsSuperAccess());
2020 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2021 if (FLAG_vector_ics) {
2022 __ Mov(VectorLoadICDescriptor::SlotRegister(),
2023 SmiFromSlot(prop->PropertyFeedbackSlot()));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}
2031 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2032 // Stack: receiver, home_object.
2033 SetSourcePosition(prop->position());
2034 Literal* key = prop->key()->AsLiteral();
2035 DCHECK(!key->value()->IsSmi());
2036 DCHECK(prop->IsSuperAccess());
2038 __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}
2043 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2044 SetSourcePosition(prop->position());
2045 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2046 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2047 if (FLAG_vector_ics) {
2048 __ Mov(VectorLoadICDescriptor::SlotRegister(),
           SmiFromSlot(prop->PropertyFeedbackSlot()));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}
2057 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2058 // Stack: receiver, home_object, key.
2059 SetSourcePosition(prop->position());
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
2070 Label done, both_smis, stub_call;
  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);
2078 // Perform combined smi check on both operands.
2079 __ Orr(x10, left, right);
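  // kSmiTag is 0, so the tag bit of (left | right) is set iff at least one
  // operand is not a smi; a single patchable test therefore covers both.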
2080 JumpPatchSite patch_site(masm_);
2081 patch_site.EmitJumpIfSmi(x10, &both_smis);
2083 __ Bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);
2093 __ Bind(&both_smis);
2094 // Smi case. This code works in the same way as the smi-smi case in the type
2095 // recording binary operation stub, see
2096 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2097 // TODO(all): That doesn't exist any more. Where are the comments?
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable
      // in a signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ Pop(x1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}
2180 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2181 // Constructor is in x0.
  DCHECK(lit != NULL);
  __ Push(x0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = x1;
  __ Ldr(scratch,
         FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);
2192 for (int i = 0; i < lit->properties()->length(); i++) {
2193 ObjectLiteral::Property* property = lit->properties()->at(i);
2194 Literal* key = property->key()->AsLiteral();
2195 Expression* value = property->value();
2196 DCHECK(key != NULL);
    if (property->is_static()) {
      __ Peek(scratch, kPointerSize);  // constructor
    } else {
      __ Peek(scratch, 0);  // prototype
    }
    __ Push(scratch);
2204 VisitForStackValue(key);
2205 VisitForStackValue(value);
2207 switch (property->kind()) {
2208 case ObjectLiteral::Property::CONSTANT:
2209 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2210 case ObjectLiteral::Property::COMPUTED:
2211 case ObjectLiteral::Property::PROTOTYPE:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;
      case ObjectLiteral::Property::GETTER:
        __ CallRuntime(Runtime::kDefineClassGetter, 3);
        break;
      case ObjectLiteral::Property::SETTER:
        __ CallRuntime(Runtime::kDefineClassSetter, 3);
        break;
      default:
        UNREACHABLE();
    }
  }

  // prototype
  __ CallRuntime(Runtime::kToFastProperties, 1);

  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);
}
2236 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2237 DCHECK(expr->IsValidReferenceExpression());
2239 Property* prop = expr->AsProperty();
2240 LhsKind assign_type = GetAssignType(prop);
  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
2249 case NAMED_PROPERTY: {
2250 __ Push(x0); // Preserve value.
2251 VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
2254 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2255 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2256 __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2264 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2265 // stack: value, this; x0: home_object
2266 Register scratch = x10;
2267 Register scratch2 = x11;
2268 __ mov(scratch, result_register()); // home_object
2269 __ Peek(x0, kPointerSize); // value
2270 __ Peek(scratch2, 0); // this
2271 __ Poke(scratch2, kPointerSize); // this
2272 __ Poke(scratch, 0); // home_object
2273 // stack: this, home_object; x0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2280 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2281 __ Push(result_register());
2282 VisitForAccumulatorValue(prop->key());
2283 Register scratch = x10;
2284 Register scratch2 = x11;
2285 __ Peek(scratch2, 2 * kPointerSize); // value
2286 // stack: value, this, home_object; x0: key, x11: value
2287 __ Peek(scratch, kPointerSize); // this
2288 __ Poke(scratch, 2 * kPointerSize);
2289 __ Peek(scratch, 0); // home_object
      __ Poke(scratch, kPointerSize);
      __ Poke(x0, 0);  // key
      __ Move(x0, scratch2);
2293 // stack: this, home_object, key; x0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
2297 case KEYED_PROPERTY: {
2298 __ Push(x0); // Preserve value.
2299 VisitForStackValue(prop->obj());
2300 VisitForAccumulatorValue(prop->key());
2301 __ Mov(StoreDescriptor::NameRegister(), x0);
2302 __ Pop(StoreDescriptor::ReceiverRegister(),
2303 StoreDescriptor::ValueRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}
2314 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2315 Variable* var, MemOperand location) {
2316 __ Str(result_register(), location);
2317 if (var->IsContextSlot()) {
2318 // RecordWrite may destroy all its register arguments.
2319 __ Mov(x10, result_register());
2320 int offset = Context::SlotOffset(var->index());
2321 __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2330 if (var->IsUnallocated()) {
2331 // Global var, const, or let.
2332 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    CallStoreIC();
2336 } else if (op == Token::INIT_CONST_LEGACY) {
2337 // Const initializers need a write barrier.
2338 DCHECK(!var->IsParameter()); // No const parameters.
2339 if (var->IsLookupSlot()) {
2340 __ Mov(x1, Operand(var->name()));
2341 __ Push(x0, cp, x1);
2342 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }
2353 } else if (var->mode() == LET && op != Token::INIT_LET) {
2354 // Non-initializing assignment to let variable needs a write barrier.
2355 DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
2368 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2369 if (var->IsLookupSlot()) {
2370 // Assignment to var.
2371 __ Mov(x11, Operand(var->name()));
2372 __ Mov(x10, Smi::FromInt(strict_mode()));
      // jssp[0]  : mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
2377 __ Push(x0, cp, x11, x10);
2378 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
2382 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2383 MemOperand location = VarOperand(var, x1);
2384 if (FLAG_debug_code && op == Token::INIT_LET) {
2385 __ Ldr(x10, location);
2386 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
2396 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2397 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2398 // Assignment to a property, using a named store IC.
2399 Property* prop = expr->target()->AsProperty();
2400 DCHECK(prop != NULL);
2401 DCHECK(prop->key()->IsLiteral());
2403 // Record source code position before IC call.
2404 SetSourcePosition(expr->position());
2405 __ Mov(StoreDescriptor::NameRegister(),
2406 Operand(prop->key()->AsLiteral()->value()));
2407 __ Pop(StoreDescriptor::ReceiverRegister());
2408 CallStoreIC(expr->AssignmentFeedbackId());
2410 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}
2415 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // x0 : value
  // stack : receiver ('this'), home_object
2419 DCHECK(prop != NULL);
2420 Literal* key = prop->key()->AsLiteral();
2421 DCHECK(key != NULL);
  __ Push(key->value());
  __ Push(x0);
  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
                                          : Runtime::kStoreToSuper_Sloppy),
                 4);
}
2431 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // x0 : value
  // stack : receiver ('this'), home_object, key
2435 DCHECK(prop != NULL);
  __ Push(x0);
  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict
                                          : Runtime::kStoreKeyedToSuper_Sloppy),
                 4);
}
2444 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2445 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2446 // Assignment to a property, using a keyed store IC.
2448 // Record source code position before IC call.
2449 SetSourcePosition(expr->position());
2450 // TODO(all): Could we pass this in registers rather than on the stack?
2451 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2452 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2454 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2455 CallIC(ic, expr->AssignmentFeedbackId());
2457 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}
2462 void FullCodeGenerator::VisitProperty(Property* expr) {
2463 Comment cmnt(masm_, "[ Property");
2464 Expression* key = expr->key();
2466 if (key->IsPropertyName()) {
2467 if (!expr->IsSuperAccess()) {
2468 VisitForAccumulatorValue(expr->obj());
2469 __ Move(LoadDescriptor::ReceiverRegister(), x0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2473 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2474 __ Push(result_register());
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(x0);
  } else {
    if (!expr->IsSuperAccess()) {
2481 VisitForStackValue(expr->obj());
2482 VisitForAccumulatorValue(expr->key());
2483 __ Move(LoadDescriptor::NameRegister(), x0);
2484 __ Pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2488 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2489 __ Push(result_register());
2490 VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
    context()->Plug(x0);
  }
}
2498 void FullCodeGenerator::CallIC(Handle<Code> code,
2499 TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure
  // that the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}
2507 // Code common for calls using the IC.
2508 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2509 Expression* callee = expr->expression();
2511 CallICState::CallType call_type =
2512 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
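  // A bare variable callee (e.g. f()) is a FUNCTION-style call; property
  // callees (e.g. o.f()) are METHOD-style and load the target via a load IC.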
2514 // Get the target function.
2515 if (call_type == CallICState::FUNCTION) {
2516 { StackValueContext context(this);
2517 EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
2521 // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
2525 DCHECK(callee->IsProperty());
2526 DCHECK(!callee->AsProperty()->IsSuperAccess());
2527 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2528 EmitNamedPropertyLoad(callee->AsProperty());
2529 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
  }

  EmitCall(expr, call_type);
}
2539 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2540 Expression* callee = expr->expression();
2541 DCHECK(callee->IsProperty());
2542 Property* prop = callee->AsProperty();
2543 DCHECK(prop->IsSuperAccess());
2545 SetSourcePosition(prop->position());
2546 Literal* key = prop->key()->AsLiteral();
2547 DCHECK(!key->value()->IsSmi());
2549 // Load the function from the receiver.
2550 const Register scratch = x10;
2551 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(x0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
2556 __ Peek(scratch, kPointerSize);
2557 __ Push(x0, scratch);
2558 __ Push(key->value());
  // Stack here:
  // - home_object
  // - this (receiver)
  // - this (receiver) <-- LoadFromSuper will pop here and below.
  // - home_object
  // - key
2566 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2568 // Replace home_object with target function.
2569 __ Poke(x0, kPointerSize);
  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}
2578 // Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);
2584 Expression* callee = expr->expression();
2586 // Load the function from the receiver.
2587 DCHECK(callee->IsProperty());
2588 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2589 __ Move(LoadDescriptor::NameRegister(), x0);
2590 EmitKeyedPropertyLoad(callee->AsProperty());
2591 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  EmitCall(expr, CallICState::METHOD);
}
2601 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2602 Expression* callee = expr->expression();
2603 DCHECK(callee->IsProperty());
2604 Property* prop = callee->AsProperty();
2605 DCHECK(prop->IsSuperAccess());
2607 SetSourcePosition(prop->position());
2609 // Load the function from the receiver.
2610 const Register scratch = x10;
2611 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(x0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
2616 __ Peek(scratch, kPointerSize);
2617 __ Push(x0, scratch);
2618 VisitForStackValue(prop->key());
  // Stack here:
  // - home_object
  // - this (receiver)
  // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  // - home_object
  // - key
2626 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2628 // Replace home_object with target function.
2629 __ Poke(x0, kPointerSize);
  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}
2638 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2639 // Load the arguments.
2640 ZoneList<Expression*>* args = expr->arguments();
2641 int arg_count = args->length();
2642 { PreservePositionScope scope(masm()->positions_recorder());
2643 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
2648 SetSourcePosition(expr->position());
2650 Handle<Code> ic = CallIC::initialize_stub(
2651 isolate(), arg_count, call_type);
2652 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
2653 __ Peek(x1, (arg_count + 1) * kXRegSize);
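  // The call IC expects the target function in x1 and the feedback vector
  // slot index, as a smi, in x3.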
2654 // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
2658 RecordJSReturnSite(expr);
2659 // Restore context register.
2660 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}
2665 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2666 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }
  // Prepare to push the enclosing function.
2675 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2676 // Prepare to push the receiver of the enclosing function.
2677 int receiver_offset = 2 + info_->scope()->num_parameters();
2678 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2680 // Prepare to push the language mode.
2681 __ Mov(x12, Smi::FromInt(strict_mode()));
  // Prepare to push the start position of the scope the call resides in.
2683 __ Mov(x13, Smi::FromInt(scope()->start_position()));
2686 __ Push(x9, x10, x11, x12, x13);
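  // Together with the copy of the callee pushed by VisitCall, these five
  // values make up the six arguments kResolvePossiblyDirectEval consumes.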
2688 // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}
2693 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
2694 DCHECK(super_ref != NULL);
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(x0);
  __ CallRuntime(Runtime::kGetPrototype, 1);
}
2701 void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
2708 Comment cmnt(masm_, "[ Call");
2709 Expression* callee = expr->expression();
2710 Call::CallType call_type = expr->GetCallType(isolate());
2712 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2713 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2714 // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    {
      PreservePositionScope pos_scope(masm()->positions_recorder());
2722 VisitForStackValue(callee);
2723 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2724 __ Push(x10); // Reserved receiver slot.
2726 // Push the arguments.
2727 for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ Peek(x10, (arg_count + 1) * kPointerSize);
      __ Push(x10);
      EmitResolvePossiblyDirectEval(arg_count);
2737 // The runtime call returns a pair of values in x0 (function) and
2738 // x1 (receiver). Touch up the stack with the right values.
      __ PokePair(x1, x0, arg_count * kPointerSize);
    }
    PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2744 // Record source position for debugger.
2745 SetSourcePosition(expr->position());
2747 // Call the evaluated function.
2748 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
2751 RecordJSReturnSite(expr);
2752 // Restore context register.
2753 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2754 context()->DropAndPlug(1, x0);
2756 } else if (call_type == Call::GLOBAL_CALL) {
2757 EmitCallWithLoadIC(expr);
2759 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2760 // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
2765 // Generate code for loading from variables potentially shadowed
2766 // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
2772 // and the object holding it (returned in x1).
2773 __ Mov(x10, Operand(proxy->name()));
2774 __ Push(context_register(), x10);
2775 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2776 __ Push(x0, x1); // Receiver, function.
2777 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2779 // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }
2794 // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
2797 } else if (call_type == Call::PROPERTY_CALL) {
2798 Property* property = callee->AsProperty();
2799 bool is_named_call = property->key()->IsPropertyName();
2800 if (property->IsSuperAccess()) {
2801 if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
2817 } else if (call_type == Call::SUPER_CALL) {
2818 SuperReference* super_ref = callee->AsSuperReference();
2819 EmitLoadSuperConstructor(super_ref);
2820 __ Push(result_register());
2821 VisitForStackValue(super_ref->this_var());
    EmitCall(expr, CallICState::METHOD);
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
2825 // Call to an arbitrary expression not handled specially above.
2826 { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCall(expr);
  }
#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
2842 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2843 Comment cmnt(masm_, "[ CallNew");
2844 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // used.
2851 if (expr->expression()->IsSuperReference()) {
2852 EmitLoadSuperConstructor(expr->expression()->AsSuperReference());
    __ Push(result_register());
  } else {
    VisitForStackValue(expr->expression());
  }
2858 // Push the arguments ("left-to-right") on the stack.
2859 ZoneList<Expression*>* args = expr->arguments();
2860 int arg_count = args->length();
2861 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
2865 // Call the construct call builtin that handles allocation and
2866 // constructor invocation.
2867 SetSourcePosition(expr->position());
2869 // Load function and argument count into x1 and x0.
2870 __ Mov(x0, arg_count);
2871 __ Peek(x1, arg_count * kXRegSize);
2873 // Record call targets in unoptimized code.
2874 if (FLAG_pretenuring_call_new) {
2875 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2876 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }
2880 __ LoadObject(x2, FeedbackVector());
2881 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2883 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2884 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2885 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2891 ZoneList<Expression*>* args = expr->arguments();
2892 DCHECK(args->length() == 1);
2894 VisitForAccumulatorValue(args->at(0));
2896 Label materialize_true, materialize_false;
2897 Label* if_true = NULL;
2898 Label* if_false = NULL;
2899 Label* fall_through = NULL;
2900 context()->PrepareTest(&materialize_true, &materialize_false,
2901 &if_true, &if_false, &fall_through);
2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2904 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
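  // TestAndSplit branches to the first label when all the mask bits are
  // clear; a clear tag bit is exactly the smi case.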
  context()->Plug(if_true, if_false);
}
2910 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2911 ZoneList<Expression*>* args = expr->arguments();
2912 DCHECK(args->length() == 1);
2914 VisitForAccumulatorValue(args->at(0));
2916 Label materialize_true, materialize_false;
2917 Label* if_true = NULL;
2918 Label* if_false = NULL;
2919 Label* fall_through = NULL;
2920 context()->PrepareTest(&materialize_true, &materialize_false,
2921 &if_true, &if_false, &fall_through);
2923 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
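  // One combined test: the value is a non-negative smi iff both the tag bit
  // and the smi payload's sign bit are clear.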
2925 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2926 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2933 ZoneList<Expression*>* args = expr->arguments();
2934 DCHECK(args->length() == 1);
2936 VisitForAccumulatorValue(args->at(0));
2938 Label materialize_true, materialize_false;
2939 Label* if_true = NULL;
2940 Label* if_false = NULL;
2941 Label* fall_through = NULL;
2942 context()->PrepareTest(&materialize_true, &materialize_false,
2943 &if_true, &if_false, &fall_through);
2945 __ JumpIfSmi(x0, if_false);
2946 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2947 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2948 // Undetectable objects behave like undefined when tested with typeof.
2949 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2950 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2951 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2955 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2956 Split(le, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2962 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2963 ZoneList<Expression*>* args = expr->arguments();
2964 DCHECK(args->length() == 1);
2966 VisitForAccumulatorValue(args->at(0));
2968 Label materialize_true, materialize_false;
2969 Label* if_true = NULL;
2970 Label* if_false = NULL;
2971 Label* fall_through = NULL;
2972 context()->PrepareTest(&materialize_true, &materialize_false,
2973 &if_true, &if_false, &fall_through);
2975 __ JumpIfSmi(x0, if_false);
2976 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2977 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2978 Split(ge, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2984 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2985 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2986 ZoneList<Expression*>* args = expr->arguments();
2987 DCHECK(args->length() == 1);
2989 VisitForAccumulatorValue(args->at(0));
2991 Label materialize_true, materialize_false;
2992 Label* if_true = NULL;
2993 Label* if_false = NULL;
2994 Label* fall_through = NULL;
2995 context()->PrepareTest(&materialize_true, &materialize_false,
2996 &if_true, &if_false, &fall_through);
2998 __ JumpIfSmi(x0, if_false);
2999 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3000 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3001 __ Tst(x11, 1 << Map::kIsUndetectable);
3002 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3003 Split(ne, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3009 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3010 CallRuntime* expr) {
3011 ZoneList<Expression*>* args = expr->arguments();
3012 DCHECK(args->length() == 1);
3013 VisitForAccumulatorValue(args->at(0));
3015 Label materialize_true, materialize_false, skip_lookup;
3016 Label* if_true = NULL;
3017 Label* if_false = NULL;
3018 Label* fall_through = NULL;
3019 context()->PrepareTest(&materialize_true, &materialize_false,
3020 &if_true, &if_false, &fall_through);
3022 Register object = x0;
3023 __ AssertNotSmi(object);
  Register map = x10;
  Register bitfield2 = x11;
3027 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3028 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3029 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3031 // Check for fast case object. Generate false result for slow case object.
3032 Register props = x12;
3033 Register props_map = x12;
3034 Register hash_table_map = x13;
3035 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3036 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
3037 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);
3041 // Look for valueOf name in the descriptor array, and indicate false if found.
3042 // Since we omit an enumeration index check, if it is added via a transition
  // that shares its descriptor array, this is a false positive.
  Label loop, done;

  // Skip loop if no descriptors are valid.
3047 Register descriptors = x12;
3048 Register descriptors_length = x13;
3049 __ NumberOfOwnDescriptors(descriptors_length, map);
3050 __ Cbz(descriptors_length, &done);
3052 __ LoadInstanceDescriptors(map, descriptors);
3054 // Calculate the end of the descriptor array.
3055 Register descriptors_end = x14;
3056 __ Mov(x15, DescriptorArray::kDescriptorSize);
3057 __ Mul(descriptors_length, descriptors_length, x15);
3058 // Calculate location of the first key name.
3059 __ Add(descriptors, descriptors,
3060 DescriptorArray::kFirstOffset - kHeapObjectTag);
3061 // Calculate the end of the descriptor array.
3062 __ Add(descriptors_end, descriptors,
3063 Operand(descriptors_length, LSL, kPointerSizeLog2));
3065 // Loop through all the keys in the descriptor array. If one of these is the
3066 // string "valueOf" the result is false.
3067 Register valueof_string = x1;
3068 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);

  __ Bind(&done);
3079 // Set the bit in the map to indicate that there is no local valueOf field.
3080 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3081 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3082 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3084 __ Bind(&skip_lookup);
3086 // If a valueOf property is not found on the object check that its prototype
3087 // is the unmodified String prototype. If not result is false.
3088 Register prototype = x1;
3089 Register global_idx = x2;
3090 Register native_context = x2;
3091 Register string_proto = x3;
3092 Register proto_map = x4;
3093 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3094 __ JumpIfSmi(prototype, if_false);
3095 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
3096 __ Ldr(global_idx, GlobalObjectMemOperand());
3097 __ Ldr(native_context,
3098 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
3099 __ Ldr(string_proto,
3100 ContextMemOperand(native_context,
3101 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3102 __ Cmp(proto_map, string_proto);
3104 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3105 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3111 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3112 ZoneList<Expression*>* args = expr->arguments();
3113 DCHECK(args->length() == 1);
3115 VisitForAccumulatorValue(args->at(0));
3117 Label materialize_true, materialize_false;
3118 Label* if_true = NULL;
3119 Label* if_false = NULL;
3120 Label* fall_through = NULL;
3121 context()->PrepareTest(&materialize_true, &materialize_false,
3122 &if_true, &if_false, &fall_through);
3124 __ JumpIfSmi(x0, if_false);
3125 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3126 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3127 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3133 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3134 ZoneList<Expression*>* args = expr->arguments();
3135 DCHECK(args->length() == 1);
3137 VisitForAccumulatorValue(args->at(0));
3139 Label materialize_true, materialize_false;
3140 Label* if_true = NULL;
3141 Label* if_false = NULL;
3142 Label* fall_through = NULL;
3143 context()->PrepareTest(&materialize_true, &materialize_false,
3144 &if_true, &if_false, &fall_through);
3146 // Only a HeapNumber can be -0.0, so return false if we have something else.
3147 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3149 // Test the bit pattern.
3150 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3151 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
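  // Only 0x8000000000000000 (INT64_MIN, the bit pattern of -0.0) sets the
  // overflow flag when 1 is subtracted, so 'vs' identifies -0.0 exactly.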
3153 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3154 Split(vs, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3160 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3161 ZoneList<Expression*>* args = expr->arguments();
3162 DCHECK(args->length() == 1);
3164 VisitForAccumulatorValue(args->at(0));
3166 Label materialize_true, materialize_false;
3167 Label* if_true = NULL;
3168 Label* if_false = NULL;
3169 Label* fall_through = NULL;
3170 context()->PrepareTest(&materialize_true, &materialize_false,
3171 &if_true, &if_false, &fall_through);
3173 __ JumpIfSmi(x0, if_false);
3174 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3175 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3176 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3182 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3183 ZoneList<Expression*>* args = expr->arguments();
3184 DCHECK(args->length() == 1);
3186 VisitForAccumulatorValue(args->at(0));
3188 Label materialize_true, materialize_false;
3189 Label* if_true = NULL;
3190 Label* if_false = NULL;
3191 Label* fall_through = NULL;
3192 context()->PrepareTest(&materialize_true, &materialize_false,
3193 &if_true, &if_false, &fall_through);
3195 __ JumpIfSmi(x0, if_false);
3196 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3197 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3198 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3204 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3205 ZoneList<Expression*>* args = expr->arguments();
3206 DCHECK(args->length() == 1);
3208 VisitForAccumulatorValue(args->at(0));
3210 Label materialize_true, materialize_false;
3211 Label* if_true = NULL;
3212 Label* if_false = NULL;
3213 Label* fall_through = NULL;
3214 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3215 &if_false, &fall_through);
3217 __ JumpIfSmi(x0, if_false);
  Register map = x10;
  Register type_reg = x11;
3220 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3221 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3222 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3223 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
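  // Unsigned range-check idiom: after subtracting FIRST_JS_PROXY_TYPE, the
  // 'ls' condition accepts every instance type in
  // [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].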
3224 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3225 Split(ls, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3231 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3232 DCHECK(expr->arguments()->length() == 0);
3234 Label materialize_true, materialize_false;
3235 Label* if_true = NULL;
3236 Label* if_false = NULL;
3237 Label* fall_through = NULL;
3238 context()->PrepareTest(&materialize_true, &materialize_false,
3239 &if_true, &if_false, &fall_through);
3241 // Get the frame pointer for the calling frame.
3242 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3244 // Skip the arguments adaptor frame if it exists.
3245 Label check_frame_marker;
3246 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3247 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3248 __ B(ne, &check_frame_marker);
3249 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3251 // Check the marker in the calling frame.
3252 __ Bind(&check_frame_marker);
3253 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3254 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3255 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3256 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3262 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3263 ZoneList<Expression*>* args = expr->arguments();
3264 DCHECK(args->length() == 2);
3266 // Load the two objects into registers and perform the comparison.
3267 VisitForStackValue(args->at(0));
3268 VisitForAccumulatorValue(args->at(1));
3270 Label materialize_true, materialize_false;
3271 Label* if_true = NULL;
3272 Label* if_false = NULL;
3273 Label* fall_through = NULL;
3274 context()->PrepareTest(&materialize_true, &materialize_false,
3275 &if_true, &if_false, &fall_through);
  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3280 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3286 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3287 ZoneList<Expression*>* args = expr->arguments();
3288 DCHECK(args->length() == 1);
3290 // ArgumentsAccessStub expects the key in x1.
3291 VisitForAccumulatorValue(args->at(0));
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(x0);
}
3300 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
3304 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3306 // Check if the calling frame is an arguments adaptor frame.
3307 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3308 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
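  // An arguments adaptor frame sits between caller and callee when the
  // actual argument count differs from the formal parameter count; its
  // context slot holds the ARGUMENTS_ADAPTOR sentinel checked below.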
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
  context()->Plug(x0);
}
3321 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3322 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3323 ZoneList<Expression*>* args = expr->arguments();
3324 DCHECK(args->length() == 1);
3325 Label done, null, function, non_function_constructor;
3327 VisitForAccumulatorValue(args->at(0));
3329 // If the object is a smi, we return null.
3330 __ JumpIfSmi(x0, &null);
3332 // Check that the object is a JS object but take special care of JS
3333 // functions to make sure they have 'Function' as their class.
3334 // Assume that there are only two callable types, and one of them is at
3335 // either end of the type range for JS object types. Saves extra comparisons.
3336 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3337 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3338 // x10: object's map.
3339 // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3342 FIRST_SPEC_OBJECT_TYPE + 1);
3343 __ B(eq, &function);
3345 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3346 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3347 LAST_SPEC_OBJECT_TYPE - 1);
3348 __ B(eq, &function);
3349 // Assume that there is no larger type.
3350 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3352 // Check if the constructor in the map is a JS function.
3353 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3354 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3355 &non_function_constructor);
3357 // x12 now contains the constructor function. Grab the
3358 // instance class name from there.
3359 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
  __ B(&done);
3369 // Objects with a non-function constructor have class 'Object'.
3370 __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);
  context()->Plug(x0);
}
3385 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3386 // Load the arguments on the stack and call the stub.
3387 SubStringStub stub(isolate());
3388 ZoneList<Expression*>* args = expr->arguments();
3389 DCHECK(args->length() == 3);
3390 VisitForStackValue(args->at(0));
3391 VisitForStackValue(args->at(1));
3392 VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(x0);
}
3398 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3399 // Load the arguments on the stack and call the stub.
3400 RegExpExecStub stub(isolate());
3401 ZoneList<Expression*>* args = expr->arguments();
3402 DCHECK(args->length() == 4);
3403 VisitForStackValue(args->at(0));
3404 VisitForStackValue(args->at(1));
3405 VisitForStackValue(args->at(2));
3406 VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(x0);
}
3412 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3413 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3414 ZoneList<Expression*>* args = expr->arguments();
3415 DCHECK(args->length() == 1);
3416 VisitForAccumulatorValue(args->at(0)); // Load the object.
  Label done;
  // If the object is a smi return the object.
3420 __ JumpIfSmi(x0, &done);
3421 // If the object is not a value type, return the object.
3422 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
  context()->Plug(x0);
}
3430 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3431 ZoneList<Expression*>* args = expr->arguments();
3432 DCHECK(args->length() == 2);
3433 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3434 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3436 VisitForAccumulatorValue(args->at(0)); // Load the object.
3438 Label runtime, done, not_date_object;
3439 Register object = x0;
3440 Register result = x0;
3441 Register stamp_addr = x10;
3442 Register stamp_cache = x11;
  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ B(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(x10, stamp);
      __ Ldr(stamp_addr, MemOperand(x10));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                                 kPointerSize * index->value()));
      __ B(&done);
    }
    __ Bind(&runtime);
    __ Mov(x1, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ B(&done);
  }

  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ Bind(&done);
  context()->Plug(x0);
}
3476 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3477 ZoneList<Expression*>* args = expr->arguments();
3478 DCHECK_EQ(3, args->length());
3480 Register string = x0;
3481 Register index = x1;
3482 Register value = x2;
3483 Register scratch = x10;
3485 VisitForStackValue(args->at(0)); // index
3486 VisitForStackValue(args->at(1)); // value
3487 VisitForAccumulatorValue(args->at(2)); // string
3488 __ Pop(value, index);
3490 if (FLAG_debug_code) {
3491 __ AssertSmi(value, kNonSmiValue);
3492 __ AssertSmi(index, kNonSmiIndex);
3493 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }
  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}
3506 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3507 ZoneList<Expression*>* args = expr->arguments();
3508 DCHECK_EQ(3, args->length());
3510 Register string = x0;
3511 Register index = x1;
3512 Register value = x2;
3513 Register scratch = x10;
3515 VisitForStackValue(args->at(0)); // index
3516 VisitForStackValue(args->at(1)); // value
3517 VisitForAccumulatorValue(args->at(2)); // string
3518 __ Pop(value, index);
3520 if (FLAG_debug_code) {
3521 __ AssertSmi(value, kNonSmiValue);
3522 __ AssertSmi(index, kNonSmiIndex);
3523 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }
  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}
3536 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3537 // Load the arguments on the stack and call the MathPow stub.
3538 ZoneList<Expression*>* args = expr->arguments();
3539 DCHECK(args->length() == 2);
3540 VisitForStackValue(args->at(0));
3541 VisitForStackValue(args->at(1));
3542 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(x0);
}
3548 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3549 ZoneList<Expression*>* args = expr->arguments();
3550 DCHECK(args->length() == 2);
3551 VisitForStackValue(args->at(0)); // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;
  // If the object is a smi, return the value.
3559 __ JumpIfSmi(x1, &done);
3561 // If the object is not a value type, return the value.
3562 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3565 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3566 // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}
3577 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3578 ZoneList<Expression*>* args = expr->arguments();
3579 DCHECK_EQ(args->length(), 1);
3581 // Load the argument into x0 and call the stub.
3582 VisitForAccumulatorValue(args->at(0));
3584 NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}
3590 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3591 ZoneList<Expression*>* args = expr->arguments();
3592 DCHECK(args->length() == 1);
3594 VisitForAccumulatorValue(args->at(0));
  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);
3604 NopRuntimeCallHelper call_helper;
3605 generator.GenerateSlow(masm_, call_helper);
  __ Bind(&done);
  context()->Plug(result);
}
3612 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3613 ZoneList<Expression*>* args = expr->arguments();
3614 DCHECK(args->length() == 2);
3616 VisitForStackValue(args->at(0));
3617 VisitForAccumulatorValue(args->at(1));
3619 Register object = x1;
3620 Register index = x0;
  Register result = x3;
  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);
3638 __ Bind(&index_out_of_range);
3639 // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);
3643 __ Bind(&need_conversion);
3644 // Load the undefined value into the result register, which will
3645 // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);
3649 NopRuntimeCallHelper call_helper;
3650 generator.GenerateSlow(masm_, call_helper);
  __ Bind(&done);
  context()->Plug(result);
}
3657 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3658 ZoneList<Expression*>* args = expr->arguments();
3659 DCHECK(args->length() == 2);
3661 VisitForStackValue(args->at(0));
3662 VisitForAccumulatorValue(args->at(1));
3664 Register object = x1;
3665 Register index = x0;
  Register result = x0;
  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);
3684 __ Bind(&index_out_of_range);
3685 // When the index is out of range, the spec requires us to return
3686 // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);
3690 __ Bind(&need_conversion);
3691 // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);
3695 NopRuntimeCallHelper call_helper;
3696 generator.GenerateSlow(masm_, call_helper);
  __ Bind(&done);
  context()->Plug(result);
}
3703 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3704 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3705 ZoneList<Expression*>* args = expr->arguments();
3706 DCHECK_EQ(2, args->length());
3708 VisitForStackValue(args->at(0));
3709 VisitForAccumulatorValue(args->at(1));
  __ Pop(x1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);

  context()->Plug(x0);
}
3719 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3720 ZoneList<Expression*>* args = expr->arguments();
3721 DCHECK_EQ(2, args->length());
3722 VisitForStackValue(args->at(0));
3723 VisitForStackValue(args->at(1));
3725 StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}
3731 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3732 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3733 ZoneList<Expression*>* args = expr->arguments();
3734 DCHECK(args->length() >= 2);
3736 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3737 for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.
3742 Label runtime, done;
3743 // Check for non-function argument (including proxy).
3744 __ JumpIfSmi(x0, &runtime);
3745 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, x0);
  ParameterCount count(arg_count);
3750 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ Bind(&done);

  context()->Plug(x0);
}
3763 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3764 RegExpConstructResultStub stub(isolate());
3765 ZoneList<Expression*>* args = expr->arguments();
3766 DCHECK(args->length() == 3);
3767 VisitForStackValue(args->at(0));
3768 VisitForStackValue(args->at(1));
3769 VisitForAccumulatorValue(args->at(2));
3770 __ Pop(x1, x2);
3771 __ CallStub(&stub);
3772 context()->Plug(x0);
3773 }
3776 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3777 ZoneList<Expression*>* args = expr->arguments();
3778 DCHECK_EQ(2, args->length());
3779 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3780 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3782 Handle<FixedArray> jsfunction_result_caches(
3783 isolate()->native_context()->jsfunction_result_caches());
3784 if (jsfunction_result_caches->length() <= cache_id) {
3785 __ Abort(kAttemptToUseUndefinedCache);
3786 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3787 context()->Plug(x0);
3788 return;
3789 }
3791 VisitForAccumulatorValue(args->at(1));
3793 Register key = x0;
3794 Register cache = x1;
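// Note: a sketch of the cache layout this fast path assumes. A
// JSFunctionResultCache is a FixedArray with a few bookkeeping slots
// (including the "finger", the index of the most recently hit entry),
// followed by (key, value) pairs:
//   [ ...header..., finger, ..., key_0, value_0, key_1, value_1, ... ]
// The generated code only probes the pair under the finger; any miss
// falls through to the %GetFromCache runtime call below.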
3795 __ Ldr(cache, GlobalObjectMemOperand());
3796 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3797 __ Ldr(cache, ContextMemOperand(cache,
3798 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3799 __ Ldr(cache,
3800        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3802 Label done;
3803 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3804 JSFunctionResultCache::kFingerOffset));
3805 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3806 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3808 // Load the key and data from the cache.
3809 __ Ldp(x2, x3, MemOperand(x3));
3811 __ Cmp(key, x2);
3812 __ CmovX(x0, x3, eq);
3813 __ B(eq, &done);
3815 // Call runtime to perform the lookup.
3816 __ Push(cache, key);
3817 __ CallRuntime(Runtime::kGetFromCache, 2);
3819 __ Bind(&done);
3820 context()->Plug(x0);
3821 }
3824 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments();
3826 VisitForAccumulatorValue(args->at(0));
3828 Label materialize_true, materialize_false;
3829 Label* if_true = NULL;
3830 Label* if_false = NULL;
3831 Label* fall_through = NULL;
3832 context()->PrepareTest(&materialize_true, &materialize_false,
3833 &if_true, &if_false, &fall_through);
3835 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3836 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3837 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3838 Split(eq, if_true, if_false, fall_through);
3840 context()->Plug(if_true, if_false);
3841 }
3844 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3845 ZoneList<Expression*>* args = expr->arguments();
3846 DCHECK(args->length() == 1);
3847 VisitForAccumulatorValue(args->at(0));
3849 __ AssertString(x0);
3851 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3852 __ IndexFromHash(x10, x0);
3854 context()->Plug(x0);
3855 }
3858 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3859 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
3861 ZoneList<Expression*>* args = expr->arguments();
3862 DCHECK(args->length() == 2);
3863 VisitForStackValue(args->at(1));
3864 VisitForAccumulatorValue(args->at(0));
3866 Register array = x0;
3867 Register result = x0;
3868 Register elements = x1;
3869 Register element = x2;
3870 Register separator = x3;
3871 Register array_length = x4;
3872 Register result_pos = x5;
3873 Register map = x6;
3874 Register string_length = x10;
3875 Register elements_end = x11;
3876 Register string = x12;
3877 Register scratch1 = x13;
3878 Register scratch2 = x14;
3879 Register scratch3 = x7;
3880 Register separator_length = x15;
3882 Label bailout, done, one_char_separator, long_separator,
3883 non_trivial_array, not_size_one_array, loop,
3884 empty_separator_loop, one_char_separator_loop,
3885 one_char_separator_loop_entry, long_separator_loop;
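// A rough sketch of this fast path in JS-like pseudocode (illustration
// only):
//   if (!IsJSArray(array) || !HasFastElements(array)) bailout();
//   if (array.length == 0) return "";
//   total = 0;
//   for (s of elements) {
//     if (!IsFlatOneByteString(s)) bailout();
//     total += s.length;
//   }
//   if (array.length == 1) return elements[0];
//   total += (array.length - 1) * separator.length;
//   result = AllocateOneByteString(total);
//   // ...then one of three copy loops, specialized on
//   // separator.length == 0, == 1, or > 1.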
3887 // The separator operand is on the stack.
3888 __ Pop(separator);
3890 // Check that the array is a JSArray.
3891 __ JumpIfSmi(array, &bailout);
3892 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3894 // Check that the array has fast elements.
3895 __ CheckFastElements(map, scratch1, &bailout);
3897 // If the array has length zero, return the empty string.
3898 // Load and untag the length of the array.
3899 // It is an unsigned value, so we can skip sign extension.
3900 // We assume little endianness.
3901 __ Ldrsw(array_length,
3902 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3903 __ Cbnz(array_length, &non_trivial_array);
3904 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3905 __ B(&done);
3907 __ Bind(&non_trivial_array);
3908 // Get the FixedArray containing array's elements.
3909 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3911 // Check that all array elements are sequential one-byte strings, and
3912 // accumulate the sum of their lengths.
3913 __ Mov(string_length, 0);
3914 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3915 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3916 // Loop condition: while (element < elements_end).
3917 // Live values in registers:
3918 // elements: Fixed array of strings.
3919 // array_length: Length of the fixed array of strings (not smi)
3920 // separator: Separator string
3921 // string_length: Accumulated sum of string lengths (not smi).
3922 // element: Current array element.
3923 // elements_end: Array end.
3924 if (FLAG_debug_code) {
3925 __ Cmp(array_length, 0);
3926 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3927 }
3928 __ Bind(&loop);
3929 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3930 __ JumpIfSmi(string, &bailout);
3931 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3932 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3933 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3934 __ Ldrsw(scratch1,
3935          UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3936 __ Adds(string_length, string_length, scratch1);
3937 __ B(vs, &bailout);
3938 __ Cmp(element, elements_end);
3939 __ B(lt, &loop);
3941 // If array_length is 1, return elements[0], a string.
3942 __ Cmp(array_length, 1);
3943 __ B(ne, &not_size_one_array);
3944 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3945 __ B(&done);
3947 __ Bind(&not_size_one_array);
3949 // Live values in registers:
3950 // separator: Separator string
3951 // array_length: Length of the array (not smi).
3952 // string_length: Sum of string lengths (not smi).
3953 // elements: FixedArray of strings.
3955 // Check that the separator is a flat one-byte string.
3956 __ JumpIfSmi(separator, &bailout);
3957 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3958 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3959 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3961 // Add (separator length times array_length) - separator length to the
3962 // string_length to get the length of the result string.
3963 // Load the separator length as untagged.
3964 // We assume little endianness, and that the length is positive.
3965 __ Ldrsw(separator_length,
3966 UntagSmiFieldMemOperand(separator,
3967 SeqOneByteString::kLengthOffset));
3968 __ Sub(string_length, string_length, separator_length);
3969 __ Umaddl(string_length, array_length.W(), separator_length.W(),
3970           string_length);
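// Note: together with the Sub above, the Umaddl computes
//   string_length = sum(lengths) - separator_length
//                   + array_length * separator_length
//                 = sum(lengths) + (array_length - 1) * separator_length,
// which is the exact character count of elements.join(separator) for a
// non-empty array.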
3972 // Get first element in the array.
3973 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3974 // Live values in registers:
3975 // element: First array element
3976 // separator: Separator string
3977 // string_length: Length of result string (not smi)
3978 // array_length: Length of the array (not smi).
3979 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
3980                          &bailout);
3982 // Prepare for looping. Set up elements_end to end of the array. Set
3983 // result_pos to the position of the result where to write the first
3984 // character.
3985 // TODO(all): useless unless AllocateOneByteString trashes the register.
3986 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3987 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3989 // Check the length of the separator.
3990 __ Cmp(separator_length, 1);
3991 __ B(eq, &one_char_separator);
3992 __ B(gt, &long_separator);
3994 // Empty separator case
3995 __ Bind(&empty_separator_loop);
3996 // Live values in registers:
3997 // result_pos: the position to which we are currently copying characters.
3998 // element: Current array element.
3999 // elements_end: Array end.
4001 // Copy next array element to the result.
4002 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4003 __ Ldrsw(string_length,
4004 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4005 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4006 __ CopyBytes(result_pos, string, string_length, scratch1);
4007 __ Cmp(element, elements_end);
4008 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4009 __ B(&done);
4011 // One-character separator case
4012 __ Bind(&one_char_separator);
4013 // Replace separator with its one-byte character value.
4014 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4015 // Jump into the loop after the code that copies the separator, so the first
4016 // element is not preceded by a separator
4017 __ B(&one_char_separator_loop_entry);
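// Note: a loop-rotation trick. Entering at one_char_separator_loop_entry
// copies the first element before any separator byte has been stored, so
// N elements end up joined by exactly N - 1 separators.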
4019 __ Bind(&one_char_separator_loop);
4020 // Live values in registers:
4021 // result_pos: the position to which we are currently copying characters.
4022 // element: Current array element.
4023 // elements_end: Array end.
4024 // separator: Single separator one-byte char (in lower byte).
4026 // Copy the separator character to the result.
4027 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4029 // Copy next array element to the result.
4030 __ Bind(&one_char_separator_loop_entry);
4031 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4032 __ Ldrsw(string_length,
4033 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4034 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4035 __ CopyBytes(result_pos, string, string_length, scratch1);
4036 __ Cmp(element, elements_end);
4037 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4038 __ B(&done);
4040 // Long separator case (separator is more than one character). Entry is at the
4041 // label long_separator below.
4042 __ Bind(&long_separator_loop);
4043 // Live values in registers:
4044 // result_pos: the position to which we are currently copying characters.
4045 // element: Current array element.
4046 // elements_end: Array end.
4047 // separator: Separator string.
4049 // Copy the separator to the result.
4050 // TODO(all): hoist next two instructions.
4051 __ Ldrsw(string_length,
4052 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4053 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4054 __ CopyBytes(result_pos, string, string_length, scratch1);
4056 __ Bind(&long_separator);
4057 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4058 __ Ldrsw(string_length,
4059 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4060 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4061 __ CopyBytes(result_pos, string, string_length, scratch1);
4062 __ Cmp(element, elements_end);
4063 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4064 __ B(&done);
4066 __ Bind(&bailout);
4067 // Returning undefined will force slower code to handle it.
4068 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4069 __ Bind(&done);
4070 context()->Plug(result);
4071 }
4074 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4075 DCHECK(expr->arguments()->length() == 0);
4076 ExternalReference debug_is_active =
4077 ExternalReference::debug_is_active_address(isolate());
4078 __ Mov(x10, debug_is_active);
4079 __ Ldrb(x0, MemOperand(x10));
4080 __ SmiTag(x0);
4081 context()->Plug(x0);
4082 }
4085 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4086 if (expr->function() != NULL &&
4087 expr->function()->intrinsic_type == Runtime::INLINE) {
4088 Comment cmnt(masm_, "[ InlineRuntimeCall");
4089 EmitInlineRuntimeCall(expr);
4090 return;
4091 }
4093 Comment cmnt(masm_, "[ CallRuntime");
4094 ZoneList<Expression*>* args = expr->arguments();
4095 int arg_count = args->length();
4097 if (expr->is_jsruntime()) {
4098 // Push the builtins object as the receiver.
4099 __ Ldr(x10, GlobalObjectMemOperand());
4100 __ Ldr(LoadDescriptor::ReceiverRegister(),
4101 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4102 __ Push(LoadDescriptor::ReceiverRegister());
4104 // Load the function from the receiver.
4105 Handle<String> name = expr->name();
4106 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4107 if (FLAG_vector_ics) {
4108 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4109 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4110 CallLoadIC(NOT_CONTEXTUAL);
4111 } else {
4112 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4113 }
4115 // Push the target function under the receiver.
4116 __ Pop(x10);
4117 __ Push(x0, x10);
4119 int arg_count = args->length();
4120 for (int i = 0; i < arg_count; i++) {
4121 VisitForStackValue(args->at(i));
4122 }
4124 // Record source position of the IC call.
4125 SetSourcePosition(expr->position());
4126 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4127 __ Peek(x1, (arg_count + 1) * kPointerSize);
4128 __ CallStub(&stub);
4130 // Restore context register.
4131 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4133 context()->DropAndPlug(1, x0);
4134 } else {
4135 // Push the arguments ("left-to-right").
4136 for (int i = 0; i < arg_count; i++) {
4137 VisitForStackValue(args->at(i));
4138 }
4140 // Call the C runtime function.
4141 __ CallRuntime(expr->function(), arg_count);
4142 context()->Plug(x0);
4143 }
4144 }
4147 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4148 switch (expr->op()) {
4149 case Token::DELETE: {
4150 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4151 Property* property = expr->expression()->AsProperty();
4152 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4154 if (property != NULL) {
4155 VisitForStackValue(property->obj());
4156 VisitForStackValue(property->key());
4157 __ Mov(x10, Smi::FromInt(strict_mode()));
4158 __ Push(x10);
4159 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4160 context()->Plug(x0);
4161 } else if (proxy != NULL) {
4162 Variable* var = proxy->var();
4163 // Delete of an unqualified identifier is disallowed in strict mode
4164 // but "delete this" is allowed.
4165 DCHECK(strict_mode() == SLOPPY || var->is_this());
4166 if (var->IsUnallocated()) {
4167 __ Ldr(x12, GlobalObjectMemOperand());
4168 __ Mov(x11, Operand(var->name()));
4169 __ Mov(x10, Smi::FromInt(SLOPPY));
4170 __ Push(x12, x11, x10);
4171 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4172 context()->Plug(x0);
4173 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4174 // Result of deleting non-global, non-dynamic variables is false.
4175 // The subexpression does not have side effects.
4176 context()->Plug(var->is_this());
4177 } else {
4178 // Non-global variable. Call the runtime to try to delete from the
4179 // context where the variable was introduced.
4180 __ Mov(x2, Operand(var->name()));
4181 __ Push(context_register(), x2);
4182 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4183 context()->Plug(x0);
4184 }
4185 } else {
4186 // Result of deleting non-property, non-variable reference is true.
4187 // The subexpression may have side effects.
4188 VisitForEffect(expr->expression());
4189 context()->Plug(true);
4190 }
4191 break;
4192 }
4194 case Token::VOID: {
4195 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4196 VisitForEffect(expr->expression());
4197 context()->Plug(Heap::kUndefinedValueRootIndex);
4198 break;
4199 }
4200 case Token::NOT: {
4201 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4202 if (context()->IsEffect()) {
4203 // Unary NOT has no side effects so it's only necessary to visit the
4204 // subexpression. Match the optimizing compiler by not branching.
4205 VisitForEffect(expr->expression());
4206 } else if (context()->IsTest()) {
4207 const TestContext* test = TestContext::cast(context());
4208 // The labels are swapped for the recursive call.
4209 VisitForControl(expr->expression(),
4210                 test->false_label(),
4211                 test->true_label(),
4212                 test->fall_through());
4213 context()->Plug(test->true_label(), test->false_label());
4215 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4216 // TODO(jbramley): This could be much more efficient using (for
4217 // example) the CSEL instruction.
4218 Label materialize_true, materialize_false, done;
4219 VisitForControl(expr->expression(),
4220                 &materialize_false,
4221                 &materialize_true,
4222                 &materialize_true);
4224 __ Bind(&materialize_true);
4225 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4226 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4227 __ B(&done);
4229 __ Bind(&materialize_false);
4230 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4231 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4232 __ Bind(&done);
4235 if (context()->IsStackValue()) {
4236 __ Push(result_register());
4237 }
4238 }
4239 break;
4240 }
4241 case Token::TYPEOF: {
4242 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4243 {
4244 StackValueContext context(this);
4245 VisitForTypeofValue(expr->expression());
4246 }
4247 __ CallRuntime(Runtime::kTypeof, 1);
4248 context()->Plug(x0);
4249 break;
4250 }
4252 default:
4253 UNREACHABLE();
4254 }
4255 }
4257 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4258 DCHECK(expr->expression()->IsValidReferenceExpression());
4260 Comment cmnt(masm_, "[ CountOperation");
4261 SetSourcePosition(expr->position());
4263 Property* prop = expr->expression()->AsProperty();
4264 LhsKind assign_type = GetAssignType(prop);
4266 // Evaluate expression and get value.
4267 if (assign_type == VARIABLE) {
4268 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4269 AccumulatorValueContext context(this);
4270 EmitVariableLoad(expr->expression()->AsVariableProxy());
4271 } else {
4272 // Reserve space for result of postfix operation.
4273 if (expr->is_postfix() && !context()->IsEffect()) {
4274 __ Push(xzr);
4275 }
4276 switch (assign_type) {
4277 case NAMED_PROPERTY: {
4278 // Put the object both on the stack and in the register.
4279 VisitForStackValue(prop->obj());
4280 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4281 EmitNamedPropertyLoad(prop);
4282 break;
4283 }
4285 case NAMED_SUPER_PROPERTY: {
4286 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4287 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4288 __ Push(result_register());
4289 const Register scratch = x10;
4290 __ Peek(scratch, kPointerSize);
4291 __ Push(scratch, result_register());
4292 EmitNamedSuperPropertyLoad(prop);
4293 break;
4294 }
4296 case KEYED_SUPER_PROPERTY: {
4297 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4298 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4299 __ Push(result_register());
4300 VisitForAccumulatorValue(prop->key());
4301 __ Push(result_register());
4302 const Register scratch1 = x10;
4303 const Register scratch2 = x11;
4304 __ Peek(scratch1, 2 * kPointerSize);
4305 __ Peek(scratch2, kPointerSize);
4306 __ Push(scratch1, scratch2, result_register());
4307 EmitKeyedSuperPropertyLoad(prop);
4308 break;
4309 }
4311 case KEYED_PROPERTY: {
4312 VisitForStackValue(prop->obj());
4313 VisitForStackValue(prop->key());
4314 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4315 __ Peek(LoadDescriptor::NameRegister(), 0);
4316 EmitKeyedPropertyLoad(prop);
4317 break;
4318 }
4320 case VARIABLE:
4321 UNREACHABLE();
4322 }
4323 }
4325 // We need a second deoptimization point after loading the value
4326 // in case evaluating the property load may have a side effect.
4327 if (assign_type == VARIABLE) {
4328 PrepareForBailout(expr->expression(), TOS_REG);
4329 } else {
4330 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4331 }
4333 // Inline smi case if we are in a loop.
4334 Label stub_call, done;
4335 JumpPatchSite patch_site(masm_);
4337 int count_value = expr->op() == Token::INC ? 1 : -1;
4338 if (ShouldInlineSmiCase(expr->op())) {
4339 Label slow;
4340 patch_site.EmitJumpIfNotSmi(x0, &slow);
4342 // Save result for postfix expressions.
4343 if (expr->is_postfix()) {
4344 if (!context()->IsEffect()) {
4345 // Save the result on the stack. If we have a named or keyed property we
4346 // store the result under the receiver that is currently on top of the
4347 // stack.
4348 switch (assign_type) {
4349 case VARIABLE:
4350 __ Push(x0);
4351 break;
4352 case NAMED_PROPERTY:
4353 __ Poke(x0, kPointerSize);
4354 break;
4355 case NAMED_SUPER_PROPERTY:
4356 __ Poke(x0, kPointerSize * 2);
4357 break;
4358 case KEYED_PROPERTY:
4359 __ Poke(x0, kPointerSize * 2);
4360 break;
4361 case KEYED_SUPER_PROPERTY:
4362 __ Poke(x0, kPointerSize * 3);
4363 break;
4364 }
4365 }
4366 }
4368 __ Adds(x0, x0, Smi::FromInt(count_value));
4369 __ B(vc, &done);
4370 // Call stub. Undo operation first.
4371 __ Sub(x0, x0, Smi::FromInt(count_value));
4372 __ B(&stub_call);
4373 __ Bind(&slow);
4374 }
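// Note: in the smi fast path above, the increment is applied directly to
// the tagged value (on arm64 a smi keeps its 32-bit payload in the upper
// half of the word, so adding Smi::FromInt(+/-1) adjusts the payload with
// a single Adds). "vc" (no signed overflow) takes the fast exit; on
// overflow the addition is undone and the BinaryOpIC stub performs a full
// conversion and add.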
4375 ToNumberStub convert_stub(isolate());
4376 __ CallStub(&convert_stub);
4378 // Save result for postfix expressions.
4379 if (expr->is_postfix()) {
4380 if (!context()->IsEffect()) {
4381 // Save the result on the stack. If we have a named or keyed property
4382 // we store the result under the receiver that is currently on top
4383 // of the stack.
4384 switch (assign_type) {
4385 case VARIABLE:
4386 __ Push(x0);
4387 break;
4388 case NAMED_PROPERTY:
4389 __ Poke(x0, kXRegSize);
4390 break;
4391 case NAMED_SUPER_PROPERTY:
4392 __ Poke(x0, 2 * kXRegSize);
4393 break;
4394 case KEYED_PROPERTY:
4395 __ Poke(x0, 2 * kXRegSize);
4396 break;
4397 case KEYED_SUPER_PROPERTY:
4398 __ Poke(x0, 3 * kXRegSize);
4399 break;
4400 }
4401 }
4402 }
4404 __ Bind(&stub_call);
4405 __ Mov(x1, x0);
4406 __ Mov(x0, Smi::FromInt(count_value));
4408 // Record position before stub call.
4409 SetSourcePosition(expr->position());
4411 {
4412 Assembler::BlockPoolsScope scope(masm_);
4413 Handle<Code> code =
4414     CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4415 CallIC(code, expr->CountBinOpFeedbackId());
4416 patch_site.EmitPatchInfo();
4417 }
4418 __ Bind(&done);
4420 // Store the value returned in x0.
4421 switch (assign_type) {
4422 case VARIABLE:
4423 if (expr->is_postfix()) {
4424 { EffectContext context(this);
4425 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4426                        Token::ASSIGN);
4427 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4428 context.Plug(x0);
4429 }
4430 // For all contexts except EffectContext we have the result on
4431 // top of the stack.
4432 if (!context()->IsEffect()) {
4433 context()->PlugTOS();
4434 }
4435 } else {
4436 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4437                        Token::ASSIGN);
4438 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4439 context()->Plug(x0);
4440 }
4441 break;
4442 case NAMED_PROPERTY: {
4443 __ Mov(StoreDescriptor::NameRegister(),
4444 Operand(prop->key()->AsLiteral()->value()));
4445 __ Pop(StoreDescriptor::ReceiverRegister());
4446 CallStoreIC(expr->CountStoreFeedbackId());
4447 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4448 if (expr->is_postfix()) {
4449 if (!context()->IsEffect()) {
4450 context()->PlugTOS();
4451 }
4452 } else {
4453 context()->Plug(x0);
4454 }
4455 break;
4456 }
4457 case NAMED_SUPER_PROPERTY: {
4458 EmitNamedSuperPropertyStore(prop);
4459 if (expr->is_postfix()) {
4460 if (!context()->IsEffect()) {
4461 context()->PlugTOS();
4462 }
4463 } else {
4464 context()->Plug(x0);
4465 }
4466 break;
4467 }
4468 case KEYED_SUPER_PROPERTY: {
4469 EmitKeyedSuperPropertyStore(prop);
4470 if (expr->is_postfix()) {
4471 if (!context()->IsEffect()) {
4472 context()->PlugTOS();
4473 }
4474 } else {
4475 context()->Plug(x0);
4476 }
4477 break;
4478 }
4479 case KEYED_PROPERTY: {
4480 __ Pop(StoreDescriptor::NameRegister());
4481 __ Pop(StoreDescriptor::ReceiverRegister());
4482 Handle<Code> ic =
4483     CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4484 CallIC(ic, expr->CountStoreFeedbackId());
4485 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4486 if (expr->is_postfix()) {
4487 if (!context()->IsEffect()) {
4488 context()->PlugTOS();
4489 }
4490 } else {
4491 context()->Plug(x0);
4492 }
4493 break;
4494 }
4495 }
4496 }
4499 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4500 DCHECK(!context()->IsEffect());
4501 DCHECK(!context()->IsTest());
4502 VariableProxy* proxy = expr->AsVariableProxy();
4503 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4504 Comment cmnt(masm_, "Global variable");
4505 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
4506 __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4507 if (FLAG_vector_ics) {
4508 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4509        SmiFromSlot(proxy->VariableFeedbackSlot()));
4510 }
4511 // Use a regular load, not a contextual load, to avoid a reference
4512 // error.
4513 CallLoadIC(NOT_CONTEXTUAL);
4514 PrepareForBailout(expr, TOS_REG);
4515 context()->Plug(x0);
4516 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4517 Label done, slow;
4519 // Generate code for loading from variables potentially shadowed
4520 // by eval-introduced variables.
4521 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4523 __ Bind(&slow);
4524 __ Mov(x0, Operand(proxy->name()));
4525 __ Push(cp, x0);
4526 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4527 PrepareForBailout(expr, TOS_REG);
4528 __ Bind(&done);
4530 context()->Plug(x0);
4531 } else {
4532 // This expression cannot throw a reference error at the top level.
4533 VisitInDuplicateContext(expr);
4534 }
4535 }
4538 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4539 Expression* sub_expr,
4540 Handle<String> check) {
4541 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4542 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4543 Label materialize_true, materialize_false;
4544 Label* if_true = NULL;
4545 Label* if_false = NULL;
4546 Label* fall_through = NULL;
4547 context()->PrepareTest(&materialize_true, &materialize_false,
4548 &if_true, &if_false, &fall_through);
4550 { AccumulatorValueContext context(this);
4551 VisitForTypeofValue(sub_expr);
4552 }
4553 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4555 Factory* factory = isolate()->factory();
4556 if (String::Equals(check, factory->number_string())) {
4557 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4558 __ JumpIfSmi(x0, if_true);
4559 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4560 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4561 Split(eq, if_true, if_false, fall_through);
4562 } else if (String::Equals(check, factory->string_string())) {
4563 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4564 __ JumpIfSmi(x0, if_false);
4565 // Check for undetectable objects => false.
4566 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4567 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4568 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4569                 fall_through);
4570 } else if (String::Equals(check, factory->symbol_string())) {
4571 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4572 __ JumpIfSmi(x0, if_false);
4573 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4574 Split(eq, if_true, if_false, fall_through);
4575 } else if (String::Equals(check, factory->boolean_string())) {
4576 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4577 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4578 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4579 Split(eq, if_true, if_false, fall_through);
4580 } else if (String::Equals(check, factory->undefined_string())) {
4581 ASM_LOCATION(
4582     "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4583 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4584 __ JumpIfSmi(x0, if_false);
4585 // Check for undetectable objects => true.
4586 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4587 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4588 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4589                 fall_through);
4590 } else if (String::Equals(check, factory->function_string())) {
4591 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4592 __ JumpIfSmi(x0, if_false);
4593 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4594 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4595 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4596                    fall_through);
4598 } else if (String::Equals(check, factory->object_string())) {
4599 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4600 __ JumpIfSmi(x0, if_false);
4601 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4602 // Check for JS objects => true.
4603 Register map = x10;
4604 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4605                     if_false, lt);
4606 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4607 __ B(gt, if_false);
4608 // Check for undetectable objects => false.
4609 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4611 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4612                 fall_through);
4614 } else {
4615 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4616 if (if_false != fall_through) __ B(if_false);
4617 }
4618 context()->Plug(if_true, if_false);
4619 }
4622 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4623 Comment cmnt(masm_, "[ CompareOperation");
4624 SetSourcePosition(expr->position());
4626 // Try to generate an optimized comparison with a literal value.
4627 // TODO(jbramley): This only checks common values like NaN or undefined.
4628 // Should it also handle ARM64 immediate operands?
4629 if (TryLiteralCompare(expr)) {
4630 return;
4631 }
4633 // Assign labels according to context()->PrepareTest.
4634 Label materialize_true;
4635 Label materialize_false;
4636 Label* if_true = NULL;
4637 Label* if_false = NULL;
4638 Label* fall_through = NULL;
4639 context()->PrepareTest(&materialize_true, &materialize_false,
4640 &if_true, &if_false, &fall_through);
4642 Token::Value op = expr->op();
4643 VisitForStackValue(expr->left());
4644 switch (op) {
4645 case Token::IN:
4646 VisitForStackValue(expr->right());
4647 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4648 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4649 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4650 Split(eq, if_true, if_false, fall_through);
4651 break;
4653 case Token::INSTANCEOF: {
4654 VisitForStackValue(expr->right());
4655 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4656 __ CallStub(&stub);
4657 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4658 // The stub returns 0 for true.
4659 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4660 break;
4661 }
4663 default: {
4664 VisitForAccumulatorValue(expr->right());
4665 Condition cond = CompareIC::ComputeCondition(op);
4667 // Pop the stack value.
4668 __ Pop(x1);
4670 JumpPatchSite patch_site(masm_);
4671 if (ShouldInlineSmiCase(op)) {
4672 Label slow_case;
4673 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4674 __ Cmp(x1, x0);
4675 Split(cond, if_true, if_false, NULL);
4676 __ Bind(&slow_case);
4677 }
4679 // Record position and call the compare IC.
4680 SetSourcePosition(expr->position());
4681 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4682 CallIC(ic, expr->CompareOperationFeedbackId());
4683 patch_site.EmitPatchInfo();
4684 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4685 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4686 }
4687 }
4689 // Convert the result of the comparison into one expected for this
4690 // expression's context.
4691 context()->Plug(if_true, if_false);
4692 }
4695 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4696                                               Expression* sub_expr,
4697                                               NilValue nil) {
4698 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4699 Label materialize_true, materialize_false;
4700 Label* if_true = NULL;
4701 Label* if_false = NULL;
4702 Label* fall_through = NULL;
4703 context()->PrepareTest(&materialize_true, &materialize_false,
4704 &if_true, &if_false, &fall_through);
4706 VisitForAccumulatorValue(sub_expr);
4707 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4709 if (expr->op() == Token::EQ_STRICT) {
4710 Heap::RootListIndex nil_value = nil == kNullValue ?
4711 Heap::kNullValueRootIndex :
4712 Heap::kUndefinedValueRootIndex;
4713 __ CompareRoot(x0, nil_value);
4714 Split(eq, if_true, if_false, fall_through);
4715 } else {
4716 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4717 CallIC(ic, expr->CompareOperationFeedbackId());
4718 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4719 }
4721 context()->Plug(if_true, if_false);
4722 }
4725 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4726 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4727 context()->Plug(x0);
4728 }
4731 void FullCodeGenerator::VisitYield(Yield* expr) {
4732 Comment cmnt(masm_, "[ Yield");
4733 // Evaluate yielded value first; the initial iterator definition depends on
4734 // this. It stays on the stack while we update the iterator.
4735 VisitForStackValue(expr->expression());
4737 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4738 // and suchlike. The implementation changes a little by bleeding_edge so I
4739 // don't want to spend too much time on it now.
4741 switch (expr->yield_kind()) {
4742 case Yield::kSuspend:
4743 // Pop value from top-of-stack slot; box result into result register.
4744 EmitCreateIteratorResult(false);
4745 __ Push(result_register());
4746 // Fall through.
4747 case Yield::kInitial: {
4748 Label suspend, continuation, post_runtime, resume;
4750 __ B(&suspend);
4752 // TODO(jbramley): This label is bound here because the following code
4753 // looks at its pos(). Is it possible to do something more efficient here,
4754 // perhaps using Adr?
4755 __ Bind(&continuation);
4756 __ B(&resume);
4758 __ Bind(&suspend);
4759 VisitForAccumulatorValue(expr->generator_object());
4760 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4761 __ Mov(x1, Smi::FromInt(continuation.pos()));
4762 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4763 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4764 __ Mov(x1, cp);
4765 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4766 kLRHasBeenSaved, kDontSaveFPRegs);
4767 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4768 __ Cmp(__ StackPointer(), x1);
4769 __ B(eq, &post_runtime);
4770 __ Push(x0); // generator object
4771 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4772 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4773 __ Bind(&post_runtime);
4774 __ Pop(result_register());
4775 EmitReturnSequence();
4777 __ Bind(&resume);
4778 context()->Plug(result_register());
4779 break;
4780 }
4782 case Yield::kFinal: {
4783 VisitForAccumulatorValue(expr->generator_object());
4784 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4785 __ Str(x1, FieldMemOperand(result_register(),
4786 JSGeneratorObject::kContinuationOffset));
4787 // Pop value from top-of-stack slot, box result into result register.
4788 EmitCreateIteratorResult(true);
4789 EmitUnwindBeforeReturn();
4790 EmitReturnSequence();
4791 break;
4792 }
4794 case Yield::kDelegating: {
4795 VisitForStackValue(expr->generator_object());
4797 // Initial stack layout is as follows:
4798 // [sp + 1 * kPointerSize] iter
4799 // [sp + 0 * kPointerSize] g
4801 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4802 Label l_next, l_call, l_loop;
4803 Register load_receiver = LoadDescriptor::ReceiverRegister();
4804 Register load_name = LoadDescriptor::NameRegister();
4806 // Initial send value is undefined.
4807 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4808 __ B(&l_next);
4810 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4811 __ Bind(&l_catch);
4812 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4813 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
4814 __ Peek(x3, 1 * kPointerSize); // iter
4815 __ Push(load_name, x3, x0); // "throw", iter, except
4816 __ B(&l_call);
4818 // try { received = %yield result }
4819 // Shuffle the received result above a try handler and yield it without
4820 // re-boxing.
4821 __ Bind(&l_try);
4822 __ Pop(x0); // result
4823 __ PushTryHandler(StackHandler::CATCH, expr->index());
4824 const int handler_size = StackHandlerConstants::kSize;
4825 __ Push(x0); // result
4826 __ B(&l_suspend);
4828 // TODO(jbramley): This label is bound here because the following code
4829 // looks at its pos(). Is it possible to do something more efficient here,
4830 // perhaps using Adr?
4831 __ Bind(&l_continuation);
4832 __ B(&l_resume);
4834 __ Bind(&l_suspend);
4835 const int generator_object_depth = kPointerSize + handler_size;
4836 __ Peek(x0, generator_object_depth);
4837 __ Push(x0); // g
4838 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4839 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4840 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4841 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4842 __ Mov(x1, cp);
4843 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4844 kLRHasBeenSaved, kDontSaveFPRegs);
4845 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4846 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4847 __ Pop(x0); // result
4848 EmitReturnSequence();
4849 __ Bind(&l_resume); // received in x0
4850 __ PopTryHandler();
4852 // receiver = iter; f = 'next'; arg = received;
4853 __ Bind(&l_next);
4855 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
4856 __ Peek(x3, 1 * kPointerSize); // iter
4857 __ Push(load_name, x3, x0); // "next", iter, received
4859 // result = receiver[f](arg);
4860 __ Bind(&l_call);
4861 __ Peek(load_receiver, 1 * kPointerSize);
4862 __ Peek(load_name, 2 * kPointerSize);
4863 if (FLAG_vector_ics) {
4864 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4865        SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
4866 }
4867 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
4868 CallIC(ic, TypeFeedbackId::None());
4869 __ Mov(x1, x0);
4870 __ Poke(x1, 2 * kPointerSize);
4871 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
4872 __ CallStub(&stub);
4874 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4875 __ Drop(1); // The function is still on the stack; drop it.
4877 // if (!result.done) goto l_try;
4878 __ Bind(&l_loop);
4879 __ Move(load_receiver, x0);
4881 __ Push(load_receiver); // save result
4882 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
4883 if (FLAG_vector_ics) {
4884 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4885        SmiFromSlot(expr->DoneFeedbackSlot()));
4886 }
4887 CallLoadIC(NOT_CONTEXTUAL); // x0=result.done
4888 // The ToBooleanStub argument (result.done) is in x0.
4889 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4890 CallIC(bool_ic);
4891 __ Cbz(x0, &l_try);
4893 // result.value
4894 __ Pop(load_receiver); // result
4895 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
4896 if (FLAG_vector_ics) {
4897 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4898        SmiFromSlot(expr->ValueFeedbackSlot()));
4899 }
4900 CallLoadIC(NOT_CONTEXTUAL); // x0=result.value
4901 context()->DropAndPlug(2, x0); // drop iter and g
4902 break;
4903 }
4904 }
4905 }
4908 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4909     Expression *value,
4910     JSGeneratorObject::ResumeMode resume_mode) {
4911 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4912 Register value_reg = x0;
4913 Register generator_object = x1;
4914 Register the_hole = x2;
4915 Register operand_stack_size = w3;
4916 Register function = x4;
4918 // The value stays in x0, and is ultimately read by the resumed generator, as
4919 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
4920 // is read to throw the value when the resumed generator is already closed. x1
4921 // will hold the generator object until the activation has been resumed.
4922 VisitForStackValue(generator);
4923 VisitForAccumulatorValue(value);
4924 __ Pop(generator_object);
4926 // Check generator state.
4927 Label wrong_state, closed_state, done;
4928 __ Ldr(x10, FieldMemOperand(generator_object,
4929 JSGeneratorObject::kContinuationOffset));
4930 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
4931 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
4932 __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
4933 __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
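// Note: the continuation field doubles as the generator's state word.
// Per the STATIC_ASSERTs above, kGeneratorExecuting (< 0) and
// kGeneratorClosed (== 0) are sentinels, while any positive smi is the
// code position at which the suspended activation resumes.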
4935 // Load suspended function and context.
4936 __ Ldr(cp, FieldMemOperand(generator_object,
4937 JSGeneratorObject::kContextOffset));
4938 __ Ldr(function, FieldMemOperand(generator_object,
4939 JSGeneratorObject::kFunctionOffset));
4941 // Load receiver and store as the first argument.
4942 __ Ldr(x10, FieldMemOperand(generator_object,
4943                             JSGeneratorObject::kReceiverOffset));
4944 __ Push(x10);
4946 // Push holes for the rest of the arguments to the generator function.
4947 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4949 // The number of arguments is stored as an int32_t, and -1 is a marker
4950 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
4951 // extension to correctly handle it. However, in this case, we operate on
4952 // 32-bit W registers, so extension isn't required.
4953 __ Ldr(w10, FieldMemOperand(x10,
4954 SharedFunctionInfo::kFormalParameterCountOffset));
4955 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4956 __ PushMultipleTimes(the_hole, w10);
4958 // Enter a new JavaScript frame, and initialize its slots as they were when
4959 // the generator was suspended.
4960 Label resume_frame;
4961 __ Bl(&resume_frame);
4962 __ B(&done);
4964 __ Bind(&resume_frame);
4965 __ Push(lr, // Return address.
4966 fp, // Caller's frame pointer.
4967 cp, // Callee's context.
4968 function); // Callee's JS Function.
4969 __ Add(fp, __ StackPointer(), kPointerSize * 2);
4971 // Load and untag the operand stack size.
4972 __ Ldr(x10, FieldMemOperand(generator_object,
4973 JSGeneratorObject::kOperandStackOffset));
4974 __ Ldr(operand_stack_size,
4975 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4977 // If we are sending a value and there is no operand stack, we can jump back
4978 // in directly.
4979 if (resume_mode == JSGeneratorObject::NEXT) {
4980 Label slow_resume;
4981 __ Cbnz(operand_stack_size, &slow_resume);
4982 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4983 __ Ldrsw(x11,
4984          UntagSmiFieldMemOperand(generator_object,
4985                                  JSGeneratorObject::kContinuationOffset));
4986 __ Add(x10, x10, x11);
4987 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
4988 __ Str(x12, FieldMemOperand(generator_object,
4989                             JSGeneratorObject::kContinuationOffset));
4990 __ Br(x10);
4992 __ Bind(&slow_resume);
4993 }
4995 // Otherwise, we push holes for the operand stack and call the runtime to fix
4996 // up the stack and the handlers.
4997 __ PushMultipleTimes(the_hole, operand_stack_size);
4999 __ Mov(x10, Smi::FromInt(resume_mode));
5000 __ Push(generator_object, result_register(), x10);
5001 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
5002 // Not reached: the runtime call returns elsewhere.
5003 __ Unreachable();
5005 // Reach here when generator is closed.
5006 __ Bind(&closed_state);
5007 if (resume_mode == JSGeneratorObject::NEXT) {
5008 // Return completed iterator result when generator is closed.
5009 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
5010 __ Push(x10);
5011 // Pop value from top-of-stack slot; box result into result register.
5012 EmitCreateIteratorResult(true);
5013 } else {
5014 // Throw the provided value.
5015 __ Push(value_reg);
5016 __ CallRuntime(Runtime::kThrow, 1);
5017 }
5018 __ B(&done);
5020 // Throw error if we attempt to operate on a running generator.
5021 __ Bind(&wrong_state);
5022 __ Push(generator_object);
5023 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
5025 __ Bind(&done);
5026 context()->Plug(result_register());
5027 }
5030 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5031 Label gc_required;
5032 Label allocated;
5034 const int instance_size = 5 * kPointerSize;
5035 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
5036           instance_size);
5038 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5040 Register result = x0;
5041 __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
5042 __ B(&allocated);
5044 __ Bind(&gc_required);
5045 __ Push(Smi::FromInt(instance_size));
5046 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5047 __ Ldr(context_register(),
5048 MemOperand(fp, StandardFrameConstants::kContextOffset));
5050 __ Bind(&allocated);
5051 Register map_reg = x1;
5052 Register result_value = x2;
5053 Register boolean_done = x3;
5054 Register empty_fixed_array = x4;
5055 Register untagged_result = x5;
5056 __ Ldr(map_reg, GlobalObjectMemOperand());
5057 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5058 __ Ldr(map_reg,
5059        ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5060 __ Pop(result_value);
5061 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5062 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5063 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5064 JSObject::kElementsOffset);
5065 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5066 JSGeneratorObject::kResultDonePropertyOffset);
5067 __ ObjectUntag(untagged_result, result);
5068 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5069 __ Stp(empty_fixed_array, empty_fixed_array,
5070 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5071 __ Stp(result_value, boolean_done,
5072 MemOperand(untagged_result,
5073 JSGeneratorObject::kResultValuePropertyOffset));
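// Note: the two Stp stores above rely on the field layout checked by the
// STATIC_ASSERTs: [map | properties | elements | value | done], five
// pointer-sized fields matching instance_size. Pairing the stores halves
// the number of memory operations.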
5075 // Only the value field needs a write barrier, as the other values are in the
5076 // root set.
5077 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
5078                     x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
5079 context()->Plug(x0);
5080 }
5082 // TODO(all): I don't like this method.
5083 // It seems to me that in too many places x0 is used in place of this.
5084 // Also, this function is not suitable for all places where x0 should be
5085 // abstracted (eg. when used as an argument). But some places assume that the
5086 // first argument register is x0, and use this function instead.
5087 // Considering that most of the register allocation is hard-coded in the
5088 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5089 // that abstracting the allocation through functions would not yield any
5090 // performance benefit, I think the existence of this function is debatable.
5091 Register FullCodeGenerator::result_register() {
5092 return x0;
5093 }
5096 Register FullCodeGenerator::context_register() {
5097 return cp;
5098 }
5101 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5102 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5103 __ Str(value, MemOperand(fp, frame_offset));
5104 }
5107 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5108 __ Ldr(dst, ContextMemOperand(cp, context_index));
5109 }
5112 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5113 Scope* declaration_scope = scope()->DeclarationScope();
5114 if (declaration_scope->is_global_scope() ||
5115 declaration_scope->is_module_scope()) {
5116 // Contexts nested in the native context have a canonical empty function
5117 // as their closure, not the anonymous closure containing the global
5118 // code. Pass a smi sentinel and let the runtime look up the empty
5119 // function.
5120 DCHECK(kSmiTag == 0);
5121 __ Push(xzr);
5122 } else if (declaration_scope->is_eval_scope()) {
5123 // Contexts created by a call to eval have the same closure as the
5124 // context calling eval, not the anonymous closure containing the eval
5125 // code. Fetch it from the context.
5126 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
5127 __ Push(x10);
5128 } else {
5129 DCHECK(declaration_scope->is_function_scope());
5130 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5131 __ Push(x10);
5132 }
5133 }
5136 void FullCodeGenerator::EnterFinallyBlock() {
5137 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5138 DCHECK(!result_register().is(x10));
5139 // Preserve the result register while executing finally block.
5140 // Also cook the return address in lr to the stack (smi encoded Code* delta).
5141 __ Sub(x10, lr, Operand(masm_->CodeObject()));
5142 __ SmiTag(x10);
5143 __ Push(result_register(), x10);
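// Note: "cooking" means the stack slot holds a smi-encoded offset from
// the code object rather than a raw lr value. A raw return address would
// look stale if the GC moved the code object; the offset stays valid and
// is re-materialized in ExitFinallyBlock.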
5145 // Store pending message while executing finally block.
5146 ExternalReference pending_message_obj =
5147 ExternalReference::address_of_pending_message_obj(isolate());
5148 __ Mov(x10, pending_message_obj);
5149 __ Ldr(x10, MemOperand(x10));
5151 ExternalReference has_pending_message =
5152 ExternalReference::address_of_has_pending_message(isolate());
5153 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
5154 __ Mov(x11, has_pending_message);
5155 __ Ldrb(x11, MemOperand(x11));
5156 __ SmiTag(x11);
5158 __ Push(x10, x11);
5160 ExternalReference pending_message_script =
5161 ExternalReference::address_of_pending_message_script(isolate());
5162 __ Mov(x10, pending_message_script);
5163 __ Ldr(x10, MemOperand(x10));
5164 __ Push(x10);
5165 }
5168 void FullCodeGenerator::ExitFinallyBlock() {
5169 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5170 DCHECK(!result_register().is(x10));
5172 // Restore pending message from stack.
5173 __ Pop(x10, x11, x12);
5174 ExternalReference pending_message_script =
5175 ExternalReference::address_of_pending_message_script(isolate());
5176 __ Mov(x13, pending_message_script);
5177 __ Str(x10, MemOperand(x13));
5179 __ SmiUntag(x11);
5180 ExternalReference has_pending_message =
5181 ExternalReference::address_of_has_pending_message(isolate());
5182 __ Mov(x13, has_pending_message);
5183 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
5184 __ Strb(x11, MemOperand(x13));
5186 ExternalReference pending_message_obj =
5187 ExternalReference::address_of_pending_message_obj(isolate());
5188 __ Mov(x13, pending_message_obj);
5189 __ Str(x12, MemOperand(x13));
5191 // Restore result register and cooked return address from the stack.
5192 __ Pop(x10, result_register());
5194 // Uncook the return address (see EnterFinallyBlock).
5195 __ SmiUntag(x10);
5196 __ Add(x11, x10, Operand(masm_->CodeObject()));
5197 __ Br(x11);
5198 }
5201 #undef __
5204 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5205                             Address pc,
5206                             BackEdgeState target_state,
5207                             Code* replacement_code) {
5208 // Turn the jump into a nop.
5209 Address branch_address = pc - 3 * kInstructionSize;
5210 PatchingAssembler patcher(branch_address, 1);
5212 DCHECK(Instruction::Cast(branch_address)
5213 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5214 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5215 Instruction::Cast(branch_address)->ImmPCOffset() ==
5216 6 * kInstructionSize));
5218 switch (target_state) {
5219 case INTERRUPT:
5220 // <decrement profiling counter>
5221 // .. .. .. .. b.pl ok
5222 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5223 // .. .. .. .. blr x16
5224 // ... more instructions.
5225 //  ok-label
5226 // Jump offset is 6 instructions.
5227 patcher.b(6, pl);
5228 break;
5229 case ON_STACK_REPLACEMENT:
5230 case OSR_AFTER_STACK_CHECK:
5231 // <decrement profiling counter>
5232 // .. .. .. .. mov x0, x0 (NOP)
5233 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5234 // .. .. .. .. blr x16
5235 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
5236 break;
5237 }
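// Note: both cases rewrite only the first instruction of the back-edge
// sequence. For INTERRUPT, the conditional b.pl skips the ldr/blr pair on
// the fast path; for the OSR states it becomes a marker nop, so the call
// always runs. GetBackEdgeState below classifies a site by inspecting
// that single instruction.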
5239 // Replace the call address.
5240 Instruction* load = Instruction::Cast(pc)->preceding(2);
5241 Address interrupt_address_pointer =
5242 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5243 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5244         reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5245                                        ->builtins()
5246                                        ->OnStackReplacement()
5247                                        ->entry())) ||
5248        (Memory::uint64_at(interrupt_address_pointer) ==
5249         reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5250                                        ->builtins()
5251                                        ->InterruptCheck()
5252                                        ->entry())) ||
5253        (Memory::uint64_at(interrupt_address_pointer) ==
5254         reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5255                                        ->builtins()
5256                                        ->OsrAfterStackCheck()
5257                                        ->entry())) ||
5258        (Memory::uint64_at(interrupt_address_pointer) ==
5259         reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5260                                        ->builtins()
5261                                        ->OnStackReplacement()
5262                                        ->entry())));
5263 Memory::uint64_at(interrupt_address_pointer) =
5264     reinterpret_cast<uint64_t>(replacement_code->entry());
5266 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5267     unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5268 }
5271 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5272     Isolate* isolate,
5273     Code* unoptimized_code,
5274     Address pc) {
5275 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5276 // back-end), but this function is gone in bleeding_edge so it might not
5277 // matter anyway.
5278 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5280 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5281 Instruction* load = Instruction::Cast(pc)->preceding(2);
5282 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5283 load->ImmPCOffset());
5284 if (entry == reinterpret_cast<uint64_t>(
5285 isolate->builtins()->OnStackReplacement()->entry())) {
5286 return ON_STACK_REPLACEMENT;
5287 } else if (entry == reinterpret_cast<uint64_t>(
5288 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5289 return OSR_AFTER_STACK_CHECK;
5290 } else {
5291 UNREACHABLE();
5292 }
5293 }
5295 return INTERRUPT;
5296 }
5299 #define __ ACCESS_MASM(masm())
5302 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5303     int* stack_depth,
5304     int* context_length) {
5305 ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
5306 // The macros used here must preserve the result register.
5308 // Because the handler block contains the context of the finally
5309 // code, we can restore it directly from there for the finally code
5310 // rather than iteratively unwinding contexts via their previous
5311 // links.
5312 __ Drop(*stack_depth); // Down to the handler block.
5313 if (*context_length > 0) {
5314 // Restore the context to its dedicated register and the stack.
5315 __ Peek(cp, StackHandlerConstants::kContextOffset);
5316 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5317 }
5318 __ PopTryHandler();
5319 __ Bl(finally_entry_);
5321 *stack_depth = 0;
5322 *context_length = 0;
5323 return previous_;
5324 }
5327 #undef __
5330 } } // namespace v8::internal
5332 #endif // V8_TARGET_ARCH_ARM64