// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }
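  // Patching note: a smi has tag bit 0 clear. PatchInlinedSmiCode replaces
  // xzr in the two emitters above with the recorded register (flipping
  // tbz/tbnz as required), so branches that start out statically always or
  // never taken become real smi-tag tests.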
  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());
  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
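        // Worked example: for locals_count == 70 the loop above runs
        // 70 / 32 == 2 iterations (64 undefineds) and the line above emits
        // the 70 % 32 == 6 remaining pushes unrolled.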
      }
    }
  }

  bool function_in_register_x1 = true;

  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ Bind(&done);
        }
      }
    }
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}
void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
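  // The counter cell holds a Smi. Subs leaves the condition flags set, so
  // callers can branch on pl/mi to detect an exhausted budget without
  // reloading the value.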
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
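  // Hypothetical illustration of the rounding: if kCodeSizeMultiplier were
  // 100 and the back edge spanned 151 bytes, a plain floor would give
  // weight 151 / 100 == 1, while (151 + 50) / 100 == 2 rounds 1.51 to the
  // nearest integer.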
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);
    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSRetSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the size
      // of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16 bytes alignment here.
      DCHECK(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
      __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
      __ add(current_sp, current_sp, ip0);
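      // The ldr_pcrel above loads the 64-bit literal emitted by dc64 below
      // (three instructions past the load): the byte size of the receiver
      // plus the formal parameters. Adding it to the stack pointer drops
      // them all before the ret executes.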
      __ ret();
      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
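  // The ToBoolean IC leaves its answer in the result register: nonzero means
  // the condition evaluated to true, so comparing against 0 with ne routes
  // control to if_true.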
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}
// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
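  // Illustrative layout: parameters (plus the receiver, hence the +1 slot)
  // live on the caller's side of the frame pointer, while stack locals start
  // at kLocal0Offset below it; negating the index maps both cases onto a
  // single fp-relative offset.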
  return MemOperand(fp, offset);
}
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));

  // Assign it.
  __ Str(x1, ContextMemOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            x1,
                            x3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse info body.
  Visit(declaration->module());
}
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
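  // For example, for `switch (v) { case a: s1; default: s2; case b: s3; }`
  // the comparisons against `a` and `b` are emitted first and all the case
  // bodies follow afterwards; the default body is only branched to once
  // every test has failed.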
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);
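  // Loop state on the stack, from the top: index (smi zero), length (smi),
  // array or enum cache, map or smi 1/0 marking the slow path, and the
  // enumerable object itself. The Peeks below and the final Drop(5) rely on
  // this five-slot layout.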
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
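  // The index on the stack is a smi; UntagSmiAndScale converts it into
  // index * kPointerSize, the byte offset of the current element in the
  // fixed array's backing store.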
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Mov(x3, x0);
  __ Cbz(x0, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ Bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ B(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(loop_statement.break_label());
  decrement_loop_depth();
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ Ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
  __ Cmp(x0, x10);
  Label done;
  __ B(ne, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ Bind(&done);
}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           Smi::FromInt(proxy->VariableFeedbackSlot()));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
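      // The hole is the sentinel for an uninitialized binding: let/const
      // slots hold it until their declaration executes, so finding it here
      // means the load happened in the temporal dead zone.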
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
      __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               Smi::FromInt(proxy->VariableFeedbackSlot()));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(x0, var);
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
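  // Each literal site caches its materialized RegExp in the function's
  // literals array; finding undefined there means this is the first
  // evaluation and the runtime must still create the object.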

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Mov(x0, Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(key);
          VisitForStackValue(value);
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          VisitForEffect(key);
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(value);
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ Mov(x0, Smi::FromInt(flags));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Mov(x1, Smi::FromInt(expr->literal_index()));
      __ Push(x0, x1);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}
1860 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1861 DCHECK(expr->target()->IsValidReferenceExpression());
1863 Comment cmnt(masm_, "[ Assignment");
1865 // Left-hand side can only be a property, a global or a (parameter or local)
1867 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1868 LhsKind assign_type = VARIABLE;
1869 Property* property = expr->target()->AsProperty();
1870 if (property != NULL) {
1871 assign_type = (property->key()->IsPropertyName())
1872 ? NAMED_PROPERTY
1873 : KEYED_PROPERTY;
1874 }
1876 // Evaluate LHS expression.
1877 switch (assign_type) {
1878 case VARIABLE:
1879 // Nothing to do here.
1880 break;
1881 case NAMED_PROPERTY:
1882 if (expr->is_compound()) {
1883 // We need the receiver both on the stack and in the register.
1884 VisitForStackValue(property->obj());
1885 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1886 } else {
1887 VisitForStackValue(property->obj());
1888 }
1889 break;
1890 case KEYED_PROPERTY:
1891 if (expr->is_compound()) {
1892 VisitForStackValue(property->obj());
1893 VisitForStackValue(property->key());
1894 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1895 __ Peek(LoadDescriptor::NameRegister(), 0);
1896 } else {
1897 VisitForStackValue(property->obj());
1898 VisitForStackValue(property->key());
1899 }
1900 break;
1901 }
1903 // For compound assignments we need another deoptimization point after the
1904 // variable/property load.
1905 if (expr->is_compound()) {
1906 { AccumulatorValueContext context(this);
1907 switch (assign_type) {
1908 case VARIABLE:
1909 EmitVariableLoad(expr->target()->AsVariableProxy());
1910 PrepareForBailout(expr->target(), TOS_REG);
1911 break;
1912 case NAMED_PROPERTY:
1913 EmitNamedPropertyLoad(property);
1914 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1915 break;
1916 case KEYED_PROPERTY:
1917 EmitKeyedPropertyLoad(property);
1918 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1919 break;
1920 }
1921 }
1923 Token::Value op = expr->binary_op();
1924 __ Push(x0); // Left operand goes on the stack.
1925 VisitForAccumulatorValue(expr->value());
1927 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1928 ? OVERWRITE_RIGHT
1929 : NO_OVERWRITE;
1930 SetSourcePosition(expr->position() + 1);
1931 AccumulatorValueContext context(this);
1932 if (ShouldInlineSmiCase(op)) {
1933 EmitInlineSmiBinaryOp(expr->binary_operation(),
1934 op,
1935 mode,
1936 expr->target(),
1937 expr->value());
1938 } else {
1939 EmitBinaryOp(expr->binary_operation(), op, mode);
1940 }
1942 // Deoptimization point in case the binary operation may have side effects.
1943 PrepareForBailout(expr->binary_operation(), TOS_REG);
1944 } else {
1945 VisitForAccumulatorValue(expr->value());
1946 }
1948 // Record source position before possible IC call.
1949 SetSourcePosition(expr->position());
1952 switch (assign_type) {
1953 case VARIABLE:
1954 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1955 expr->op());
1956 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1957 context()->Plug(x0);
1958 break;
1959 case NAMED_PROPERTY:
1960 EmitNamedPropertyAssignment(expr);
1961 break;
1962 case KEYED_PROPERTY:
1963 EmitKeyedPropertyAssignment(expr);
1964 break;
1965 }
1966 }
1969 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1970 SetSourcePosition(prop->position());
1971 Literal* key = prop->key()->AsLiteral();
1972 DCHECK(!prop->IsSuperAccess());
1974 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
1975 if (FLAG_vector_ics) {
1976 __ Mov(VectorLoadICDescriptor::SlotRegister(),
1977 Smi::FromInt(prop->PropertyFeedbackSlot()));
1978 CallLoadIC(NOT_CONTEXTUAL);
1979 } else {
1980 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
1981 }
1982 }
1985 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
1986 SetSourcePosition(prop->position());
1987 Literal* key = prop->key()->AsLiteral();
1988 DCHECK(!key->value()->IsSmi());
1989 DCHECK(prop->IsSuperAccess());
1991 SuperReference* super_ref = prop->obj()->AsSuperReference();
1992 EmitLoadHomeObject(super_ref);
1993 __ Push(x0);
1994 VisitForStackValue(super_ref->this_var());
1995 __ Push(key->value());
1996 __ CallRuntime(Runtime::kLoadFromSuper, 3);
1997 }
2000 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2001 SetSourcePosition(prop->position());
2002 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2003 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2004 if (FLAG_vector_ics) {
2005 __ Mov(VectorLoadICDescriptor::SlotRegister(),
2006 Smi::FromInt(prop->PropertyFeedbackSlot()));
2007 CallIC(ic);
2008 } else {
2009 CallIC(ic, prop->PropertyFeedbackId());
2010 }
2011 }
2014 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2015 Token::Value op,
2016 OverwriteMode mode,
2017 Expression* left_expr,
2018 Expression* right_expr) {
2019 Label done, both_smis, stub_call;
2021 // Get the arguments.
2022 Register left = x1;
2023 Register right = x0;
2024 Register result = x0;
2025 __ Pop(left);
2027 // Perform combined smi check on both operands.
2028 __ Orr(x10, left, right);
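// kSmiTag is 0 and lives in bit 0, so the Orr result has a clear tag bit
// only when both operands are smis; a single tag-bit test covers both.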
2029 JumpPatchSite patch_site(masm_);
2030 patch_site.EmitJumpIfSmi(x10, &both_smis);
2032 __ Bind(&stub_call);
2034 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2035 {
2036 Assembler::BlockPoolsScope scope(masm_);
2037 CallIC(code, expr->BinaryOperationFeedbackId());
2038 patch_site.EmitPatchInfo();
2039 }
2040 __ B(&done);
2042 __ Bind(&both_smis);
2043 // Smi case. This code works in the same way as the smi-smi case in the type
2044 // recording binary operation stub, see
2045 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2046 // TODO(all): That doesn't exist any more. Where are the comments?
2048 // The set of operations that needs to be supported here is controlled by
2049 // FullCodeGenerator::ShouldInlineSmiCase().
2050 switch (op) {
2051 case Token::SAR:
2052 __ Ubfx(right, right, kSmiShift, 5);
2053 __ Asr(result, left, right);
2054 __ Bic(result, result, kSmiShiftMask);
2055 break;
2056 case Token::SHL:
2057 __ Ubfx(right, right, kSmiShift, 5);
2058 __ Lsl(result, left, right);
2059 break;
2060 case Token::SHR:
2061 // If `left >>> right` >= 0x80000000, the result is not representable in a
2062 // signed 32-bit smi.
2063 __ Ubfx(right, right, kSmiShift, 5);
2064 __ Lsr(x10, left, right);
2065 __ Tbnz(x10, kXSignBit, &stub_call);
2066 __ Bic(result, x10, kSmiShiftMask);
2067 break;
2068 case Token::ADD:
2069 __ Adds(x10, left, right);
2070 __ B(vs, &stub_call);
2071 __ Mov(result, x10);
2072 break;
2073 case Token::SUB:
2074 __ Subs(x10, left, right);
2075 __ B(vs, &stub_call);
2076 __ Mov(result, x10);
2077 break;
2078 case Token::MUL: {
2079 Label not_minus_zero, done;
2080 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2081 STATIC_ASSERT(kSmiTag == 0);
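// A smi keeps its 32-bit payload in the upper half of the register, so
// Smulh (the high 64 bits of the 128-bit product) yields exactly the
// untagged product of the two payloads.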
2082 __ Smulh(x10, left, right);
2083 __ Cbnz(x10, &not_minus_zero);
2084 __ Eor(x11, left, right);
2085 __ Tbnz(x11, kXSignBit, &stub_call);
2086 __ Mov(result, x10);
2087 __ B(&done);
2088 __ Bind(&not_minus_zero);
2089 __ Cls(x11, x10);
2090 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2091 __ B(lt, &stub_call);
2092 __ SmiTag(result, x10);
2093 __ Bind(&done);
2094 break;
2095 }
2096 case Token::BIT_OR:
2097 __ Orr(result, left, right);
2098 break;
2099 case Token::BIT_AND:
2100 __ And(result, left, right);
2101 break;
2102 case Token::BIT_XOR:
2103 __ Eor(result, left, right);
2104 break;
2105 default:
2106 UNREACHABLE();
2107 }
2109 __ Bind(&done);
2110 context()->Plug(x0);
2111 }
2114 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2115 Token::Value op,
2116 OverwriteMode mode) {
2117 __ Pop(x1);
2118 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2119 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2120 {
2121 Assembler::BlockPoolsScope scope(masm_);
2122 CallIC(code, expr->BinaryOperationFeedbackId());
2123 patch_site.EmitPatchInfo();
2124 }
2125 context()->Plug(x0);
2126 }
2129 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2130 DCHECK(expr->IsValidReferenceExpression());
2132 // Left-hand side can only be a property, a global or a (parameter or local)
2133 // slot.
2134 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2135 LhsKind assign_type = VARIABLE;
2136 Property* prop = expr->AsProperty();
2137 if (prop != NULL) {
2138 assign_type = (prop->key()->IsPropertyName())
2139 ? NAMED_PROPERTY
2140 : KEYED_PROPERTY;
2141 }
2143 switch (assign_type) {
2144 case VARIABLE: {
2145 Variable* var = expr->AsVariableProxy()->var();
2146 EffectContext context(this);
2147 EmitVariableAssignment(var, Token::ASSIGN);
2148 break;
2149 }
2150 case NAMED_PROPERTY: {
2151 __ Push(x0); // Preserve value.
2152 VisitForAccumulatorValue(prop->obj());
2153 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2154 // this copy.
2155 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2156 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2157 __ Mov(StoreDescriptor::NameRegister(),
2158 Operand(prop->key()->AsLiteral()->value()));
2159 CallStoreIC();
2160 break;
2161 }
2162 case KEYED_PROPERTY: {
2163 __ Push(x0); // Preserve value.
2164 VisitForStackValue(prop->obj());
2165 VisitForAccumulatorValue(prop->key());
2166 __ Mov(StoreDescriptor::NameRegister(), x0);
2167 __ Pop(StoreDescriptor::ReceiverRegister(),
2168 StoreDescriptor::ValueRegister());
2169 Handle<Code> ic =
2170 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2171 CallIC(ic);
2172 break;
2173 }
2174 }
2175 context()->Plug(x0);
2176 }
2179 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2180 Variable* var, MemOperand location) {
2181 __ Str(result_register(), location);
2182 if (var->IsContextSlot()) {
2183 // RecordWrite may destroy all its register arguments.
2184 __ Mov(x10, result_register());
2185 int offset = Context::SlotOffset(var->index());
2186 __ RecordWriteContextSlot(
2187 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2188 }
2189 }
2192 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2193 Token::Value op) {
2194 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2195 if (var->IsUnallocated()) {
2196 // Global var, const, or let.
2197 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2198 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2199 CallStoreIC();
2201 } else if (op == Token::INIT_CONST_LEGACY) {
2202 // Const initializers need a write barrier.
2203 DCHECK(!var->IsParameter()); // No const parameters.
2204 if (var->IsLookupSlot()) {
2205 __ Mov(x1, Operand(var->name()));
2206 __ Push(x0, cp, x1);
2207 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2208 } else {
2209 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2210 Label skip;
2211 MemOperand location = VarOperand(var, x1);
2212 __ Ldr(x10, location);
2213 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2214 EmitStoreToStackLocalOrContextSlot(var, location);
2215 __ Bind(&skip);
2216 }
2218 } else if (var->mode() == LET && op != Token::INIT_LET) {
2219 // Non-initializing assignment to let variable needs a write barrier.
2220 DCHECK(!var->IsLookupSlot());
2221 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2222 Label assign;
2223 MemOperand location = VarOperand(var, x1);
2224 __ Ldr(x10, location);
2225 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2226 __ Mov(x10, Operand(var->name()));
2227 __ Push(x10);
2228 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2229 // Perform the assignment.
2230 __ Bind(&assign);
2231 EmitStoreToStackLocalOrContextSlot(var, location);
2233 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2234 if (var->IsLookupSlot()) {
2235 // Assignment to var.
2236 __ Mov(x11, Operand(var->name()));
2237 __ Mov(x10, Smi::FromInt(strict_mode()));
2238 // jssp[0] : mode.
2239 // jssp[8] : name.
2240 // jssp[16] : context.
2241 // jssp[24] : value.
2241 // jssp[24] : value.
2242 __ Push(x0, cp, x11, x10);
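// The first register pushed lands deepest on the stack, so this push
// order produces exactly the jssp layout described above.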
2243 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2244 } else {
2245 // Assignment to var or initializing assignment to let/const in harmony
2246 // mode.
2247 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2248 MemOperand location = VarOperand(var, x1);
2249 if (FLAG_debug_code && op == Token::INIT_LET) {
2250 __ Ldr(x10, location);
2251 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2252 __ Check(eq, kLetBindingReInitialization);
2253 }
2254 EmitStoreToStackLocalOrContextSlot(var, location);
2255 }
2256 }
2257 // Non-initializing assignments to consts are ignored.
2258 }
2261 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2262 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2263 // Assignment to a property, using a named store IC.
2264 Property* prop = expr->target()->AsProperty();
2265 DCHECK(prop != NULL);
2266 DCHECK(prop->key()->IsLiteral());
2268 // Record source code position before IC call.
2269 SetSourcePosition(expr->position());
2270 __ Mov(StoreDescriptor::NameRegister(),
2271 Operand(prop->key()->AsLiteral()->value()));
2272 __ Pop(StoreDescriptor::ReceiverRegister());
2273 CallStoreIC(expr->AssignmentFeedbackId());
2275 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2276 context()->Plug(x0);
2277 }
2280 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2281 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2282 // Assignment to a property, using a keyed store IC.
2284 // Record source code position before IC call.
2285 SetSourcePosition(expr->position());
2286 // TODO(all): Could we pass this in registers rather than on the stack?
2287 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2288 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2290 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2291 CallIC(ic, expr->AssignmentFeedbackId());
2293 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2294 context()->Plug(x0);
2295 }
2298 void FullCodeGenerator::VisitProperty(Property* expr) {
2299 Comment cmnt(masm_, "[ Property");
2300 Expression* key = expr->key();
2302 if (key->IsPropertyName()) {
2303 if (!expr->IsSuperAccess()) {
2304 VisitForAccumulatorValue(expr->obj());
2305 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2306 EmitNamedPropertyLoad(expr);
2307 } else {
2308 EmitNamedSuperPropertyLoad(expr);
2309 }
2310 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2311 context()->Plug(x0);
2312 } else {
2313 VisitForStackValue(expr->obj());
2314 VisitForAccumulatorValue(expr->key());
2315 __ Move(LoadDescriptor::NameRegister(), x0);
2316 __ Pop(LoadDescriptor::ReceiverRegister());
2317 EmitKeyedPropertyLoad(expr);
2318 context()->Plug(x0);
2319 }
2320 }
2323 void FullCodeGenerator::CallIC(Handle<Code> code,
2324 TypeFeedbackId ast_id) {
2325 ic_total_count_++;
2326 // All calls must have a predictable size in full-codegen code to ensure that
2327 // the debugger can patch them correctly.
2328 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2329 }
2332 // Code common for calls using the IC.
2333 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2334 Expression* callee = expr->expression();
2336 CallICState::CallType call_type =
2337 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2339 // Get the target function.
2340 if (call_type == CallICState::FUNCTION) {
2341 { StackValueContext context(this);
2342 EmitVariableLoad(callee->AsVariableProxy());
2343 PrepareForBailout(callee, NO_REGISTERS);
2344 }
2345 // Push undefined as receiver. This is patched in the method prologue if it
2346 // is a sloppy mode method.
2347 __ Push(isolate()->factory()->undefined_value());
2348 } else {
2349 // Load the function from the receiver.
2350 DCHECK(callee->IsProperty());
2351 DCHECK(!callee->AsProperty()->IsSuperAccess());
2352 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2353 EmitNamedPropertyLoad(callee->AsProperty());
2354 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2355 // Push the target function under the receiver.
2356 __ Pop(x10);
2357 __ Push(x0, x10);
2358 }
2360 EmitCall(expr, call_type);
2361 }
2364 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2365 Expression* callee = expr->expression();
2366 DCHECK(callee->IsProperty());
2367 Property* prop = callee->AsProperty();
2368 DCHECK(prop->IsSuperAccess());
2370 SetSourcePosition(prop->position());
2371 Literal* key = prop->key()->AsLiteral();
2372 DCHECK(!key->value()->IsSmi());
2374 // Load the function from the receiver.
2375 const Register scratch = x10;
2376 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2377 EmitLoadHomeObject(super_ref);
2378 __ Push(x0);
2379 VisitForAccumulatorValue(super_ref->this_var());
2380 __ Push(x0);
2381 __ Peek(scratch, kPointerSize);
2382 __ Push(scratch, x0);
2383 __ Push(key->value());
2385 // Stack here:
2386 // - home_object
2387 // - this (receiver)
2388 // - home_object <-- LoadFromSuper will pop here and below.
2389 // - this (receiver)
2390 // - key
2391 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2393 // Replace home_object with target function.
2394 __ Poke(x0, kPointerSize);
2396 // Stack here:
2397 // - target function
2398 // - this (receiver)
2399 EmitCall(expr, CallICState::METHOD);
2400 }
2403 // Code common for calls using the IC.
2404 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2405 Expression* key) {
2406 // Load the key.
2407 VisitForAccumulatorValue(key);
2409 Expression* callee = expr->expression();
2411 // Load the function from the receiver.
2412 DCHECK(callee->IsProperty());
2413 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2414 __ Move(LoadDescriptor::NameRegister(), x0);
2415 EmitKeyedPropertyLoad(callee->AsProperty());
2416 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2418 // Push the target function under the receiver.
2419 __ Pop(x10);
2420 __ Push(x0, x10);
2422 EmitCall(expr, CallICState::METHOD);
2423 }
2426 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2427 // Load the arguments.
2428 ZoneList<Expression*>* args = expr->arguments();
2429 int arg_count = args->length();
2430 { PreservePositionScope scope(masm()->positions_recorder());
2431 for (int i = 0; i < arg_count; i++) {
2432 VisitForStackValue(args->at(i));
2433 }
2434 }
2435 // Record source position of the IC call.
2436 SetSourcePosition(expr->position());
2438 Handle<Code> ic = CallIC::initialize_stub(
2439 isolate(), arg_count, call_type);
2440 __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
2441 __ Peek(x1, (arg_count + 1) * kXRegSize);
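// The callee sits below the receiver and the arguments, i.e.
// (arg_count + 1) slots down from the top of the stack.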
2442 // Don't assign a type feedback id to the IC, since type feedback is provided
2443 // by the vector above.
2444 CallIC(ic);
2446 RecordJSReturnSite(expr);
2447 // Restore context register.
2448 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2449 context()->DropAndPlug(1, x0);
2450 }
2453 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2454 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2455 // Prepare to push a copy of the first argument or undefined if it doesn't
2456 // exist.
2457 if (arg_count > 0) {
2458 __ Peek(x10, arg_count * kXRegSize);
2459 } else {
2460 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2461 }
2463 // Prepare to push the receiver of the enclosing function.
2464 int receiver_offset = 2 + info_->scope()->num_parameters();
2465 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2467 // Prepare to push the language mode.
2468 __ Mov(x12, Smi::FromInt(strict_mode()));
2469 // Prepare to push the start position of the scope the call resides in.
2470 __ Mov(x13, Smi::FromInt(scope()->start_position()));
2473 __ Push(x10, x11, x12, x13);
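// Together with the function copy pushed by the caller, these four values
// make up the five arguments of Runtime::kResolvePossiblyDirectEval.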
2475 // Do the runtime call.
2476 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2477 }
2480 void FullCodeGenerator::VisitCall(Call* expr) {
2481 #ifdef DEBUG
2482 // We want to verify that RecordJSReturnSite gets called on all paths
2483 // through this function. Avoid early returns.
2484 expr->return_is_recorded_ = false;
2485 #endif
2487 Comment cmnt(masm_, "[ Call");
2488 Expression* callee = expr->expression();
2489 Call::CallType call_type = expr->GetCallType(isolate());
2491 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2492 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2493 // to resolve the function we need to call and the receiver of the
2494 // call. Then we call the resolved function using the given
2495 // arguments.
2496 ZoneList<Expression*>* args = expr->arguments();
2497 int arg_count = args->length();
2499 {
2500 PreservePositionScope pos_scope(masm()->positions_recorder());
2501 VisitForStackValue(callee);
2502 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2503 __ Push(x10); // Reserved receiver slot.
2505 // Push the arguments.
2506 for (int i = 0; i < arg_count; i++) {
2507 VisitForStackValue(args->at(i));
2508 }
2510 // Push a copy of the function (found below the arguments) and
2511 // resolve eval.
2512 __ Peek(x10, (arg_count + 1) * kPointerSize);
2513 __ Push(x10);
2514 EmitResolvePossiblyDirectEval(arg_count);
2516 // The runtime call returns a pair of values in x0 (function) and
2517 // x1 (receiver). Touch up the stack with the right values.
2518 __ PokePair(x1, x0, arg_count * kPointerSize);
2519 }
2521 // Record source position for debugger.
2522 SetSourcePosition(expr->position());
2524 // Call the evaluated function.
2525 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2526 __ Peek(x1, (arg_count + 1) * kXRegSize);
2527 __ CallStub(&stub);
2528 RecordJSReturnSite(expr);
2529 // Restore context register.
2530 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2531 context()->DropAndPlug(1, x0);
2533 } else if (call_type == Call::GLOBAL_CALL) {
2534 EmitCallWithLoadIC(expr);
2536 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2537 // Call to a lookup slot (dynamically introduced variable).
2538 VariableProxy* proxy = callee->AsVariableProxy();
2539 Label slow, done;
2541 { PreservePositionScope scope(masm()->positions_recorder());
2542 // Generate code for loading from variables potentially shadowed
2543 // by eval-introduced variables.
2544 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2545 }
2547 __ Bind(&slow);
2548 // Call the runtime to find the function to call (returned in x0)
2549 // and the object holding it (returned in x1).
2550 __ Mov(x10, Operand(proxy->name()));
2551 __ Push(context_register(), x10);
2552 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2553 __ Push(x0, x1); // Function, receiver.
2555 // If fast case code has been generated, emit code to push the
2556 // function and receiver and have the slow path jump around this
2557 // code.
2558 if (done.is_linked()) {
2559 Label call;
2560 __ B(&call);
2561 __ Bind(&done);
2563 // The receiver is implicitly the global receiver. Indicate this
2564 // by passing the undefined to the call function stub.
2565 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2566 __ Push(x0, x1);
2567 __ Bind(&call);
2568 }
2570 // The receiver is either the global receiver or an object found
2571 // by LoadContextSlot.
2572 EmitCall(expr);
2573 } else if (call_type == Call::PROPERTY_CALL) {
2574 Property* property = callee->AsProperty();
2575 bool is_named_call = property->key()->IsPropertyName();
2576 // super.x() is handled in EmitCallWithLoadIC.
2577 if (property->IsSuperAccess() && is_named_call) {
2578 EmitSuperCallWithLoadIC(expr);
2579 } else {
2580 {
2581 PreservePositionScope scope(masm()->positions_recorder());
2582 VisitForStackValue(property->obj());
2583 }
2584 if (is_named_call) {
2585 EmitCallWithLoadIC(expr);
2586 } else {
2587 EmitKeyedCallWithLoadIC(expr, property->key());
2588 }
2589 }
2590 } else {
2591 DCHECK(call_type == Call::OTHER_CALL);
2592 // Call to an arbitrary expression not handled specially above.
2593 { PreservePositionScope scope(masm()->positions_recorder());
2594 VisitForStackValue(callee);
2595 }
2596 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2597 __ Push(x1);
2598 // Emit function call.
2599 EmitCall(expr);
2600 }
2602 #ifdef DEBUG
2603 // RecordJSReturnSite should have been called.
2604 DCHECK(expr->return_is_recorded_);
2605 #endif
2606 }
2609 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2610 Comment cmnt(masm_, "[ CallNew");
2611 // According to ECMA-262, section 11.2.2, page 44, the function
2612 // expression in new calls must be evaluated before the
2613 // arguments.
2615 // Push constructor on the stack. If it's not a function it's used as
2616 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2617 // ignored.
2618 VisitForStackValue(expr->expression());
2620 // Push the arguments ("left-to-right") on the stack.
2621 ZoneList<Expression*>* args = expr->arguments();
2622 int arg_count = args->length();
2623 for (int i = 0; i < arg_count; i++) {
2624 VisitForStackValue(args->at(i));
2625 }
2627 // Call the construct call builtin that handles allocation and
2628 // constructor invocation.
2629 SetSourcePosition(expr->position());
2631 // Load function and argument count into x1 and x0.
2632 __ Mov(x0, arg_count);
2633 __ Peek(x1, arg_count * kXRegSize);
2635 // Record call targets in unoptimized code.
2636 if (FLAG_pretenuring_call_new) {
2637 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2638 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2639 expr->CallNewFeedbackSlot() + 1);
2640 }
2642 __ LoadObject(x2, FeedbackVector());
2643 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
2645 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2646 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2647 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2648 context()->Plug(x0);
2649 }
2652 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2653 ZoneList<Expression*>* args = expr->arguments();
2654 DCHECK(args->length() == 1);
2656 VisitForAccumulatorValue(args->at(0));
2658 Label materialize_true, materialize_false;
2659 Label* if_true = NULL;
2660 Label* if_false = NULL;
2661 Label* fall_through = NULL;
2662 context()->PrepareTest(&materialize_true, &materialize_false,
2663 &if_true, &if_false, &fall_through);
2665 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
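// TestAndSplit branches to if_true when all the masked bits are clear;
// smis have a zero tag bit, so the all-clear path is the smi path.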
2666 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2668 context()->Plug(if_true, if_false);
2669 }
2672 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2673 ZoneList<Expression*>* args = expr->arguments();
2674 DCHECK(args->length() == 1);
2676 VisitForAccumulatorValue(args->at(0));
2678 Label materialize_true, materialize_false;
2679 Label* if_true = NULL;
2680 Label* if_false = NULL;
2681 Label* fall_through = NULL;
2682 context()->PrepareTest(&materialize_true, &materialize_false,
2683 &if_true, &if_false, &fall_through);
2685 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
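// A value is a non-negative smi iff both the tag bit and the sign bit
// (bit 63) are clear, so one mask tests both conditions at once.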
2687 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2688 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
2690 context()->Plug(if_true, if_false);
2691 }
2694 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2695 ZoneList<Expression*>* args = expr->arguments();
2696 DCHECK(args->length() == 1);
2698 VisitForAccumulatorValue(args->at(0));
2700 Label materialize_true, materialize_false;
2701 Label* if_true = NULL;
2702 Label* if_false = NULL;
2703 Label* fall_through = NULL;
2704 context()->PrepareTest(&materialize_true, &materialize_false,
2705 &if_true, &if_false, &fall_through);
2707 __ JumpIfSmi(x0, if_false);
2708 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2709 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2710 // Undetectable objects behave like undefined when tested with typeof.
2711 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2712 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2713 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2714 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2715 __ B(lt, if_false);
2716 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2717 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2718 Split(le, if_true, if_false, fall_through);
2720 context()->Plug(if_true, if_false);
2721 }
2724 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2725 ZoneList<Expression*>* args = expr->arguments();
2726 DCHECK(args->length() == 1);
2728 VisitForAccumulatorValue(args->at(0));
2730 Label materialize_true, materialize_false;
2731 Label* if_true = NULL;
2732 Label* if_false = NULL;
2733 Label* fall_through = NULL;
2734 context()->PrepareTest(&materialize_true, &materialize_false,
2735 &if_true, &if_false, &fall_through);
2737 __ JumpIfSmi(x0, if_false);
2738 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2739 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2740 Split(ge, if_true, if_false, fall_through);
2742 context()->Plug(if_true, if_false);
2743 }
2746 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2747 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2748 ZoneList<Expression*>* args = expr->arguments();
2749 DCHECK(args->length() == 1);
2751 VisitForAccumulatorValue(args->at(0));
2753 Label materialize_true, materialize_false;
2754 Label* if_true = NULL;
2755 Label* if_false = NULL;
2756 Label* fall_through = NULL;
2757 context()->PrepareTest(&materialize_true, &materialize_false,
2758 &if_true, &if_false, &fall_through);
2760 __ JumpIfSmi(x0, if_false);
2761 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2762 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2763 __ Tst(x11, 1 << Map::kIsUndetectable);
2764 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2765 Split(ne, if_true, if_false, fall_through);
2767 context()->Plug(if_true, if_false);
2768 }
2771 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2772 CallRuntime* expr) {
2773 ZoneList<Expression*>* args = expr->arguments();
2774 DCHECK(args->length() == 1);
2775 VisitForAccumulatorValue(args->at(0));
2777 Label materialize_true, materialize_false, skip_lookup;
2778 Label* if_true = NULL;
2779 Label* if_false = NULL;
2780 Label* fall_through = NULL;
2781 context()->PrepareTest(&materialize_true, &materialize_false,
2782 &if_true, &if_false, &fall_through);
2784 Register object = x0;
2785 __ AssertNotSmi(object);
2787 Register map = x10;
2788 Register bitfield2 = x11;
2789 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2790 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2791 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
2793 // Check for fast case object. Generate false result for slow case object.
2794 Register props = x12;
2795 Register props_map = x12;
2796 Register hash_table_map = x13;
2797 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
2798 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2799 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2800 __ Cmp(props_map, hash_table_map);
2801 __ B(eq, if_false);
2803 // Look for valueOf name in the descriptor array, and indicate false if found.
2804 // Since we omit an enumeration index check, if it is added via a transition
2805 // that shares its descriptor array, this is a false positive.
2806 Label loop, done;
2808 // Skip loop if no descriptors are valid.
2809 Register descriptors = x12;
2810 Register descriptors_length = x13;
2811 __ NumberOfOwnDescriptors(descriptors_length, map);
2812 __ Cbz(descriptors_length, &done);
2814 __ LoadInstanceDescriptors(map, descriptors);
2816 // Calculate the end of the descriptor array.
2817 Register descriptors_end = x14;
2818 __ Mov(x15, DescriptorArray::kDescriptorSize);
2819 __ Mul(descriptors_length, descriptors_length, x15);
2820 // Calculate location of the first key name.
2821 __ Add(descriptors, descriptors,
2822 DescriptorArray::kFirstOffset - kHeapObjectTag);
2823 // Calculate the end of the descriptor array.
2824 __ Add(descriptors_end, descriptors,
2825 Operand(descriptors_length, LSL, kPointerSizeLog2));
2827 // Loop through all the keys in the descriptor array. If one of these is the
2828 // string "valueOf" the result is false.
2829 Register valueof_string = x1;
2830 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
2831 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
2832 __ Bind(&loop);
2833 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
2834 __ Cmp(x15, valueof_string);
2835 __ B(eq, if_false);
2836 __ Cmp(descriptors, descriptors_end);
2837 __ B(ne, &loop);
2839 __ Bind(&done);
2841 // Set the bit in the map to indicate that there is no local valueOf field.
2842 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2843 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2844 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2846 __ Bind(&skip_lookup);
2848 // If a valueOf property is not found on the object check that its prototype
2849 // is the unmodified String prototype. If not result is false.
2850 Register prototype = x1;
2851 Register global_idx = x2;
2852 Register native_context = x2;
2853 Register string_proto = x3;
2854 Register proto_map = x4;
2855 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
2856 __ JumpIfSmi(prototype, if_false);
2857 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2858 __ Ldr(global_idx, GlobalObjectMemOperand());
2859 __ Ldr(native_context,
2860 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
2861 __ Ldr(string_proto,
2862 ContextMemOperand(native_context,
2863 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2864 __ Cmp(proto_map, string_proto);
2866 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2867 Split(eq, if_true, if_false, fall_through);
2869 context()->Plug(if_true, if_false);
2870 }
2873 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2874 ZoneList<Expression*>* args = expr->arguments();
2875 DCHECK(args->length() == 1);
2877 VisitForAccumulatorValue(args->at(0));
2879 Label materialize_true, materialize_false;
2880 Label* if_true = NULL;
2881 Label* if_false = NULL;
2882 Label* fall_through = NULL;
2883 context()->PrepareTest(&materialize_true, &materialize_false,
2884 &if_true, &if_false, &fall_through);
2886 __ JumpIfSmi(x0, if_false);
2887 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
2888 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2889 Split(eq, if_true, if_false, fall_through);
2891 context()->Plug(if_true, if_false);
2892 }
2895 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2896 ZoneList<Expression*>* args = expr->arguments();
2897 DCHECK(args->length() == 1);
2899 VisitForAccumulatorValue(args->at(0));
2901 Label materialize_true, materialize_false;
2902 Label* if_true = NULL;
2903 Label* if_false = NULL;
2904 Label* fall_through = NULL;
2905 context()->PrepareTest(&materialize_true, &materialize_false,
2906 &if_true, &if_false, &fall_through);
2908 // Only a HeapNumber can be -0.0, so return false if we have something else.
2909 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
2911 // Test the bit pattern.
2912 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2913 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
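// Cmp computes x10 - 1; only 0x8000000000000000 (the bit pattern of -0.0)
// overflows that signed subtraction and sets the V flag.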
2915 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2916 Split(vs, if_true, if_false, fall_through);
2918 context()->Plug(if_true, if_false);
2919 }
2922 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2923 ZoneList<Expression*>* args = expr->arguments();
2924 DCHECK(args->length() == 1);
2926 VisitForAccumulatorValue(args->at(0));
2928 Label materialize_true, materialize_false;
2929 Label* if_true = NULL;
2930 Label* if_false = NULL;
2931 Label* fall_through = NULL;
2932 context()->PrepareTest(&materialize_true, &materialize_false,
2933 &if_true, &if_false, &fall_through);
2935 __ JumpIfSmi(x0, if_false);
2936 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2937 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2938 Split(eq, if_true, if_false, fall_through);
2940 context()->Plug(if_true, if_false);
2941 }
2944 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2945 ZoneList<Expression*>* args = expr->arguments();
2946 DCHECK(args->length() == 1);
2948 VisitForAccumulatorValue(args->at(0));
2950 Label materialize_true, materialize_false;
2951 Label* if_true = NULL;
2952 Label* if_false = NULL;
2953 Label* fall_through = NULL;
2954 context()->PrepareTest(&materialize_true, &materialize_false,
2955 &if_true, &if_false, &fall_through);
2957 __ JumpIfSmi(x0, if_false);
2958 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2959 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2960 Split(eq, if_true, if_false, fall_through);
2962 context()->Plug(if_true, if_false);
2963 }
2967 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2968 DCHECK(expr->arguments()->length() == 0);
2970 Label materialize_true, materialize_false;
2971 Label* if_true = NULL;
2972 Label* if_false = NULL;
2973 Label* fall_through = NULL;
2974 context()->PrepareTest(&materialize_true, &materialize_false,
2975 &if_true, &if_false, &fall_through);
2977 // Get the frame pointer for the calling frame.
2978 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2980 // Skip the arguments adaptor frame if it exists.
2981 Label check_frame_marker;
2982 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
2983 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2984 __ B(ne, &check_frame_marker);
2985 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
2987 // Check the marker in the calling frame.
2988 __ Bind(&check_frame_marker);
2989 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
2990 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
2991 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2992 Split(eq, if_true, if_false, fall_through);
2994 context()->Plug(if_true, if_false);
2995 }
2998 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2999 ZoneList<Expression*>* args = expr->arguments();
3000 DCHECK(args->length() == 2);
3002 // Load the two objects into registers and perform the comparison.
3003 VisitForStackValue(args->at(0));
3004 VisitForAccumulatorValue(args->at(1));
3006 Label materialize_true, materialize_false;
3007 Label* if_true = NULL;
3008 Label* if_false = NULL;
3009 Label* fall_through = NULL;
3010 context()->PrepareTest(&materialize_true, &materialize_false,
3011 &if_true, &if_false, &fall_through);
3013 __ Pop(x1);
3014 __ Cmp(x0, x1);
3015 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3016 Split(eq, if_true, if_false, fall_through);
3018 context()->Plug(if_true, if_false);
3019 }
3022 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3023 ZoneList<Expression*>* args = expr->arguments();
3024 DCHECK(args->length() == 1);
3026 // ArgumentsAccessStub expects the key in x1.
3027 VisitForAccumulatorValue(args->at(0));
3028 __ Mov(x1, x0);
3029 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3030 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3031 __ CallStub(&stub);
3032 context()->Plug(x0);
3033 }
3036 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3037 DCHECK(expr->arguments()->length() == 0);
3038 Label exit;
3039 // Get the number of formal parameters.
3040 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3042 // Check if the calling frame is an arguments adaptor frame.
3043 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3044 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3045 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3046 __ B(ne, &exit);
3048 // Arguments adaptor case: Read the arguments length from the
3049 // adaptor frame.
3050 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3052 __ Bind(&exit);
3053 context()->Plug(x0);
3054 }
3057 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3058 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3059 ZoneList<Expression*>* args = expr->arguments();
3060 DCHECK(args->length() == 1);
3061 Label done, null, function, non_function_constructor;
3063 VisitForAccumulatorValue(args->at(0));
3065 // If the object is a smi, we return null.
3066 __ JumpIfSmi(x0, &null);
3068 // Check that the object is a JS object but take special care of JS
3069 // functions to make sure they have 'Function' as their class.
3070 // Assume that there are only two callable types, and one of them is at
3071 // either end of the type range for JS object types. Saves extra comparisons.
3072 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3073 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3074 // x10: object's map.
3075 // x11: object's type.
3076 __ B(lt, &null);
3077 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3078 FIRST_SPEC_OBJECT_TYPE + 1);
3079 __ B(eq, &function);
3081 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3082 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3083 LAST_SPEC_OBJECT_TYPE - 1);
3084 __ B(eq, &function);
3085 // Assume that there is no larger type.
3086 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3088 // Check if the constructor in the map is a JS function.
3089 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3090 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3091 &non_function_constructor);
3093 // x12 now contains the constructor function. Grab the
3094 // instance class name from there.
3095 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3096 __ Ldr(x0,
3097 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3098 __ B(&done);
3100 // Functions have class 'Function'.
3101 __ Bind(&function);
3102 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3103 __ B(&done);
3105 // Objects with a non-function constructor have class 'Object'.
3106 __ Bind(&non_function_constructor);
3107 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3108 __ B(&done);
3110 // Non-JS objects have class null.
3111 __ Bind(&null);
3112 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3114 // All done.
3115 __ Bind(&done);
3117 context()->Plug(x0);
3118 }
3121 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3122 // Load the arguments on the stack and call the stub.
3123 SubStringStub stub(isolate());
3124 ZoneList<Expression*>* args = expr->arguments();
3125 DCHECK(args->length() == 3);
3126 VisitForStackValue(args->at(0));
3127 VisitForStackValue(args->at(1));
3128 VisitForStackValue(args->at(2));
3129 __ CallStub(&stub);
3130 context()->Plug(x0);
3131 }
3134 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3135 // Load the arguments on the stack and call the stub.
3136 RegExpExecStub stub(isolate());
3137 ZoneList<Expression*>* args = expr->arguments();
3138 DCHECK(args->length() == 4);
3139 VisitForStackValue(args->at(0));
3140 VisitForStackValue(args->at(1));
3141 VisitForStackValue(args->at(2));
3142 VisitForStackValue(args->at(3));
3143 __ CallStub(&stub);
3144 context()->Plug(x0);
3145 }
3148 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3149 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3150 ZoneList<Expression*>* args = expr->arguments();
3151 DCHECK(args->length() == 1);
3152 VisitForAccumulatorValue(args->at(0)); // Load the object.
3154 Label done;
3155 // If the object is a smi return the object.
3156 __ JumpIfSmi(x0, &done);
3157 // If the object is not a value type, return the object.
3158 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3159 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3161 __ Bind(&done);
3162 context()->Plug(x0);
3163 }
3166 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3167 ZoneList<Expression*>* args = expr->arguments();
3168 DCHECK(args->length() == 2);
3169 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3170 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3172 VisitForAccumulatorValue(args->at(0)); // Load the object.
3174 Label runtime, done, not_date_object;
3175 Register object = x0;
3176 Register result = x0;
3177 Register stamp_addr = x10;
3178 Register stamp_cache = x11;
3180 __ JumpIfSmi(object, &not_date_object);
3181 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3183 if (index->value() == 0) {
3184 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3185 __ B(&done);
3186 } else {
3187 if (index->value() < JSDate::kFirstUncachedField) {
3188 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3189 __ Mov(x10, stamp);
3190 __ Ldr(stamp_addr, MemOperand(x10));
3191 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3192 __ Cmp(stamp_addr, stamp_cache);
3193 __ B(ne, &runtime);
3194 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3195 kPointerSize * index->value()));
3196 __ B(&done);
3197 }
3199 __ Bind(&runtime);
3200 __ Mov(x1, index);
3201 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3202 __ B(&done);
3203 }
3205 __ Bind(&not_date_object);
3206 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3207 __ Bind(&done);
3208 context()->Plug(x0);
3209 }
3212 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3213 ZoneList<Expression*>* args = expr->arguments();
3214 DCHECK_EQ(3, args->length());
3216 Register string = x0;
3217 Register index = x1;
3218 Register value = x2;
3219 Register scratch = x10;
3221 VisitForStackValue(args->at(0)); // index
3222 VisitForStackValue(args->at(1)); // value
3223 VisitForAccumulatorValue(args->at(2)); // string
3224 __ Pop(value, index);
3226 if (FLAG_debug_code) {
3227 __ AssertSmi(value, kNonSmiValue);
3228 __ AssertSmi(index, kNonSmiIndex);
3229 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3230 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3231 one_byte_seq_type);
3232 }
3234 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3235 __ SmiUntag(value);
3236 __ SmiUntag(index);
3237 __ Strb(value, MemOperand(scratch, index));
3238 context()->Plug(string);
3239 }
3242 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3243 ZoneList<Expression*>* args = expr->arguments();
3244 DCHECK_EQ(3, args->length());
3246 Register string = x0;
3247 Register index = x1;
3248 Register value = x2;
3249 Register scratch = x10;
3251 VisitForStackValue(args->at(0)); // index
3252 VisitForStackValue(args->at(1)); // value
3253 VisitForAccumulatorValue(args->at(2)); // string
3254 __ Pop(value, index);
3256 if (FLAG_debug_code) {
3257 __ AssertSmi(value, kNonSmiValue);
3258 __ AssertSmi(index, kNonSmiIndex);
3259 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3260 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3261 two_byte_seq_type);
3262 }
3264 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3265 __ SmiUntag(value);
3266 __ SmiUntag(index);
3267 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3268 context()->Plug(string);
3269 }
3272 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3273 // Load the arguments on the stack and call the MathPow stub.
3274 ZoneList<Expression*>* args = expr->arguments();
3275 DCHECK(args->length() == 2);
3276 VisitForStackValue(args->at(0));
3277 VisitForStackValue(args->at(1));
3278 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3279 __ CallStub(&stub);
3280 context()->Plug(x0);
3281 }
3284 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3285 ZoneList<Expression*>* args = expr->arguments();
3286 DCHECK(args->length() == 2);
3287 VisitForStackValue(args->at(0)); // Load the object.
3288 VisitForAccumulatorValue(args->at(1)); // Load the value.
3289 __ Pop(x1);
3290 // x0 = value.
3291 // x1 = object.
3293 Label done;
3294 // If the object is a smi, return the value.
3295 __ JumpIfSmi(x1, &done);
3297 // If the object is not a value type, return the value.
3298 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3300 // Store the value.
3301 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3302 // Update the write barrier. Save the value as it will be
3303 // overwritten by the write barrier code and is needed afterward.
3304 __ Mov(x10, x0);
3305 __ RecordWriteField(
3306 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3308 __ Bind(&done);
3309 context()->Plug(x0);
3310 }
3313 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3314 ZoneList<Expression*>* args = expr->arguments();
3315 DCHECK_EQ(args->length(), 1);
3317 // Load the argument into x0 and call the stub.
3318 VisitForAccumulatorValue(args->at(0));
3320 NumberToStringStub stub(isolate());
3321 __ CallStub(&stub);
3322 context()->Plug(x0);
3323 }
3326 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3327 ZoneList<Expression*>* args = expr->arguments();
3328 DCHECK(args->length() == 1);
3330 VisitForAccumulatorValue(args->at(0));
3332 Label done;
3333 Register code = x0;
3334 Register result = x1;
3336 StringCharFromCodeGenerator generator(code, result);
3337 generator.GenerateFast(masm_);
3338 __ B(&done);
3340 NopRuntimeCallHelper call_helper;
3341 generator.GenerateSlow(masm_, call_helper);
3343 __ Bind(&done);
3344 context()->Plug(result);
3345 }
3348 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3349 ZoneList<Expression*>* args = expr->arguments();
3350 DCHECK(args->length() == 2);
3352 VisitForStackValue(args->at(0));
3353 VisitForAccumulatorValue(args->at(1));
3355 Register object = x1;
3356 Register index = x0;
3357 Register result = x3;
3359 __ Pop(object);
3361 Label need_conversion;
3362 Label index_out_of_range;
3363 Label done;
3364 StringCharCodeAtGenerator generator(object,
3365 index,
3366 result,
3367 &need_conversion,
3368 &need_conversion,
3369 &index_out_of_range,
3370 STRING_INDEX_IS_NUMBER);
3371 generator.GenerateFast(masm_);
3372 __ B(&done);
3374 __ Bind(&index_out_of_range);
3375 // When the index is out of range, the spec requires us to return NaN.
3376 __ LoadRoot(result, Heap::kNanValueRootIndex);
3377 __ B(&done);
3379 __ Bind(&need_conversion);
3380 // Load the undefined value into the result register, which will
3381 // trigger conversion.
3382 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3383 __ B(&done);
3385 NopRuntimeCallHelper call_helper;
3386 generator.GenerateSlow(masm_, call_helper);
3388 __ Bind(&done);
3389 context()->Plug(result);
3390 }
3393 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3394 ZoneList<Expression*>* args = expr->arguments();
3395 DCHECK(args->length() == 2);
3397 VisitForStackValue(args->at(0));
3398 VisitForAccumulatorValue(args->at(1));
3400 Register object = x1;
3401 Register index = x0;
3402 Register result = x0;
3404 __ Pop(object);
3406 Label need_conversion;
3407 Label index_out_of_range;
3408 Label done;
3409 StringCharAtGenerator generator(object,
3410 index,
3411 x3,
3412 result,
3413 &need_conversion,
3414 &need_conversion,
3415 &index_out_of_range,
3416 STRING_INDEX_IS_NUMBER);
3417 generator.GenerateFast(masm_);
3418 __ B(&done);
3420 __ Bind(&index_out_of_range);
3421 // When the index is out of range, the spec requires us to return
3422 // the empty string.
3423 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3424 __ B(&done);
3426 __ Bind(&need_conversion);
3427 // Move smi zero into the result register, which will trigger conversion.
3428 __ Mov(result, Smi::FromInt(0));
3429 __ B(&done);
3431 NopRuntimeCallHelper call_helper;
3432 generator.GenerateSlow(masm_, call_helper);
3434 __ Bind(&done);
3435 context()->Plug(result);
3436 }
3439 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3440 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3441 ZoneList<Expression*>* args = expr->arguments();
3442 DCHECK_EQ(2, args->length());
3444 VisitForStackValue(args->at(0));
3445 VisitForAccumulatorValue(args->at(1));
3447 __ Pop(x1);
3448 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3449 __ CallStub(&stub);
3451 context()->Plug(x0);
3452 }
3455 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3456 ZoneList<Expression*>* args = expr->arguments();
3457 DCHECK_EQ(2, args->length());
3458 VisitForStackValue(args->at(0));
3459 VisitForStackValue(args->at(1));
3461 StringCompareStub stub(isolate());
3462 __ CallStub(&stub);
3463 context()->Plug(x0);
3464 }
3467 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3468 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3469 ZoneList<Expression*>* args = expr->arguments();
3470 DCHECK(args->length() >= 2);
3472 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3473 for (int i = 0; i < arg_count + 1; i++) {
3474 VisitForStackValue(args->at(i));
3476 VisitForAccumulatorValue(args->last()); // Function.
3478 Label runtime, done;
3479 // Check for non-function argument (including proxy).
3480 __ JumpIfSmi(x0, &runtime);
3481 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3483 // InvokeFunction requires the function in x1. Move it in there.
3484 __ Mov(x1, x0);
3485 ParameterCount count(arg_count);
3486 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3487 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3488 __ B(&done);
3490 __ Bind(&runtime);
3491 __ Push(x0);
3492 __ CallRuntime(Runtime::kCall, args->length());
3493 __ Bind(&done);
3495 context()->Plug(x0);
3496 }
3499 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3500 RegExpConstructResultStub stub(isolate());
3501 ZoneList<Expression*>* args = expr->arguments();
3502 DCHECK(args->length() == 3);
3503 VisitForStackValue(args->at(0));
3504 VisitForStackValue(args->at(1));
3505 VisitForAccumulatorValue(args->at(2));
3506 __ Pop(x1, x2);
3507 __ CallStub(&stub);
3508 context()->Plug(x0);
3509 }
3512 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3513 ZoneList<Expression*>* args = expr->arguments();
3514 DCHECK_EQ(2, args->length());
3515 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3516 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3518 Handle<FixedArray> jsfunction_result_caches(
3519 isolate()->native_context()->jsfunction_result_caches());
3520 if (jsfunction_result_caches->length() <= cache_id) {
3521 __ Abort(kAttemptToUseUndefinedCache);
3522 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3523 context()->Plug(x0);
3524 return;
3525 }
3527 VisitForAccumulatorValue(args->at(1));
3529 Register key = x0;
3530 Register cache = x1;
3531 __ Ldr(cache, GlobalObjectMemOperand());
3532 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3533 __ Ldr(cache, ContextMemOperand(cache,
3534 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3535 __ Ldr(cache,
3536 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3538 Label done;
3539 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3540 JSFunctionResultCache::kFingerOffset));
3541 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3542 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3544 // Load the key and data from the cache.
3545 __ Ldp(x2, x3, MemOperand(x3));
3547 __ Cmp(key, x2);
3548 __ CmovX(x0, x3, eq);
3549 __ B(eq, &done);
3551 // Call runtime to perform the lookup.
3552 __ Push(cache, key);
3553 __ CallRuntime(Runtime::kGetFromCache, 2);
3555 __ Bind(&done);
3556 context()->Plug(x0);
3557 }
3560 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3561 ZoneList<Expression*>* args = expr->arguments();
3562 VisitForAccumulatorValue(args->at(0));
3564 Label materialize_true, materialize_false;
3565 Label* if_true = NULL;
3566 Label* if_false = NULL;
3567 Label* fall_through = NULL;
3568 context()->PrepareTest(&materialize_true, &materialize_false,
3569 &if_true, &if_false, &fall_through);
3571 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3572 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3573 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3574 Split(eq, if_true, if_false, fall_through);
3576 context()->Plug(if_true, if_false);
3577 }
3580 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3581 ZoneList<Expression*>* args = expr->arguments();
3582 DCHECK(args->length() == 1);
3583 VisitForAccumulatorValue(args->at(0));
3585 __ AssertString(x0);
3587 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3588 __ IndexFromHash(x10, x0);
3590 context()->Plug(x0);
3591 }
3594 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3595 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
3597 ZoneList<Expression*>* args = expr->arguments();
3598 DCHECK(args->length() == 2);
3599 VisitForStackValue(args->at(1));
3600 VisitForAccumulatorValue(args->at(0));
3602 Register array = x0;
3603 Register result = x0;
3604 Register elements = x1;
3605 Register element = x2;
3606 Register separator = x3;
3607 Register array_length = x4;
3608 Register result_pos = x5;
3609 Register map = x6;
3610 Register string_length = x10;
3611 Register elements_end = x11;
3612 Register string = x12;
3613 Register scratch1 = x13;
3614 Register scratch2 = x14;
3615 Register scratch3 = x7;
3616 Register separator_length = x15;
3618 Label bailout, done, one_char_separator, long_separator,
3619 non_trivial_array, not_size_one_array, loop,
3620 empty_separator_loop, one_char_separator_loop,
3621 one_char_separator_loop_entry, long_separator_loop;
3623 // The separator operand is on the stack.
3624 __ Pop(separator);
3626 // Check that the array is a JSArray.
3627 __ JumpIfSmi(array, &bailout);
3628 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3630 // Check that the array has fast elements.
3631 __ CheckFastElements(map, scratch1, &bailout);
3633 // If the array has length zero, return the empty string.
3634 // Load and untag the length of the array.
3635 // It is an unsigned value, so we can skip sign extension.
3636 // We assume little endianness.
3637 __ Ldrsw(array_length,
3638 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
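// On little-endian, UntagSmiFieldMemOperand addresses the upper word of
// the smi field directly, so Ldrsw reads the already-untagged length.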
3639 __ Cbnz(array_length, &non_trivial_array);
3640 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3641 __ B(&done);
3643 __ Bind(&non_trivial_array);
3644 // Get the FixedArray containing array's elements.
3645 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3647 // Check that all array elements are sequential one-byte strings, and
3648 // accumulate the sum of their lengths.
3649 __ Mov(string_length, 0);
3650 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3651 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3652 // Loop condition: while (element < elements_end).
3653 // Live values in registers:
3654 // elements: Fixed array of strings.
3655 // array_length: Length of the fixed array of strings (not smi)
3656 // separator: Separator string
3657 // string_length: Accumulated sum of string lengths (not smi).
3658 // element: Current array element.
3659 // elements_end: Array end.
3660 if (FLAG_debug_code) {
3661 __ Cmp(array_length, 0);
3662 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3663 }
3664 __ Bind(&loop);
3665 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3666 __ JumpIfSmi(string, &bailout);
3667 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3668 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3669 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3670 __ Ldrsw(scratch1,
3671 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3672 __ Adds(string_length, string_length, scratch1);
3673 __ B(vs, &bailout);
3674 __ Cmp(element, elements_end);
3675 __ B(lt, &loop);
3677 // If array_length is 1, return elements[0], a string.
3678 __ Cmp(array_length, 1);
3679 __ B(ne, &not_size_one_array);
3680 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3681 __ B(&done);
3683 __ Bind(&not_size_one_array);
3685 // Live values in registers:
3686 // separator: Separator string
3687 // array_length: Length of the array (not smi).
3688 // string_length: Sum of string lengths (not smi).
3689 // elements: FixedArray of strings.
3691 // Check that the separator is a flat one-byte string.
3692 __ JumpIfSmi(separator, &bailout);
3693 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3694 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3695 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3697 // Add (separator length times array_length) - separator length to the
3698 // string_length to get the length of the result string.
3699 // Load the separator length as untagged.
3700 // We assume little endianness, and that the length is positive.
3701 __ Ldrsw(separator_length,
3702 UntagSmiFieldMemOperand(separator,
3703 SeqOneByteString::kLengthOffset));
3704 __ Sub(string_length, string_length, separator_length);
3705 __ Umaddl(string_length, array_length.W(), separator_length.W(),
3706 string_length);
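// Net effect: sum of string lengths plus (array_length - 1) separator
// lengths; the Sub above removed one copy and Umaddl adds back
// array_length copies.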
3708 // Get first element in the array.
3709 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3710 // Live values in registers:
3711 // element: First array element
3712 // separator: Separator string
3713 // string_length: Length of result string (not smi)
3714 // array_length: Length of the array (not smi).
3715 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
3716 &bailout);
3718 // Prepare for looping. Set up elements_end to end of the array. Set
3719 // result_pos to the position of the result where to write the first
3720 // character.
3721 // TODO(all): useless unless AllocateOneByteString trashes the register.
3722 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3723 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3725 // Check the length of the separator.
3726 __ Cmp(separator_length, 1);
3727 __ B(eq, &one_char_separator);
3728 __ B(gt, &long_separator);
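  // A zero-length separator falls through to the empty separator loop below;
  // only lengths of exactly one and of two or more take the special-cased
  // paths above.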
  // Empty separator case.
  __ Bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);
  // One-character separator case.
  __ Bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ B(&one_char_separator_loop_entry);
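  // For example, joining ["a", "b", "c"] with "," copies "a" on the first
  // iteration and then ",b" and ",c", producing "a,b,c" with no leading
  // separator.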
  __ Bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ Bind(&one_char_separator_loop_entry);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);
  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ Bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  // TODO(all): hoist next two instructions.
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(separator, String::kLengthOffset));
  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);  // Tag the raw flag byte so the context receives a smi.
  context()->Plug(x0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(LoadDescriptor::ReceiverRegister(),
           FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
    __ Push(LoadDescriptor::ReceiverRegister());

    // Load the function from the receiver.
    Handle<String> name = expr->name();
    __ Mov(LoadDescriptor::NameRegister(), Operand(name));
    if (FLAG_vector_ics) {
      __ Mov(VectorLoadICDescriptor::SlotRegister(),
             Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);

    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kPointerSize);
    __ CallStub(&stub);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, x0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Mov(x10, Smi::FromInt(strict_mode()));
        __ Push(x10);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode,
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Mov(x10, Smi::FromInt(SLOPPY));
          __ Push(x12, x11, x10);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects, so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      EmitNamedPropertyLoad(prop);
    } else {
      // KEYED_PROPERTY
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
      __ Peek(LoadDescriptor::NameRegister(), 0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property,
        // we store the result under the receiver that is currently on top of
        // the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
        }
      }
    }

    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
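    // The Adds/Sub pair operates directly on the tagged representation:
    // Smi::FromInt(count_value) is the raw bit pattern of the smi, so the
    // add is a plain integer add, and a signed overflow (vs) means the
    // result no longer fits in a smi; the inline add is then undone and the
    // generic BinaryOpIC stub below performs the operation instead.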
    __ B(&stub_call);
    __ Bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property,
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code =
        CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ Pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::NameRegister());
      __ Pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ Mov(VectorLoadICDescriptor::SlotRegister(),
             Smi::FromInt(proxy->VariableFeedbackSlot()));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(x0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    __ Mov(x0, Operand(proxy->name()));
    __ Push(cp, x0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ Bind(&done);

    context()->Plug(x0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
    // Check for undetectable objects => false.
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
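    // Note: undetectable objects (such as document.all) must answer
    // "undefined" from typeof, so the kIsUndetectable check above sends them
    // to if_false; the undefined_string branch below accepts them instead.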
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little on bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&continuation);
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }
    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                         // iter
      __ Push(load_name, x3, x0);                  // "throw", iter, except
      __ B(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);  // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ Push(x0);  // result
      __ B(&l_suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&l_continuation);
      __ B(&l_resume);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);  // g
      DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Pop(x0);  // result
      EmitReturnSequence();
      __ Bind(&l_resume);  // received in x0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ Peek(x3, 1 * kPointerSize);                        // iter
      __ Push(load_name, x3, x0);                 // "next", iter, received

      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(load_receiver, 1 * kPointerSize);
      __ Peek(load_name, 2 * kPointerSize);
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
      }
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Move(load_receiver, x0);

      __ Push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               Smi::FromInt(expr->DoneFeedbackSlot()));
      }
      CallLoadIC(NOT_CONTEXTUAL);  // x0=result.done
      // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Cbz(x0, &l_try);

      // result.value
      __ Pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               Smi::FromInt(expr->ValueFeedbackSlot()));
      }
      CallLoadIC(NOT_CONTEXTUAL);     // x0=result.value
      context()->DropAndPlug(2, x0);  // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register value_reg = x0;
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  // The value stays in x0, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // x1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ Pop(generator_object);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(generator_object,
                             JSGeneratorObject::kContextOffset));
  __ Ldr(function, FieldMemOperand(generator_object,
                                   JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);

  // Push holes for the rest of the arguments to the generator function.
  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));

  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
  // extension to correctly handle it. However, in this case, we operate on
  // 32-bit W registers, so extension isn't required.
  __ Ldr(w10, FieldMemOperand(x10,
                              SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);
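  // For example, resuming function* f(a, b) pushes the receiver and then two
  // hole values, recreating the argument slots of the suspended frame.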
  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
  __ Push(lr,         // Return address.
          fp,         // Caller's frame pointer.
          cp,         // Callee's context.
          function);  // Callee's JS function.
  __ Add(fp, __ StackPointer(), kPointerSize * 2);

  // Load and untag the operand stack size.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kOperandStackOffset));
  __ Ldr(operand_stack_size,
         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
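    // x10 is now the code entry plus the saved continuation offset, i.e. the
    // address of the instruction just after the call that suspended the
    // generator; branching there resumes execution in place.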
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
    __ Br(x10);

    __ Bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to
  // fix up the stack and the handlers.
  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ Mov(x10, Smi::FromInt(resume_mode));
  __ Push(generator_object, result_register(), x10);
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  // Reach here when generator is closed.
  __ Bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    __ Push(x10);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ Push(value_reg);
    __ CallRuntime(Runtime::kThrow, 1);
  }
  __ B(&done);

  // Throw error if we attempt to operate on a running generator.
  __ Bind(&wrong_state);
  __ Push(generator_object);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ Bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Ldr(context_register(),
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ Bind(&allocated);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ Mov(map_reg, Operand(map));
  __ Pop(result_value);
  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
                JSGeneratorObject::kResultDonePropertyOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result,
                    JSGeneratorObject::kResultValuePropertyOffset));
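  // The object is five contiguous words: map, properties, elements, value,
  // done. The STATIC_ASSERTs above are what allow the two Stp instructions
  // to initialize the four non-map fields with paired stores.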
  // Only the value field needs a write barrier, as the other values are in
  // the root set.
  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  context()->Plug(result);
}


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (eg. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    DCHECK(kSmiTag == 0);
    __ Push(xzr);
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
    __ Push(x10);
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(x10);
  }
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  DCHECK(!result_register().is(x10));
  // Preserve the result register while executing finally block.
  // Also cook the return address in lr to the stack (smi encoded Code* delta).
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);
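  // Keeping lr on the stack as a smi-encoded offset from the start of the
  // code object, rather than as a raw code address, keeps the slot valid
  // even if the GC moves the code: ExitFinallyBlock rebuilds the address
  // against the (possibly relocated) code object.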
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ Mov(x11, has_pending_message);
  __ Ldrb(x11, MemOperand(x11));
  __ SmiTag(x11);

  __ Push(x10, x11);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x10, pending_message_script);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  DCHECK(!result_register().is(x10));

  // Restore pending message from stack.
  __ Pop(x10, x11, x12);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x13, pending_message_script);
  __ Str(x10, MemOperand(x13));

  __ SmiUntag(x11);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x13, has_pending_message);
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ Strb(x11, MemOperand(x13));

  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x12, MemOperand(x13));

  // Restore result register and cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  PatchingAssembler patcher(branch_address, 1);
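  // The back edge sequence is <b.pl ok; ldr x16, <addr>; blr x16> and pc
  // points just past the blr, so the instruction to patch sits three
  // instructions back.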
  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..       b.pl ok
      //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..       blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  .. .. .. ..       mov x0, x0 (NOP)
      //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..       blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    }
  }

  return INTERRUPT;
}


#undef __
#define __ ACCESS_MASM(masm())
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ Peek(cp, StackHandlerConstants::kContextOffset);
    __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64