1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_ARM64
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
20 #include "src/arm64/code-stubs-arm64.h"
21 #include "src/arm64/macro-assembler-arm64.h"
26 #define __ ACCESS_MASM(masm_)
28 class JumpPatchSite BASE_EMBEDDED {
30 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
32 info_emitted_ = false;
37 if (patch_site_.is_bound()) {
38 DCHECK(info_emitted_);
40 DCHECK(reg_.IsNone());
44 void EmitJumpIfNotSmi(Register reg, Label* target) {
45 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
46 InstructionAccurateScope scope(masm_, 1);
47 DCHECK(!info_emitted_);
48 DCHECK(reg.Is64Bits());
51 __ bind(&patch_site_);
52 __ tbz(xzr, 0, target); // Always taken before patched.
55 void EmitJumpIfSmi(Register reg, Label* target) {
56 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
57 InstructionAccurateScope scope(masm_, 1);
58 DCHECK(!info_emitted_);
59 DCHECK(reg.Is64Bits());
62 __ bind(&patch_site_);
63 __ tbnz(xzr, 0, target); // Never taken before patched.
66 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
67 UseScratchRegisterScope temps(masm_);
68 Register temp = temps.AcquireX();
69 __ Orr(temp, reg1, reg2);
70 EmitJumpIfNotSmi(temp, target);
73 void EmitPatchInfo() {
74 Assembler::BlockPoolsScope scope(masm_);
75 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
82 MacroAssembler* masm_;
91 // Generate code for a JS function. On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right. The actual
93 // argument count matches the formal parameter count expected by the
96 // The live registers are:
97 // - x1: the JS function object being called (i.e. ourselves).
99 // - fp: our caller's frame pointer.
100 // - jssp: stack pointer.
101 // - lr: return address.
103 // The function builds a JS frame. See JavaScriptFrameConstants in
104 // frames-arm.h for its layout.
105 void FullCodeGenerator::Generate() {
106 CompilationInfo* info = info_;
108 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
110 profiling_counter_ = isolate()->factory()->NewCell(
111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
112 SetFunctionPosition(function());
113 Comment cmnt(masm_, "[ Function compiled by full code generator");
115 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
118 if (strlen(FLAG_stop_at) > 0 &&
119 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
120 __ Debug("stop-at", __LINE__, BREAK);
124 // Sloppy mode functions and builtins need to replace the receiver with the
125 // global proxy when called as functions (without an explicit receiver
127 if (is_sloppy(info->language_mode()) && !info->is_native()) {
129 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
130 __ Peek(x10, receiver_offset);
131 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
133 __ Ldr(x10, GlobalObjectMemOperand());
134 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
135 __ Poke(x10, receiver_offset);
141 // Open a frame scope to indicate that there is a frame on the stack.
142 // The MANUAL indicates that the scope shouldn't actually generate code
143 // to set up the frame because we do it manually below.
144 FrameScope frame_scope(masm_, StackFrame::MANUAL);
146 // This call emits the following sequence in a way that can be patched for
147 // code ageing support:
148 // Push(lr, fp, cp, x1);
149 // Add(fp, jssp, 2 * kPointerSize);
150 info->set_prologue_offset(masm_->pc_offset());
151 __ Prologue(info->IsCodePreAgingActive());
152 info->AddNoFrameRange(0, masm_->pc_offset());
154 // Reserve space on the stack for locals.
155 { Comment cmnt(masm_, "[ Allocate locals");
156 int locals_count = info->scope()->num_stack_slots();
157 // Generators allocate locals, if any, in context slots.
158 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
160 if (locals_count > 0) {
161 if (locals_count >= 128) {
163 DCHECK(jssp.Is(__ StackPointer()));
164 __ Sub(x10, jssp, locals_count * kPointerSize);
165 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
167 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
170 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
171 if (FLAG_optimize_for_size) {
172 __ PushMultipleTimes(x10 , locals_count);
174 const int kMaxPushes = 32;
175 if (locals_count >= kMaxPushes) {
176 int loop_iterations = locals_count / kMaxPushes;
177 __ Mov(x3, loop_iterations);
179 __ Bind(&loop_header);
181 __ PushMultipleTimes(x10 , kMaxPushes);
183 __ B(ne, &loop_header);
185 int remaining = locals_count % kMaxPushes;
186 // Emit the remaining pushes.
187 __ PushMultipleTimes(x10 , remaining);
192 bool function_in_register_x1 = true;
194 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
195 if (heap_slots > 0) {
196 // Argument to NewContext is the function, which is still in x1.
197 Comment cmnt(masm_, "[ Allocate context");
198 bool need_write_barrier = true;
199 if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
200 __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
202 __ CallRuntime(Runtime::kNewScriptContext, 2);
203 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
204 FastNewContextStub stub(isolate(), heap_slots);
206 // Result of FastNewContextStub is always in new space.
207 need_write_barrier = false;
210 __ CallRuntime(Runtime::kNewFunctionContext, 1);
212 function_in_register_x1 = false;
213 // Context is returned in x0. It replaces the context passed to us.
214 // It's saved in the stack and kept live in cp.
216 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
217 // Copy any necessary parameters into the context.
218 int num_parameters = info->scope()->num_parameters();
219 for (int i = 0; i < num_parameters; i++) {
220 Variable* var = scope()->parameter(i);
221 if (var->IsContextSlot()) {
222 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223 (num_parameters - 1 - i) * kPointerSize;
224 // Load parameter from stack.
225 __ Ldr(x10, MemOperand(fp, parameter_offset));
226 // Store it in the context.
227 MemOperand target = ContextMemOperand(cp, var->index());
230 // Update the write barrier.
231 if (need_write_barrier) {
232 __ RecordWriteContextSlot(
233 cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
234 } else if (FLAG_debug_code) {
236 __ JumpIfInNewSpace(cp, &done);
237 __ Abort(kExpectedNewSpaceObject);
244 // Possibly allocate RestParameters
246 Variable* rest_param = scope()->rest_parameter(&rest_index);
248 Comment cmnt(masm_, "[ Allocate rest parameter array");
250 int num_parameters = info->scope()->num_parameters();
251 int offset = num_parameters * kPointerSize;
252 __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
253 __ Mov(x2, Smi::FromInt(num_parameters));
254 __ Mov(x1, Smi::FromInt(rest_index));
257 RestParamAccessStub stub(isolate());
260 SetVar(rest_param, x0, x1, x2);
263 Variable* arguments = scope()->arguments();
264 if (arguments != NULL) {
265 // Function uses arguments object.
266 Comment cmnt(masm_, "[ Allocate arguments object");
267 if (!function_in_register_x1) {
268 // Load this again, if it's used by the local context below.
269 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
273 // Receiver is just before the parameters on the caller's stack.
274 int num_parameters = info->scope()->num_parameters();
275 int offset = num_parameters * kPointerSize;
276 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
277 __ Mov(x1, Smi::FromInt(num_parameters));
280 // Arguments to ArgumentsAccessStub:
281 // function, receiver address, parameter count.
282 // The stub will rewrite receiver and parameter count if the previous
283 // stack frame was an arguments adapter frame.
284 ArgumentsAccessStub::HasNewTarget has_new_target =
285 IsSubclassConstructor(info->function()->kind())
286 ? ArgumentsAccessStub::HAS_NEW_TARGET
287 : ArgumentsAccessStub::NO_NEW_TARGET;
288 ArgumentsAccessStub::Type type;
289 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
290 type = ArgumentsAccessStub::NEW_STRICT;
291 } else if (function()->has_duplicate_parameters()) {
292 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
294 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
296 ArgumentsAccessStub stub(isolate(), type, has_new_target);
299 SetVar(arguments, x0, x1, x2);
303 __ CallRuntime(Runtime::kTraceEnter, 0);
307 // Visit the declarations and body unless there is an illegal
309 if (scope()->HasIllegalRedeclaration()) {
310 Comment cmnt(masm_, "[ Declarations");
311 scope()->VisitIllegalRedeclaration(this);
314 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
315 { Comment cmnt(masm_, "[ Declarations");
316 if (scope()->is_function_scope() && scope()->function() != NULL) {
317 VariableDeclaration* function = scope()->function();
318 DCHECK(function->proxy()->var()->mode() == CONST ||
319 function->proxy()->var()->mode() == CONST_LEGACY);
320 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
321 VisitVariableDeclaration(function);
323 VisitDeclarations(scope()->declarations());
327 Comment cmnt(masm_, "[ Stack check");
328 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
330 DCHECK(jssp.Is(__ StackPointer()));
331 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
333 PredictableCodeSizeScope predictable(masm_,
334 Assembler::kCallSizeWithRelocation);
335 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
340 Comment cmnt(masm_, "[ Body");
341 DCHECK(loop_depth() == 0);
342 VisitStatements(function()->body());
343 DCHECK(loop_depth() == 0);
347 // Always emit a 'return undefined' in case control fell off the end of
349 { Comment cmnt(masm_, "[ return <undefined>;");
350 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
352 EmitReturnSequence();
354 // Force emission of the pools, so they don't get emitted in the middle
355 // of the back edge table.
356 masm()->CheckVeneerPool(true, false);
357 masm()->CheckConstPool(true, false);
361 void FullCodeGenerator::ClearAccumulator() {
362 __ Mov(x0, Smi::FromInt(0));
366 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
367 __ Mov(x2, Operand(profiling_counter_));
368 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
369 __ Subs(x3, x3, Smi::FromInt(delta));
370 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
374 void FullCodeGenerator::EmitProfilingCounterReset() {
375 int reset_value = FLAG_interrupt_budget;
376 if (info_->is_debug()) {
377 // Detect debug break requests as soon as possible.
378 reset_value = FLAG_interrupt_budget >> 4;
380 __ Mov(x2, Operand(profiling_counter_));
381 __ Mov(x3, Smi::FromInt(reset_value));
382 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
386 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
387 Label* back_edge_target) {
388 DCHECK(jssp.Is(__ StackPointer()));
389 Comment cmnt(masm_, "[ Back edge bookkeeping");
390 // Block literal pools whilst emitting back edge code.
391 Assembler::BlockPoolsScope block_const_pool(masm_);
394 DCHECK(back_edge_target->is_bound());
395 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
396 // to reduce the absolute error due to the integer division. To do that,
397 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
400 masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
401 int weight = Min(kMaxBackEdgeWeight,
402 Max(1, distance / kCodeSizeMultiplier));
403 EmitProfilingCounterDecrement(weight);
405 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
407 // Record a mapping of this PC offset to the OSR id. This is used to find
408 // the AST id from the unoptimized code in order to use it as a key into
409 // the deoptimization input data found in the optimized code.
410 RecordBackEdge(stmt->OsrEntryId());
412 EmitProfilingCounterReset();
415 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
416 // Record a mapping of the OSR id to this PC. This is used if the OSR
417 // entry becomes the target of a bailout. We don't expect it to be, but
418 // we want it to work if it is.
419 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
423 void FullCodeGenerator::EmitReturnSequence() {
424 Comment cmnt(masm_, "[ Return sequence");
426 if (return_label_.is_bound()) {
427 __ B(&return_label_);
430 __ Bind(&return_label_);
432 // Push the return value on the stack as the parameter.
433 // Runtime::TraceExit returns its parameter in x0.
434 __ Push(result_register());
435 __ CallRuntime(Runtime::kTraceExit, 1);
436 DCHECK(x0.Is(result_register()));
438 // Pretend that the exit is a backwards jump to the entry.
440 if (info_->ShouldSelfOptimize()) {
441 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
443 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
444 weight = Min(kMaxBackEdgeWeight,
445 Max(1, distance / kCodeSizeMultiplier));
447 EmitProfilingCounterDecrement(weight);
451 __ Call(isolate()->builtins()->InterruptCheck(),
452 RelocInfo::CODE_TARGET);
454 EmitProfilingCounterReset();
457 // Make sure that the constant pool is not emitted inside of the return
458 // sequence. This sequence can get patched when the debugger is used. See
459 // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
461 InstructionAccurateScope scope(masm_,
462 Assembler::kJSRetSequenceInstructions);
463 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
465 // This code is generated using Assembler methods rather than Macro
466 // Assembler methods because it will be patched later on, and so the size
467 // of the generated code must be consistent.
468 const Register& current_sp = __ StackPointer();
469 // Nothing ensures 16 bytes alignment here.
470 DCHECK(!current_sp.Is(csp));
471 __ mov(current_sp, fp);
472 int no_frame_start = masm_->pc_offset();
473 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
474 // Drop the arguments and receiver and return.
475 // TODO(all): This implementation is overkill as it supports 2**31+1
476 // arguments, consider how to improve it without creating a security
478 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
479 __ add(current_sp, current_sp, ip0);
481 int32_t arg_count = info_->scope()->num_parameters() + 1;
482 if (IsSubclassConstructor(info_->function()->kind())) {
485 __ dc64(kXRegSize * arg_count);
486 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
492 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
493 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
497 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
498 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
499 codegen()->GetVar(result_register(), var);
503 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
504 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
505 codegen()->GetVar(result_register(), var);
506 __ Push(result_register());
510 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
511 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
512 // For simplicity we always test the accumulator register.
513 codegen()->GetVar(result_register(), var);
514 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
515 codegen()->DoTest(this);
519 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
520 // Root values have no side effects.
524 void FullCodeGenerator::AccumulatorValueContext::Plug(
525 Heap::RootListIndex index) const {
526 __ LoadRoot(result_register(), index);
530 void FullCodeGenerator::StackValueContext::Plug(
531 Heap::RootListIndex index) const {
532 __ LoadRoot(result_register(), index);
533 __ Push(result_register());
537 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
538 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
540 if (index == Heap::kUndefinedValueRootIndex ||
541 index == Heap::kNullValueRootIndex ||
542 index == Heap::kFalseValueRootIndex) {
543 if (false_label_ != fall_through_) __ B(false_label_);
544 } else if (index == Heap::kTrueValueRootIndex) {
545 if (true_label_ != fall_through_) __ B(true_label_);
547 __ LoadRoot(result_register(), index);
548 codegen()->DoTest(this);
553 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
557 void FullCodeGenerator::AccumulatorValueContext::Plug(
558 Handle<Object> lit) const {
559 __ Mov(result_register(), Operand(lit));
563 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
564 // Immediates cannot be pushed directly.
565 __ Mov(result_register(), Operand(lit));
566 __ Push(result_register());
570 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
571 codegen()->PrepareForBailoutBeforeSplit(condition(),
575 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
576 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
577 if (false_label_ != fall_through_) __ B(false_label_);
578 } else if (lit->IsTrue() || lit->IsJSObject()) {
579 if (true_label_ != fall_through_) __ B(true_label_);
580 } else if (lit->IsString()) {
581 if (String::cast(*lit)->length() == 0) {
582 if (false_label_ != fall_through_) __ B(false_label_);
584 if (true_label_ != fall_through_) __ B(true_label_);
586 } else if (lit->IsSmi()) {
587 if (Smi::cast(*lit)->value() == 0) {
588 if (false_label_ != fall_through_) __ B(false_label_);
590 if (true_label_ != fall_through_) __ B(true_label_);
593 // For simplicity we always test the accumulator register.
594 __ Mov(result_register(), Operand(lit));
595 codegen()->DoTest(this);
600 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
601 Register reg) const {
607 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
609 Register reg) const {
612 __ Move(result_register(), reg);
616 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
617 Register reg) const {
619 if (count > 1) __ Drop(count - 1);
624 void FullCodeGenerator::TestContext::DropAndPlug(int count,
625 Register reg) const {
627 // For simplicity we always test the accumulator register.
629 __ Mov(result_register(), reg);
630 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
631 codegen()->DoTest(this);
635 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
636 Label* materialize_false) const {
637 DCHECK(materialize_true == materialize_false);
638 __ Bind(materialize_true);
642 void FullCodeGenerator::AccumulatorValueContext::Plug(
643 Label* materialize_true,
644 Label* materialize_false) const {
646 __ Bind(materialize_true);
647 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
649 __ Bind(materialize_false);
650 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
655 void FullCodeGenerator::StackValueContext::Plug(
656 Label* materialize_true,
657 Label* materialize_false) const {
659 __ Bind(materialize_true);
660 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
662 __ Bind(materialize_false);
663 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
669 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
670 Label* materialize_false) const {
671 DCHECK(materialize_true == true_label_);
672 DCHECK(materialize_false == false_label_);
676 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
680 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
681 Heap::RootListIndex value_root_index =
682 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
683 __ LoadRoot(result_register(), value_root_index);
687 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
688 Heap::RootListIndex value_root_index =
689 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
690 __ LoadRoot(x10, value_root_index);
695 void FullCodeGenerator::TestContext::Plug(bool flag) const {
696 codegen()->PrepareForBailoutBeforeSplit(condition(),
701 if (true_label_ != fall_through_) {
705 if (false_label_ != fall_through_) {
712 void FullCodeGenerator::DoTest(Expression* condition,
715 Label* fall_through) {
716 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
717 CallIC(ic, condition->test_id());
718 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
722 // If (cond), branch to if_true.
723 // If (!cond), branch to if_false.
724 // fall_through is used as an optimization in cases where only one branch
725 // instruction is necessary.
726 void FullCodeGenerator::Split(Condition cond,
729 Label* fall_through) {
730 if (if_false == fall_through) {
732 } else if (if_true == fall_through) {
733 DCHECK(if_false != fall_through);
734 __ B(NegateCondition(cond), if_false);
742 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
743 // Offset is negative because higher indexes are at lower addresses.
744 int offset = -var->index() * kXRegSize;
745 // Adjust by a (parameter or local) base offset.
746 if (var->IsParameter()) {
747 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
749 offset += JavaScriptFrameConstants::kLocal0Offset;
751 return MemOperand(fp, offset);
755 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
756 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
757 if (var->IsContextSlot()) {
758 int context_chain_length = scope()->ContextChainLength(var->scope());
759 __ LoadContext(scratch, context_chain_length);
760 return ContextMemOperand(scratch, var->index());
762 return StackOperand(var);
767 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
768 // Use destination as scratch.
769 MemOperand location = VarOperand(var, dest);
770 __ Ldr(dest, location);
774 void FullCodeGenerator::SetVar(Variable* var,
778 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
779 DCHECK(!AreAliased(src, scratch0, scratch1));
780 MemOperand location = VarOperand(var, scratch0);
781 __ Str(src, location);
783 // Emit the write barrier code if the location is in the heap.
784 if (var->IsContextSlot()) {
785 // scratch0 contains the correct context.
786 __ RecordWriteContextSlot(scratch0,
796 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
797 bool should_normalize,
800 // Only prepare for bailouts before splits if we're in a test
801 // context. Otherwise, we let the Visit function deal with the
802 // preparation to avoid preparing with the same AST id twice.
803 if (!context()->IsTest() || !info_->IsOptimizable()) return;
805 // TODO(all): Investigate to see if there is something to work on here.
807 if (should_normalize) {
810 PrepareForBailout(expr, TOS_REG);
811 if (should_normalize) {
812 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
813 Split(eq, if_true, if_false, NULL);
819 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
820 // The variable in the declaration always resides in the current function
822 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
823 if (generate_debug_code_) {
824 // Check that we're not inside a with or catch context.
825 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
826 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
827 __ Check(ne, kDeclarationInWithContext);
828 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
829 __ Check(ne, kDeclarationInCatchContext);
834 void FullCodeGenerator::VisitVariableDeclaration(
835 VariableDeclaration* declaration) {
836 // If it was not possible to allocate the variable at compile time, we
837 // need to "declare" it at runtime to make sure it actually exists in the
839 VariableProxy* proxy = declaration->proxy();
840 VariableMode mode = declaration->mode();
841 Variable* variable = proxy->var();
842 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
844 switch (variable->location()) {
845 case Variable::UNALLOCATED:
846 globals_->Add(variable->name(), zone());
847 globals_->Add(variable->binding_needs_init()
848 ? isolate()->factory()->the_hole_value()
849 : isolate()->factory()->undefined_value(),
853 case Variable::PARAMETER:
854 case Variable::LOCAL:
856 Comment cmnt(masm_, "[ VariableDeclaration");
857 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
858 __ Str(x10, StackOperand(variable));
862 case Variable::CONTEXT:
864 Comment cmnt(masm_, "[ VariableDeclaration");
865 EmitDebugCheckDeclarationContext(variable);
866 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
867 __ Str(x10, ContextMemOperand(cp, variable->index()));
868 // No write barrier since the_hole_value is in old space.
869 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
873 case Variable::LOOKUP: {
874 Comment cmnt(masm_, "[ VariableDeclaration");
875 __ Mov(x2, Operand(variable->name()));
876 // Declaration nodes are always introduced in one of four modes.
877 DCHECK(IsDeclaredVariableMode(mode));
878 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
880 __ Mov(x1, Smi::FromInt(attr));
881 // Push initial value, if any.
882 // Note: For variables we must not push an initial value (such as
883 // 'undefined') because we may have a (legal) redeclaration and we
884 // must not destroy the current value.
886 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
887 __ Push(cp, x2, x1, x0);
889 // Pushing 0 (xzr) indicates no initial value.
890 __ Push(cp, x2, x1, xzr);
892 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
899 void FullCodeGenerator::VisitFunctionDeclaration(
900 FunctionDeclaration* declaration) {
901 VariableProxy* proxy = declaration->proxy();
902 Variable* variable = proxy->var();
903 switch (variable->location()) {
904 case Variable::UNALLOCATED: {
905 globals_->Add(variable->name(), zone());
906 Handle<SharedFunctionInfo> function =
907 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
908 // Check for stack overflow exception.
909 if (function.is_null()) return SetStackOverflow();
910 globals_->Add(function, zone());
914 case Variable::PARAMETER:
915 case Variable::LOCAL: {
916 Comment cmnt(masm_, "[ Function Declaration");
917 VisitForAccumulatorValue(declaration->fun());
918 __ Str(result_register(), StackOperand(variable));
922 case Variable::CONTEXT: {
923 Comment cmnt(masm_, "[ Function Declaration");
924 EmitDebugCheckDeclarationContext(variable);
925 VisitForAccumulatorValue(declaration->fun());
926 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
927 int offset = Context::SlotOffset(variable->index());
928 // We know that we have written a function, which is not a smi.
929 __ RecordWriteContextSlot(cp,
937 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
941 case Variable::LOOKUP: {
942 Comment cmnt(masm_, "[ Function Declaration");
943 __ Mov(x2, Operand(variable->name()));
944 __ Mov(x1, Smi::FromInt(NONE));
946 // Push initial value for function declaration.
947 VisitForStackValue(declaration->fun());
948 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
955 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
956 Variable* variable = declaration->proxy()->var();
957 ModuleDescriptor* descriptor = declaration->module()->descriptor();
958 DCHECK(variable->location() == Variable::CONTEXT);
959 DCHECK(descriptor->IsFrozen());
961 Comment cmnt(masm_, "[ ModuleDeclaration");
962 EmitDebugCheckDeclarationContext(variable);
964 // Load instance object.
965 __ LoadContext(x1, scope_->ContextChainLength(scope_->ScriptScope()));
966 __ Ldr(x1, ContextMemOperand(x1, descriptor->Index()));
967 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
970 __ Str(x1, ContextMemOperand(cp, variable->index()));
971 // We know that we have written a module, which is not a smi.
972 __ RecordWriteContextSlot(cp,
973 Context::SlotOffset(variable->index()),
980 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
982 // Traverse info body.
983 Visit(declaration->module());
987 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
988 VariableProxy* proxy = declaration->proxy();
989 Variable* variable = proxy->var();
990 switch (variable->location()) {
991 case Variable::UNALLOCATED:
995 case Variable::CONTEXT: {
996 Comment cmnt(masm_, "[ ImportDeclaration");
997 EmitDebugCheckDeclarationContext(variable);
1002 case Variable::PARAMETER:
1003 case Variable::LOCAL:
1004 case Variable::LOOKUP:
1010 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1015 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1016 // Call the runtime to declare the globals.
1017 __ Mov(x11, Operand(pairs));
1018 Register flags = xzr;
1019 if (Smi::FromInt(DeclareGlobalsFlags())) {
1021 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
1023 __ Push(cp, x11, flags);
1024 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1025 // Return value is ignored.
1029 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1030 // Call the runtime to declare the modules.
1031 __ Push(descriptions);
1032 __ CallRuntime(Runtime::kDeclareModules, 1);
1033 // Return value is ignored.
1037 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1038 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
1039 Comment cmnt(masm_, "[ SwitchStatement");
1040 Breakable nested_statement(this, stmt);
1041 SetStatementPosition(stmt);
1043 // Keep the switch value on the stack until a case matches.
1044 VisitForStackValue(stmt->tag());
1045 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1047 ZoneList<CaseClause*>* clauses = stmt->cases();
1048 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1050 Label next_test; // Recycled for each test.
1051 // Compile all the tests with branches to their bodies.
1052 for (int i = 0; i < clauses->length(); i++) {
1053 CaseClause* clause = clauses->at(i);
1054 clause->body_target()->Unuse();
1056 // The default is not a test, but remember it as final fall through.
1057 if (clause->is_default()) {
1058 default_clause = clause;
1062 Comment cmnt(masm_, "[ Case comparison");
1063 __ Bind(&next_test);
1066 // Compile the label expression.
1067 VisitForAccumulatorValue(clause->label());
1069 // Perform the comparison as if via '==='.
1070 __ Peek(x1, 0); // Switch value.
1072 JumpPatchSite patch_site(masm_);
1073 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1075 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1077 __ B(ne, &next_test);
1078 __ Drop(1); // Switch value is no longer needed.
1079 __ B(clause->body_target());
1080 __ Bind(&slow_case);
1083 // Record position before stub call for type feedback.
1084 SetSourcePosition(clause->position());
1086 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1087 CallIC(ic, clause->CompareId());
1088 patch_site.EmitPatchInfo();
1092 PrepareForBailout(clause, TOS_REG);
1093 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1095 __ B(clause->body_target());
1098 __ Cbnz(x0, &next_test);
1099 __ Drop(1); // Switch value is no longer needed.
1100 __ B(clause->body_target());
1103 // Discard the test value and jump to the default if present, otherwise to
1104 // the end of the statement.
1105 __ Bind(&next_test);
1106 __ Drop(1); // Switch value is no longer needed.
1107 if (default_clause == NULL) {
1108 __ B(nested_statement.break_label());
1110 __ B(default_clause->body_target());
1113 // Compile all the case bodies.
1114 for (int i = 0; i < clauses->length(); i++) {
1115 Comment cmnt(masm_, "[ Case body");
1116 CaseClause* clause = clauses->at(i);
1117 __ Bind(clause->body_target());
1118 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1119 VisitStatements(clause->statements());
1122 __ Bind(nested_statement.break_label());
1123 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1127 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1128 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1129 Comment cmnt(masm_, "[ ForInStatement");
1130 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1131 // TODO(all): This visitor probably needs better comments and a revisit.
1132 SetStatementPosition(stmt);
1135 ForIn loop_statement(this, stmt);
1136 increment_loop_depth();
1138 // Get the object to enumerate over. If the object is null or undefined, skip
1139 // over the loop. See ECMA-262 version 5, section 12.6.4.
1140 SetExpressionPosition(stmt->enumerable());
1141 VisitForAccumulatorValue(stmt->enumerable());
1142 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1143 Register null_value = x15;
1144 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1145 __ Cmp(x0, null_value);
1148 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1150 // Convert the object to a JS object.
1151 Label convert, done_convert;
1152 __ JumpIfSmi(x0, &convert);
1153 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1156 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1157 __ Bind(&done_convert);
1158 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1161 // Check for proxies.
1163 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1164 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1166 // Check cache validity in generated code. This is a fast case for
1167 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1168 // guarantee cache validity, call the runtime system to check cache
1169 // validity or get the property names in a fixed array.
1170 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1172 // The enum cache is valid. Load the map of the object being
1173 // iterated over and use the cache for the iteration.
1175 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1178 // Get the set of properties to enumerate.
1179 __ Bind(&call_runtime);
1180 __ Push(x0); // Duplicate the enumerable object on the stack.
1181 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1182 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1184 // If we got a map from the runtime call, we can do a fast
1185 // modification check. Otherwise, we got a fixed array, and we have
1186 // to do a slow check.
1187 Label fixed_array, no_descriptors;
1188 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1189 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1191 // We got a map in register x0. Get the enumeration cache from it.
1192 __ Bind(&use_cache);
1194 __ EnumLengthUntagged(x1, x0);
1195 __ Cbz(x1, &no_descriptors);
1197 __ LoadInstanceDescriptors(x0, x2);
1198 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1200 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1202 // Set up the four remaining stack slots.
1204 // Map, enumeration cache, enum cache length, zero (both last as smis).
1205 __ Push(x0, x2, x1, xzr);
1208 __ Bind(&no_descriptors);
1212 // We got a fixed array in register x0. Iterate through that.
1213 __ Bind(&fixed_array);
1215 __ LoadObject(x1, FeedbackVector());
1216 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1217 int vector_index = FeedbackVector()->GetIndex(slot);
1218 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
1220 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1221 __ Peek(x10, 0); // Get enumerated object.
1222 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1223 // TODO(all): similar check was done already. Can we avoid it here?
1224 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1225 DCHECK(Smi::FromInt(0) == 0);
1226 __ CzeroX(x1, le); // Zero indicates proxy.
1227 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1228 // Smi and array, fixed array length (as smi) and initial index.
1229 __ Push(x1, x0, x2, xzr);
1231 // Generate code for doing the condition check.
1232 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1234 SetExpressionPosition(stmt->each());
1236 // Load the current count to x0, load the length to x1.
1237 __ PeekPair(x0, x1, 0);
1238 __ Cmp(x0, x1); // Compare to the array length.
1239 __ B(hs, loop_statement.break_label());
1241 // Get the current entry of the array into register r3.
1242 __ Peek(x10, 2 * kXRegSize);
1243 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1244 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1246 // Get the expected map from the stack or a smi in the
1247 // permanent slow case into register x10.
1248 __ Peek(x2, 3 * kXRegSize);
1250 // Check if the expected map still matches that of the enumerable.
1251 // If not, we may have to filter the key.
1253 __ Peek(x1, 4 * kXRegSize);
1254 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1256 __ B(eq, &update_each);
1258 // For proxies, no filtering is done.
1259 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1260 STATIC_ASSERT(kSmiTag == 0);
1261 __ Cbz(x2, &update_each);
1263 // Convert the entry to a string or (smi) 0 if it isn't a property
1264 // any more. If the property has been removed while iterating, we
1267 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1269 __ Cbz(x0, loop_statement.continue_label());
1271 // Update the 'each' property or variable from the possibly filtered
1272 // entry in register x3.
1273 __ Bind(&update_each);
1274 __ Mov(result_register(), x3);
1275 // Perform the assignment as if via '='.
1276 { EffectContext context(this);
1277 EmitAssignment(stmt->each());
1278 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1281 // Generate code for the body of the loop.
1282 Visit(stmt->body());
1284 // Generate code for going to the next element by incrementing
1285 // the index (smi) stored on top of the stack.
1286 __ Bind(loop_statement.continue_label());
1287 // TODO(all): We could use a callee saved register to avoid popping.
1289 __ Add(x0, x0, Smi::FromInt(1));
1292 EmitBackEdgeBookkeeping(stmt, &loop);
1295 // Remove the pointers stored on the stack.
1296 __ Bind(loop_statement.break_label());
1299 // Exit and decrement the loop depth.
1300 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1302 decrement_loop_depth();
1306 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1308 // Use the fast case closure allocation code that allocates in new space for
1309 // nested functions that don't need literals cloning. If we're running with
1310 // the --always-opt or the --prepare-always-opt flag, we need to use the
1311 // runtime function so that the new function we are creating here gets a
1312 // chance to have its code optimized and doesn't just get a copy of the
1313 // existing unoptimized code.
1314 if (!FLAG_always_opt &&
1315 !FLAG_prepare_always_opt &&
1317 scope()->is_function_scope() &&
1318 info->num_literals() == 0) {
1319 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1320 __ Mov(x2, Operand(info));
1323 __ Mov(x11, Operand(info));
1324 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1325 : Heap::kFalseValueRootIndex);
1326 __ Push(cp, x11, x10);
1327 __ CallRuntime(Runtime::kNewClosure, 3);
1329 context()->Plug(x0);
1333 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1334 Comment cmnt(masm_, "[ VariableProxy");
1335 EmitVariableLoad(expr);
1339 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1340 Comment cnmt(masm_, "[ SuperReference ");
1342 __ ldr(LoadDescriptor::ReceiverRegister(),
1343 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1345 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1346 __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));
1348 if (FLAG_vector_ics) {
1349 __ Mov(VectorLoadICDescriptor::SlotRegister(),
1350 SmiFromSlot(expr->HomeObjectFeedbackSlot()));
1351 CallLoadIC(NOT_CONTEXTUAL);
1353 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1356 __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
1360 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1365 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1367 if (NeedsHomeObject(initializer)) {
1368 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1369 __ Mov(StoreDescriptor::NameRegister(),
1370 Operand(isolate()->factory()->home_object_symbol()));
1371 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1377 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1378 TypeofState typeof_state,
1380 Register current = cp;
1381 Register next = x10;
1382 Register temp = x11;
1386 if (s->num_heap_slots() > 0) {
1387 if (s->calls_sloppy_eval()) {
1388 // Check that extension is NULL.
1389 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1390 __ Cbnz(temp, slow);
1392 // Load next context in chain.
1393 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1394 // Walk the rest of the chain without clobbering cp.
1397 // If no outer scope calls eval, we do not need to check more
1398 // context extensions.
1399 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1400 s = s->outer_scope();
1403 if (s->is_eval_scope()) {
1405 __ Mov(next, current);
1408 // Terminate at native context.
1409 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1410 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1411 // Check that extension is NULL.
1412 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1413 __ Cbnz(temp, slow);
1414 // Load next context in chain.
1415 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1420 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1421 __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1422 if (FLAG_vector_ics) {
1423 __ Mov(VectorLoadICDescriptor::SlotRegister(),
1424 SmiFromSlot(proxy->VariableFeedbackSlot()));
1427 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
1433 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1435 DCHECK(var->IsContextSlot());
1436 Register context = cp;
1437 Register next = x10;
1438 Register temp = x11;
1440 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1441 if (s->num_heap_slots() > 0) {
1442 if (s->calls_sloppy_eval()) {
1443 // Check that extension is NULL.
1444 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1445 __ Cbnz(temp, slow);
1447 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1448 // Walk the rest of the chain without clobbering cp.
1452 // Check that last extension is NULL.
1453 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1454 __ Cbnz(temp, slow);
1456 // This function is used only for loads, not stores, so it's safe to
1457 // return an cp-based operand (the write barrier cannot be allowed to
1458 // destroy the cp register).
1459 return ContextMemOperand(context, var->index());
1463 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1464 TypeofState typeof_state,
1467 // Generate fast-case code for variables that might be shadowed by
1468 // eval-introduced variables. Eval is used a lot without
1469 // introducing variables. In those cases, we do not want to
1470 // perform a runtime call for all variables in the scope
1471 // containing the eval.
1472 Variable* var = proxy->var();
1473 if (var->mode() == DYNAMIC_GLOBAL) {
1474 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1476 } else if (var->mode() == DYNAMIC_LOCAL) {
1477 Variable* local = var->local_if_not_shadowed();
1478 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1479 if (local->mode() == LET || local->mode() == CONST ||
1480 local->mode() == CONST_LEGACY) {
1481 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1482 if (local->mode() == CONST_LEGACY) {
1483 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1484 } else { // LET || CONST
1485 __ Mov(x0, Operand(var->name()));
1487 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1495 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1496 // Record position before possible IC call.
1497 SetSourcePosition(proxy->position());
1498 Variable* var = proxy->var();
1500 // Three cases: global variables, lookup variables, and all other types of
1502 switch (var->location()) {
1503 case Variable::UNALLOCATED: {
1504 Comment cmnt(masm_, "Global variable");
1505 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1506 __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1507 if (FLAG_vector_ics) {
1508 __ Mov(VectorLoadICDescriptor::SlotRegister(),
1509 SmiFromSlot(proxy->VariableFeedbackSlot()));
1511 CallLoadIC(CONTEXTUAL);
1512 context()->Plug(x0);
1516 case Variable::PARAMETER:
1517 case Variable::LOCAL:
1518 case Variable::CONTEXT: {
1519 Comment cmnt(masm_, var->IsContextSlot()
1520 ? "Context variable"
1521 : "Stack variable");
1522 if (var->binding_needs_init()) {
1523 // var->scope() may be NULL when the proxy is located in eval code and
1524 // refers to a potential outside binding. Currently those bindings are
1525 // always looked up dynamically, i.e. in that case
1526 // var->location() == LOOKUP.
1528 DCHECK(var->scope() != NULL);
1530 // Check if the binding really needs an initialization check. The check
1531 // can be skipped in the following situation: we have a LET or CONST
1532 // binding in harmony mode, both the Variable and the VariableProxy have
1533 // the same declaration scope (i.e. they are both in global code, in the
1534 // same function or in the same eval code) and the VariableProxy is in
1535 // the source physically located after the initializer of the variable.
1537 // We cannot skip any initialization checks for CONST in non-harmony
1538 // mode because const variables may be declared but never initialized:
1539 // if (false) { const x; }; var y = x;
1541 // The condition on the declaration scopes is a conservative check for
1542 // nested functions that access a binding and are called before the
1543 // binding is initialized:
1544 // function() { f(); let x = 1; function f() { x = 2; } }
1546 bool skip_init_check;
1547 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1548 skip_init_check = false;
1549 } else if (var->is_this()) {
1550 CHECK(info_->function() != nullptr &&
1551 (info_->function()->kind() & kSubclassConstructor) != 0);
1552 // TODO(dslomov): implement 'this' hole check elimination.
1553 skip_init_check = false;
1555 // Check that we always have valid source position.
1556 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1557 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1558 skip_init_check = var->mode() != CONST_LEGACY &&
1559 var->initializer_position() < proxy->position();
1562 if (!skip_init_check) {
1563 // Let and const need a read barrier.
1566 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1567 if (var->mode() == LET || var->mode() == CONST) {
1568 // Throw a reference error when using an uninitialized let/const
1569 // binding in harmony mode.
1570 __ Mov(x0, Operand(var->name()));
1572 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1575 // Uninitalized const bindings outside of harmony mode are unholed.
1576 DCHECK(var->mode() == CONST_LEGACY);
1577 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1580 context()->Plug(x0);
1584 context()->Plug(var);
1588 case Variable::LOOKUP: {
1590 // Generate code for loading from variables potentially shadowed by
1591 // eval-introduced variables.
1592 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1594 Comment cmnt(masm_, "Lookup variable");
1595 __ Mov(x1, Operand(var->name()));
1596 __ Push(cp, x1); // Context and name.
1597 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1599 context()->Plug(x0);
1606 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1607 Comment cmnt(masm_, "[ RegExpLiteral");
1609 // Registers will be used as follows:
1610 // x5 = materialized value (RegExp literal)
1611 // x4 = JS function, literals array
1612 // x3 = literal index
1613 // x2 = RegExp pattern
1614 // x1 = RegExp flags
1615 // x0 = RegExp literal clone
1616 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1617 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1618 int literal_offset =
1619 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1620 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1621 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1623 // Create regexp literal using runtime function.
1624 // Result will be in x0.
1625 __ Mov(x3, Smi::FromInt(expr->literal_index()));
1626 __ Mov(x2, Operand(expr->pattern()));
1627 __ Mov(x1, Operand(expr->flags()));
1628 __ Push(x4, x3, x2, x1);
1629 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1632 __ Bind(&materialized);
1633 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1634 Label allocated, runtime_allocate;
1635 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1638 __ Bind(&runtime_allocate);
1639 __ Mov(x10, Smi::FromInt(size));
1641 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1644 __ Bind(&allocated);
1645 // After this, registers are used as follows:
1646 // x0: Newly allocated regexp.
1647 // x5: Materialized regexp.
1648 // x10, x11, x12: temps.
1649 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1650 context()->Plug(x0);
1654 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1655 if (expression == NULL) {
1656 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1659 VisitForStackValue(expression);
1664 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1665 Comment cmnt(masm_, "[ ObjectLiteral");
1667 expr->BuildConstantProperties(isolate());
1668 Handle<FixedArray> constant_properties = expr->constant_properties();
1669 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1670 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1671 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1672 __ Mov(x1, Operand(constant_properties));
1673 int flags = expr->ComputeFlags();
1674 __ Mov(x0, Smi::FromInt(flags));
1675 if (MustCreateObjectLiteralWithRuntime(expr)) {
1676 __ Push(x3, x2, x1, x0);
1677 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1679 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1682 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1684 // If result_saved is true the result is on top of the stack. If
1685 // result_saved is false the result is in x0.
1686 bool result_saved = false;
1688 // Mark all computed expressions that are bound to a key that
1689 // is shadowed by a later occurrence of the same key. For the
1690 // marked expressions, no store code is emitted.
1691 expr->CalculateEmitStore(zone());
1693 AccessorTable accessor_table(zone());
1694 int property_index = 0;
1695 for (; property_index < expr->properties()->length(); property_index++) {
1696 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1697 if (property->is_computed_name()) break;
1698 if (property->IsCompileTimeValue()) continue;
1700 Literal* key = property->key()->AsLiteral();
1701 Expression* value = property->value();
1702 if (!result_saved) {
1703 __ Push(x0); // Save result on stack
1704 result_saved = true;
1706 switch (property->kind()) {
1707 case ObjectLiteral::Property::CONSTANT:
1709 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1710 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1712 case ObjectLiteral::Property::COMPUTED:
1713 // It is safe to use [[Put]] here because the boilerplate already
1714 // contains computed properties with an uninitialized value.
1715 if (key->value()->IsInternalizedString()) {
1716 if (property->emit_store()) {
1717 VisitForAccumulatorValue(value);
1718 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1719 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1720 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1721 CallStoreIC(key->LiteralFeedbackId());
1722 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1724 if (NeedsHomeObject(value)) {
1725 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
1726 __ Mov(StoreDescriptor::NameRegister(),
1727 Operand(isolate()->factory()->home_object_symbol()));
1728 __ Peek(StoreDescriptor::ValueRegister(), 0);
1732 VisitForEffect(value);
1736 if (property->emit_store()) {
1737 // Duplicate receiver on stack.
1740 VisitForStackValue(key);
1741 VisitForStackValue(value);
1742 EmitSetHomeObjectIfNeeded(value, 2);
1743 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1745 __ CallRuntime(Runtime::kSetProperty, 4);
1747 VisitForEffect(key);
1748 VisitForEffect(value);
1751 case ObjectLiteral::Property::PROTOTYPE:
1752 DCHECK(property->emit_store());
1753 // Duplicate receiver on stack.
1756 VisitForStackValue(value);
1757 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1759 case ObjectLiteral::Property::GETTER:
1760 if (property->emit_store()) {
1761 accessor_table.lookup(key)->second->getter = value;
1764 case ObjectLiteral::Property::SETTER:
1765 if (property->emit_store()) {
1766 accessor_table.lookup(key)->second->setter = value;
1772 // Emit code to define accessors, using only a single call to the runtime for
1773 // each pair of corresponding getters and setters.
1774 for (AccessorTable::Iterator it = accessor_table.begin();
1775 it != accessor_table.end();
1777 __ Peek(x10, 0); // Duplicate receiver.
1779 VisitForStackValue(it->first);
1780 EmitAccessor(it->second->getter);
1781 EmitSetHomeObjectIfNeeded(it->second->getter, 2);
1782 EmitAccessor(it->second->setter);
1783 EmitSetHomeObjectIfNeeded(it->second->setter, 3);
1784 __ Mov(x10, Smi::FromInt(NONE));
1786 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1789 // Object literals have two parts. The "static" part on the left contains no
1790 // computed property names, and so we can compute its map ahead of time; see
1791 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1792 // starts with the first computed property name, and continues with all
1793 // properties to its right. All the code from above initializes the static
1794 // component of the object literal, and arranges for the map of the result to
1795 // reflect the static order in which the keys appear. For the dynamic
1796 // properties, we compile them into a series of "SetOwnProperty" runtime
1797 // calls. This will preserve insertion order.
1798 for (; property_index < expr->properties()->length(); property_index++) {
1799 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1801 Expression* value = property->value();
1802 if (!result_saved) {
1803 __ Push(x0); // Save result on stack
1804 result_saved = true;
1807 __ Peek(x10, 0); // Duplicate receiver.
1810 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1811 DCHECK(!property->is_computed_name());
1812 VisitForStackValue(value);
1813 DCHECK(property->emit_store());
1814 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1816 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1817 VisitForStackValue(value);
1818 EmitSetHomeObjectIfNeeded(value, 2);
1820 switch (property->kind()) {
1821 case ObjectLiteral::Property::CONSTANT:
1822 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1823 case ObjectLiteral::Property::COMPUTED:
1824 if (property->emit_store()) {
1825 __ Mov(x0, Smi::FromInt(NONE));
1827 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1833 case ObjectLiteral::Property::PROTOTYPE:
1837 case ObjectLiteral::Property::GETTER:
1838 __ Mov(x0, Smi::FromInt(NONE));
1840 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1843 case ObjectLiteral::Property::SETTER:
1844 __ Mov(x0, Smi::FromInt(NONE));
1846 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1852 if (expr->has_function()) {
1853 DCHECK(result_saved);
1856 __ CallRuntime(Runtime::kToFastProperties, 1);
1860 context()->PlugTOS();
1862 context()->Plug(x0);
1867 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1868 Comment cmnt(masm_, "[ ArrayLiteral");
1870 expr->BuildConstantElements(isolate());
1871 Handle<FixedArray> constant_elements = expr->constant_elements();
1872 bool has_fast_elements =
1873 IsFastObjectElementsKind(expr->constant_elements_kind());
1875 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1876 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1877 // If the only customer of allocation sites is transitioning, then
1878 // we can turn it off if we don't have anywhere else to transition to.
1879 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1882 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1883 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1884 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1885 __ Mov(x1, Operand(constant_elements));
1886 if (MustCreateArrayLiteralWithRuntime(expr)) {
1887 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1888 __ Push(x3, x2, x1, x0);
1889 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1891 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1894 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1896 bool result_saved = false; // Is the result saved to the stack?
1897 ZoneList<Expression*>* subexprs = expr->values();
1898 int length = subexprs->length();
1900 // Emit code to evaluate all the non-constant subexpressions and to store
1901 // them into the newly cloned array.
1902 for (int i = 0; i < length; i++) {
1903 Expression* subexpr = subexprs->at(i);
1904 // If the subexpression is a literal or a simple materialized literal it
1905 // is already set in the cloned array.
1906 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1908 if (!result_saved) {
1909 __ Mov(x1, Smi::FromInt(expr->literal_index()));
1911 result_saved = true;
1913 VisitForAccumulatorValue(subexpr);
1915 if (has_fast_elements) {
1916 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1917 __ Peek(x6, kPointerSize); // Copy of array literal.
1918 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1919 __ Str(result_register(), FieldMemOperand(x1, offset));
1920 // Update the write barrier for the array store.
1921 __ RecordWriteField(x1, offset, result_register(), x10,
1922 kLRHasBeenSaved, kDontSaveFPRegs,
1923 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1925 __ Mov(x3, Smi::FromInt(i));
1926 StoreArrayLiteralElementStub stub(isolate());
1930 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1934 __ Drop(1); // literal index
1935 context()->PlugTOS();
1937 context()->Plug(x0);
1942 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1943 DCHECK(expr->target()->IsValidReferenceExpression());
1945 Comment cmnt(masm_, "[ Assignment");
1947 Property* property = expr->target()->AsProperty();
1948 LhsKind assign_type = GetAssignType(property);
1950 // Evaluate LHS expression.
1951 switch (assign_type) {
1953 // Nothing to do here.
1955 case NAMED_PROPERTY:
1956 if (expr->is_compound()) {
1957 // We need the receiver both on the stack and in the register.
1958 VisitForStackValue(property->obj());
1959 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1961 VisitForStackValue(property->obj());
1964 case NAMED_SUPER_PROPERTY:
1965 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1966 EmitLoadHomeObject(property->obj()->AsSuperReference());
1967 __ Push(result_register());
1968 if (expr->is_compound()) {
1969 const Register scratch = x10;
1970 __ Peek(scratch, kPointerSize);
1971 __ Push(scratch, result_register());
1974 case KEYED_SUPER_PROPERTY:
1975 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1976 EmitLoadHomeObject(property->obj()->AsSuperReference());
1977 __ Push(result_register());
1978 VisitForAccumulatorValue(property->key());
1979 __ Push(result_register());
1980 if (expr->is_compound()) {
1981 const Register scratch1 = x10;
1982 const Register scratch2 = x11;
1983 __ Peek(scratch1, 2 * kPointerSize);
1984 __ Peek(scratch2, kPointerSize);
1985 __ Push(scratch1, scratch2, result_register());
1988 case KEYED_PROPERTY:
1989 if (expr->is_compound()) {
1990 VisitForStackValue(property->obj());
1991 VisitForStackValue(property->key());
1992 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1993 __ Peek(LoadDescriptor::NameRegister(), 0);
1995 VisitForStackValue(property->obj());
1996 VisitForStackValue(property->key());
2001 // For compound assignments we need another deoptimization point after the
2002 // variable/property load.
2003 if (expr->is_compound()) {
2004 { AccumulatorValueContext context(this);
2005 switch (assign_type) {
2007 EmitVariableLoad(expr->target()->AsVariableProxy());
2008 PrepareForBailout(expr->target(), TOS_REG);
2010 case NAMED_PROPERTY:
2011 EmitNamedPropertyLoad(property);
2012 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2014 case NAMED_SUPER_PROPERTY:
2015 EmitNamedSuperPropertyLoad(property);
2016 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2018 case KEYED_SUPER_PROPERTY:
2019 EmitKeyedSuperPropertyLoad(property);
2020 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2022 case KEYED_PROPERTY:
2023 EmitKeyedPropertyLoad(property);
2024 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2029 Token::Value op = expr->binary_op();
2030 __ Push(x0); // Left operand goes on the stack.
2031 VisitForAccumulatorValue(expr->value());
2033 SetSourcePosition(expr->position() + 1);
2034 AccumulatorValueContext context(this);
2035 if (ShouldInlineSmiCase(op)) {
2036 EmitInlineSmiBinaryOp(expr->binary_operation(),
2041 EmitBinaryOp(expr->binary_operation(), op);
2044 // Deoptimization point in case the binary operation may have side effects.
2045 PrepareForBailout(expr->binary_operation(), TOS_REG);
2047 VisitForAccumulatorValue(expr->value());
2050 // Record source position before possible IC call.
2051 SetSourcePosition(expr->position());
2054 switch (assign_type) {
2056 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2058 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2059 context()->Plug(x0);
2061 case NAMED_PROPERTY:
2062 EmitNamedPropertyAssignment(expr);
2064 case NAMED_SUPER_PROPERTY:
2065 EmitNamedSuperPropertyStore(property);
2066 context()->Plug(x0);
2068 case KEYED_SUPER_PROPERTY:
2069 EmitKeyedSuperPropertyStore(property);
2070 context()->Plug(x0);
2072 case KEYED_PROPERTY:
2073 EmitKeyedPropertyAssignment(expr);
2079 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2080 SetSourcePosition(prop->position());
2081 Literal* key = prop->key()->AsLiteral();
2082 DCHECK(!prop->IsSuperAccess());
2084 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2085 if (FLAG_vector_ics) {
2086 __ Mov(VectorLoadICDescriptor::SlotRegister(),
2087 SmiFromSlot(prop->PropertyFeedbackSlot()));
2088 CallLoadIC(NOT_CONTEXTUAL);
2090 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2095 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2096 // Stack: receiver, home_object.
2097 SetSourcePosition(prop->position());
2098 Literal* key = prop->key()->AsLiteral();
2099 DCHECK(!key->value()->IsSmi());
2100 DCHECK(prop->IsSuperAccess());
2102 __ Push(key->value());
2103 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2107 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2108 SetSourcePosition(prop->position());
2109 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2110 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2111 if (FLAG_vector_ics) {
2112 __ Mov(VectorLoadICDescriptor::SlotRegister(),
2113 SmiFromSlot(prop->PropertyFeedbackSlot()));
2116 CallIC(ic, prop->PropertyFeedbackId());
2121 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2122 // Stack: receiver, home_object, key.
2123 SetSourcePosition(prop->position());
2125 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2129 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2131 Expression* left_expr,
2132 Expression* right_expr) {
2133 Label done, both_smis, stub_call;
2135 // Get the arguments.
2137 Register right = x0;
2138 Register result = x0;
2141 // Perform combined smi check on both operands.
2142 __ Orr(x10, left, right);
2143 JumpPatchSite patch_site(masm_);
2144 patch_site.EmitJumpIfSmi(x10, &both_smis);
2146 __ Bind(&stub_call);
2148 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2150 Assembler::BlockPoolsScope scope(masm_);
2151 CallIC(code, expr->BinaryOperationFeedbackId());
2152 patch_site.EmitPatchInfo();
2156 __ Bind(&both_smis);
2157 // Smi case. This code works in the same way as the smi-smi case in the type
2158 // recording binary operation stub, see
2159 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2160 // TODO(all): That doesn't exist any more. Where are the comments?
2162 // The set of operations that needs to be supported here is controlled by
2163 // FullCodeGenerator::ShouldInlineSmiCase().
2166 __ Ubfx(right, right, kSmiShift, 5);
2167 __ Asr(result, left, right);
2168 __ Bic(result, result, kSmiShiftMask);
2171 __ Ubfx(right, right, kSmiShift, 5);
2172 __ Lsl(result, left, right);
2175 // If `left >>> right` >= 0x80000000, the result is not representable in a
2176 // signed 32-bit smi.
2177 __ Ubfx(right, right, kSmiShift, 5);
2178 __ Lsr(x10, left, right);
2179 __ Tbnz(x10, kXSignBit, &stub_call);
2180 __ Bic(result, x10, kSmiShiftMask);
2183 __ Adds(x10, left, right);
2184 __ B(vs, &stub_call);
2185 __ Mov(result, x10);
2188 __ Subs(x10, left, right);
2189 __ B(vs, &stub_call);
2190 __ Mov(result, x10);
2193 Label not_minus_zero, done;
2194 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2195 STATIC_ASSERT(kSmiTag == 0);
2196 __ Smulh(x10, left, right);
2197 __ Cbnz(x10, ¬_minus_zero);
2198 __ Eor(x11, left, right);
2199 __ Tbnz(x11, kXSignBit, &stub_call);
2200 __ Mov(result, x10);
2202 __ Bind(¬_minus_zero);
2204 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2205 __ B(lt, &stub_call);
2206 __ SmiTag(result, x10);
2211 __ Orr(result, left, right);
2213 case Token::BIT_AND:
2214 __ And(result, left, right);
2216 case Token::BIT_XOR:
2217 __ Eor(result, left, right);
2224 context()->Plug(x0);
2228 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2230 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2231 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2233 Assembler::BlockPoolsScope scope(masm_);
2234 CallIC(code, expr->BinaryOperationFeedbackId());
2235 patch_site.EmitPatchInfo();
2237 context()->Plug(x0);
2241 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2242 // Constructor is in x0.
2243 DCHECK(lit != NULL);
2246 // No access check is needed here since the constructor is created by the
2248 Register scratch = x1;
2250 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2253 for (int i = 0; i < lit->properties()->length(); i++) {
2254 ObjectLiteral::Property* property = lit->properties()->at(i);
2255 Expression* value = property->value();
2257 if (property->is_static()) {
2258 __ Peek(scratch, kPointerSize); // constructor
2260 __ Peek(scratch, 0); // prototype
2263 EmitPropertyKey(property, lit->GetIdForProperty(i));
2264 VisitForStackValue(value);
2265 EmitSetHomeObjectIfNeeded(value, 2);
2267 switch (property->kind()) {
2268 case ObjectLiteral::Property::CONSTANT:
2269 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2270 case ObjectLiteral::Property::PROTOTYPE:
2272 case ObjectLiteral::Property::COMPUTED:
2273 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2276 case ObjectLiteral::Property::GETTER:
2277 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2279 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2282 case ObjectLiteral::Property::SETTER:
2283 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2285 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2294 __ CallRuntime(Runtime::kToFastProperties, 1);
2297 __ CallRuntime(Runtime::kToFastProperties, 1);
2301 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2302 DCHECK(expr->IsValidReferenceExpression());
2304 Property* prop = expr->AsProperty();
2305 LhsKind assign_type = GetAssignType(prop);
2307 switch (assign_type) {
2309 Variable* var = expr->AsVariableProxy()->var();
2310 EffectContext context(this);
2311 EmitVariableAssignment(var, Token::ASSIGN);
2314 case NAMED_PROPERTY: {
2315 __ Push(x0); // Preserve value.
2316 VisitForAccumulatorValue(prop->obj());
2317 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2319 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2320 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2321 __ Mov(StoreDescriptor::NameRegister(),
2322 Operand(prop->key()->AsLiteral()->value()));
2326 case NAMED_SUPER_PROPERTY: {
2328 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2329 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2330 // stack: value, this; x0: home_object
2331 Register scratch = x10;
2332 Register scratch2 = x11;
2333 __ mov(scratch, result_register()); // home_object
2334 __ Peek(x0, kPointerSize); // value
2335 __ Peek(scratch2, 0); // this
2336 __ Poke(scratch2, kPointerSize); // this
2337 __ Poke(scratch, 0); // home_object
2338 // stack: this, home_object; x0: value
2339 EmitNamedSuperPropertyStore(prop);
2342 case KEYED_SUPER_PROPERTY: {
2344 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2345 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2346 __ Push(result_register());
2347 VisitForAccumulatorValue(prop->key());
2348 Register scratch = x10;
2349 Register scratch2 = x11;
2350 __ Peek(scratch2, 2 * kPointerSize); // value
2351 // stack: value, this, home_object; x0: key, x11: value
2352 __ Peek(scratch, kPointerSize); // this
2353 __ Poke(scratch, 2 * kPointerSize);
2354 __ Peek(scratch, 0); // home_object
2355 __ Poke(scratch, kPointerSize);
2357 __ Move(x0, scratch2);
2358 // stack: this, home_object, key; x0: value.
2359 EmitKeyedSuperPropertyStore(prop);
2362 case KEYED_PROPERTY: {
2363 __ Push(x0); // Preserve value.
2364 VisitForStackValue(prop->obj());
2365 VisitForAccumulatorValue(prop->key());
2366 __ Mov(StoreDescriptor::NameRegister(), x0);
2367 __ Pop(StoreDescriptor::ReceiverRegister(),
2368 StoreDescriptor::ValueRegister());
2370 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2375 context()->Plug(x0);
2379 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2380 Variable* var, MemOperand location) {
2381 __ Str(result_register(), location);
2382 if (var->IsContextSlot()) {
2383 // RecordWrite may destroy all its register arguments.
2384 __ Mov(x10, result_register());
2385 int offset = Context::SlotOffset(var->index());
2386 __ RecordWriteContextSlot(
2387 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2392 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2394 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2395 if (var->IsUnallocated()) {
2396 // Global var, const, or let.
2397 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2398 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2401 } else if (op == Token::INIT_CONST_LEGACY) {
2402 // Const initializers need a write barrier.
2403 DCHECK(!var->IsParameter()); // No const parameters.
2404 if (var->IsLookupSlot()) {
2405 __ Mov(x1, Operand(var->name()));
2406 __ Push(x0, cp, x1);
2407 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2409 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2411 MemOperand location = VarOperand(var, x1);
2412 __ Ldr(x10, location);
2413 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2414 EmitStoreToStackLocalOrContextSlot(var, location);
2418 } else if (var->mode() == LET && op != Token::INIT_LET) {
2419 // Non-initializing assignment to let variable needs a write barrier.
2420 DCHECK(!var->IsLookupSlot());
2421 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2423 MemOperand location = VarOperand(var, x1);
2424 __ Ldr(x10, location);
2425 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2426 __ Mov(x10, Operand(var->name()));
2428 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2429 // Perform the assignment.
2431 EmitStoreToStackLocalOrContextSlot(var, location);
2433 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2434 if (var->IsLookupSlot()) {
2435 // Assignment to var.
2436 __ Mov(x11, Operand(var->name()));
2437 __ Mov(x10, Smi::FromInt(language_mode()));
2440 // jssp[16] : context.
2441 // jssp[24] : value.
2442 __ Push(x0, cp, x11, x10);
2443 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2445 // Assignment to var or initializing assignment to let/const in harmony
2447 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2448 MemOperand location = VarOperand(var, x1);
2449 if (FLAG_debug_code && op == Token::INIT_LET) {
2450 __ Ldr(x10, location);
2451 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2452 __ Check(eq, kLetBindingReInitialization);
2454 EmitStoreToStackLocalOrContextSlot(var, location);
2456 } else if (IsSignallingAssignmentToConst(var, op, language_mode())) {
2457 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2462 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2463 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2464 // Assignment to a property, using a named store IC.
2465 Property* prop = expr->target()->AsProperty();
2466 DCHECK(prop != NULL);
2467 DCHECK(prop->key()->IsLiteral());
2469 // Record source code position before IC call.
2470 SetSourcePosition(expr->position());
2471 __ Mov(StoreDescriptor::NameRegister(),
2472 Operand(prop->key()->AsLiteral()->value()));
2473 __ Pop(StoreDescriptor::ReceiverRegister());
2474 CallStoreIC(expr->AssignmentFeedbackId());
2476 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2477 context()->Plug(x0);
2481 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2482 // Assignment to named property of super.
2484 // stack : receiver ('this'), home_object
2485 DCHECK(prop != NULL);
2486 Literal* key = prop->key()->AsLiteral();
2487 DCHECK(key != NULL);
2489 __ Push(key->value());
2491 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2492 : Runtime::kStoreToSuper_Sloppy),
2497 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2498 // Assignment to named property of super.
2500 // stack : receiver ('this'), home_object, key
2501 DCHECK(prop != NULL);
2505 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2506 : Runtime::kStoreKeyedToSuper_Sloppy),
2511 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2512 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2513 // Assignment to a property, using a keyed store IC.
2515 // Record source code position before IC call.
2516 SetSourcePosition(expr->position());
2517 // TODO(all): Could we pass this in registers rather than on the stack?
2518 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2519 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2522 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2523 CallIC(ic, expr->AssignmentFeedbackId());
2525 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2526 context()->Plug(x0);
2530 void FullCodeGenerator::VisitProperty(Property* expr) {
2531 Comment cmnt(masm_, "[ Property");
2532 Expression* key = expr->key();
2534 if (key->IsPropertyName()) {
2535 if (!expr->IsSuperAccess()) {
2536 VisitForAccumulatorValue(expr->obj());
2537 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2538 EmitNamedPropertyLoad(expr);
2540 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2541 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2542 __ Push(result_register());
2543 EmitNamedSuperPropertyLoad(expr);
2546 if (!expr->IsSuperAccess()) {
2547 VisitForStackValue(expr->obj());
2548 VisitForAccumulatorValue(expr->key());
2549 __ Move(LoadDescriptor::NameRegister(), x0);
2550 __ Pop(LoadDescriptor::ReceiverRegister());
2551 EmitKeyedPropertyLoad(expr);
2553 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2554 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2555 __ Push(result_register());
2556 VisitForStackValue(expr->key());
2557 EmitKeyedSuperPropertyLoad(expr);
2560 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2561 context()->Plug(x0);
2565 void FullCodeGenerator::CallIC(Handle<Code> code,
2566 TypeFeedbackId ast_id) {
2568 // All calls must have a predictable size in full-codegen code to ensure that
2569 // the debugger can patch them correctly.
2570 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2574 // Code common for calls using the IC.
2575 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2576 Expression* callee = expr->expression();
2578 CallICState::CallType call_type =
2579 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2581 // Get the target function.
2582 if (call_type == CallICState::FUNCTION) {
2583 { StackValueContext context(this);
2584 EmitVariableLoad(callee->AsVariableProxy());
2585 PrepareForBailout(callee, NO_REGISTERS);
2587 // Push undefined as receiver. This is patched in the method prologue if it
2588 // is a sloppy mode method.
2589 __ Push(isolate()->factory()->undefined_value());
2591 // Load the function from the receiver.
2592 DCHECK(callee->IsProperty());
2593 DCHECK(!callee->AsProperty()->IsSuperAccess());
2594 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2595 EmitNamedPropertyLoad(callee->AsProperty());
2596 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2597 // Push the target function under the receiver.
2602 EmitCall(expr, call_type);
2606 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2607 Expression* callee = expr->expression();
2608 DCHECK(callee->IsProperty());
2609 Property* prop = callee->AsProperty();
2610 DCHECK(prop->IsSuperAccess());
2612 SetSourcePosition(prop->position());
2613 Literal* key = prop->key()->AsLiteral();
2614 DCHECK(!key->value()->IsSmi());
2616 // Load the function from the receiver.
2617 const Register scratch = x10;
2618 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2619 EmitLoadHomeObject(super_ref);
2621 VisitForAccumulatorValue(super_ref->this_var());
2623 __ Peek(scratch, kPointerSize);
2624 __ Push(x0, scratch);
2625 __ Push(key->value());
2629 // - this (receiver)
2630 // - this (receiver) <-- LoadFromSuper will pop here and below.
2633 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2635 // Replace home_object with target function.
2636 __ Poke(x0, kPointerSize);
2639 // - target function
2640 // - this (receiver)
2641 EmitCall(expr, CallICState::METHOD);
2645 // Code common for calls using the IC.
2646 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2649 VisitForAccumulatorValue(key);
2651 Expression* callee = expr->expression();
2653 // Load the function from the receiver.
2654 DCHECK(callee->IsProperty());
2655 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2656 __ Move(LoadDescriptor::NameRegister(), x0);
2657 EmitKeyedPropertyLoad(callee->AsProperty());
2658 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2660 // Push the target function under the receiver.
2664 EmitCall(expr, CallICState::METHOD);
2668 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2669 Expression* callee = expr->expression();
2670 DCHECK(callee->IsProperty());
2671 Property* prop = callee->AsProperty();
2672 DCHECK(prop->IsSuperAccess());
2674 SetSourcePosition(prop->position());
2676 // Load the function from the receiver.
2677 const Register scratch = x10;
2678 SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2679 EmitLoadHomeObject(super_ref);
2681 VisitForAccumulatorValue(super_ref->this_var());
2683 __ Peek(scratch, kPointerSize);
2684 __ Push(x0, scratch);
2685 VisitForStackValue(prop->key());
2689 // - this (receiver)
2690 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2693 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2695 // Replace home_object with target function.
2696 __ Poke(x0, kPointerSize);
2699 // - target function
2700 // - this (receiver)
2701 EmitCall(expr, CallICState::METHOD);
2705 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2706 // Load the arguments.
2707 ZoneList<Expression*>* args = expr->arguments();
2708 int arg_count = args->length();
2709 { PreservePositionScope scope(masm()->positions_recorder());
2710 for (int i = 0; i < arg_count; i++) {
2711 VisitForStackValue(args->at(i));
2714 // Record source position of the IC call.
2715 SetSourcePosition(expr->position());
2717 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2718 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2719 __ Peek(x1, (arg_count + 1) * kXRegSize);
2720 // Don't assign a type feedback id to the IC, since type feedback is provided
2721 // by the vector above.
2724 RecordJSReturnSite(expr);
2725 // Restore context register.
2726 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2727 context()->DropAndPlug(1, x0);
2731 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2732 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2733 // Prepare to push a copy of the first argument or undefined if it doesn't
2735 if (arg_count > 0) {
2736 __ Peek(x9, arg_count * kXRegSize);
2738 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2741 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2742 // Prepare to push the receiver of the enclosing function.
2743 int receiver_offset = 2 + info_->scope()->num_parameters();
2744 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2746 // Prepare to push the language mode.
2747 __ Mov(x12, Smi::FromInt(language_mode()));
2748 // Prepare to push the start position of the scope the calls resides in.
2749 __ Mov(x13, Smi::FromInt(scope()->start_position()));
2752 __ Push(x9, x10, x11, x12, x13);
2754 // Do the runtime call.
2755 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
2759 void FullCodeGenerator::EmitLoadSuperConstructor() {
2760 __ ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2762 __ CallRuntime(Runtime::kGetPrototype, 1);
2766 void FullCodeGenerator::VisitCall(Call* expr) {
2768 // We want to verify that RecordJSReturnSite gets called on all paths
2769 // through this function. Avoid early returns.
2770 expr->return_is_recorded_ = false;
2773 Comment cmnt(masm_, "[ Call");
2774 Expression* callee = expr->expression();
2775 Call::CallType call_type = expr->GetCallType(isolate());
2777 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2778 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2779 // to resolve the function we need to call and the receiver of the
2780 // call. Then we call the resolved function using the given
2782 ZoneList<Expression*>* args = expr->arguments();
2783 int arg_count = args->length();
2786 PreservePositionScope pos_scope(masm()->positions_recorder());
2787 VisitForStackValue(callee);
2788 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2789 __ Push(x10); // Reserved receiver slot.
2791 // Push the arguments.
2792 for (int i = 0; i < arg_count; i++) {
2793 VisitForStackValue(args->at(i));
2796 // Push a copy of the function (found below the arguments) and
2798 __ Peek(x10, (arg_count + 1) * kPointerSize);
2800 EmitResolvePossiblyDirectEval(arg_count);
2802 // The runtime call returns a pair of values in x0 (function) and
2803 // x1 (receiver). Touch up the stack with the right values.
2804 __ PokePair(x1, x0, arg_count * kPointerSize);
2806 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2809 // Record source position for debugger.
2810 SetSourcePosition(expr->position());
2812 // Call the evaluated function.
2813 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2814 __ Peek(x1, (arg_count + 1) * kXRegSize);
2816 RecordJSReturnSite(expr);
2817 // Restore context register.
2818 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2819 context()->DropAndPlug(1, x0);
2821 } else if (call_type == Call::GLOBAL_CALL) {
2822 EmitCallWithLoadIC(expr);
2824 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2825 // Call to a lookup slot (dynamically introduced variable).
2826 VariableProxy* proxy = callee->AsVariableProxy();
2829 { PreservePositionScope scope(masm()->positions_recorder());
2830 // Generate code for loading from variables potentially shadowed
2831 // by eval-introduced variables.
2832 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2836 // Call the runtime to find the function to call (returned in x0)
2837 // and the object holding it (returned in x1).
2838 __ Mov(x10, Operand(proxy->name()));
2839 __ Push(context_register(), x10);
2840 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2841 __ Push(x0, x1); // Receiver, function.
2842 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2844 // If fast case code has been generated, emit code to push the
2845 // function and receiver and have the slow path jump around this
2847 if (done.is_linked()) {
2852 // The receiver is implicitly the global receiver. Indicate this
2853 // by passing the undefined to the call function stub.
2854 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2859 // The receiver is either the global receiver or an object found
2860 // by LoadContextSlot.
2862 } else if (call_type == Call::PROPERTY_CALL) {
2863 Property* property = callee->AsProperty();
2864 bool is_named_call = property->key()->IsPropertyName();
2865 if (property->IsSuperAccess()) {
2866 if (is_named_call) {
2867 EmitSuperCallWithLoadIC(expr);
2869 EmitKeyedSuperCallWithLoadIC(expr);
2873 PreservePositionScope scope(masm()->positions_recorder());
2874 VisitForStackValue(property->obj());
2876 if (is_named_call) {
2877 EmitCallWithLoadIC(expr);
2879 EmitKeyedCallWithLoadIC(expr, property->key());
2882 } else if (call_type == Call::SUPER_CALL) {
2883 EmitSuperConstructorCall(expr);
2885 DCHECK(call_type == Call::OTHER_CALL);
2886 // Call to an arbitrary expression not handled specially above.
2887 { PreservePositionScope scope(masm()->positions_recorder());
2888 VisitForStackValue(callee);
2890 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2892 // Emit function call.
2897 // RecordJSReturnSite should have been called.
2898 DCHECK(expr->return_is_recorded_);
2903 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2904 Comment cmnt(masm_, "[ CallNew");
2905 // According to ECMA-262, section 11.2.2, page 44, the function
2906 // expression in new calls must be evaluated before the
2909 // Push constructor on the stack. If it's not a function it's used as
2910 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2912 DCHECK(!expr->expression()->IsSuperReference());
2913 VisitForStackValue(expr->expression());
2915 // Push the arguments ("left-to-right") on the stack.
2916 ZoneList<Expression*>* args = expr->arguments();
2917 int arg_count = args->length();
2918 for (int i = 0; i < arg_count; i++) {
2919 VisitForStackValue(args->at(i));
2922 // Call the construct call builtin that handles allocation and
2923 // constructor invocation.
2924 SetSourcePosition(expr->position());
2926 // Load function and argument count into x1 and x0.
2927 __ Mov(x0, arg_count);
2928 __ Peek(x1, arg_count * kXRegSize);
2930 // Record call targets in unoptimized code.
2931 if (FLAG_pretenuring_call_new) {
2932 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2933 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
2934 expr->CallNewFeedbackSlot().ToInt() + 1);
2937 __ LoadObject(x2, FeedbackVector());
2938 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2940 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2941 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2942 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2943 context()->Plug(x0);
2947 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2948 if (!ValidateSuperCall(expr)) return;
2950 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
2951 GetVar(result_register(), new_target_var);
2952 __ Push(result_register());
2954 EmitLoadSuperConstructor();
2955 __ push(result_register());
2957 // Push the arguments ("left-to-right") on the stack.
2958 ZoneList<Expression*>* args = expr->arguments();
2959 int arg_count = args->length();
2960 for (int i = 0; i < arg_count; i++) {
2961 VisitForStackValue(args->at(i));
2964 // Call the construct call builtin that handles allocation and
2965 // constructor invocation.
2966 SetSourcePosition(expr->position());
2968 // Load function and argument count into x1 and x0.
2969 __ Mov(x0, arg_count);
2970 __ Peek(x1, arg_count * kXRegSize);
2972 // Record call targets in unoptimized code.
2973 if (FLAG_pretenuring_call_new) {
2975 /* TODO(dslomov): support pretenuring.
2976 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2977 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
2978 expr->CallNewFeedbackSlot().ToInt() + 1);
2982 __ LoadObject(x2, FeedbackVector());
2983 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
2985 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
2986 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2990 RecordJSReturnSite(expr);
2992 SuperReference* super_ref = expr->expression()->AsSuperReference();
2993 Variable* this_var = super_ref->this_var()->var();
2994 GetVar(x1, this_var);
2995 Label uninitialized_this;
2996 __ JumpIfRoot(x1, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2997 __ Mov(x0, Operand(this_var->name()));
2999 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3000 __ bind(&uninitialized_this);
3002 EmitVariableAssignment(this_var, Token::INIT_CONST);
3003 context()->Plug(x0);
3007 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3008 ZoneList<Expression*>* args = expr->arguments();
3009 DCHECK(args->length() == 1);
3011 VisitForAccumulatorValue(args->at(0));
3013 Label materialize_true, materialize_false;
3014 Label* if_true = NULL;
3015 Label* if_false = NULL;
3016 Label* fall_through = NULL;
3017 context()->PrepareTest(&materialize_true, &materialize_false,
3018 &if_true, &if_false, &fall_through);
3020 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3021 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
3023 context()->Plug(if_true, if_false);
3027 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3028 ZoneList<Expression*>* args = expr->arguments();
3029 DCHECK(args->length() == 1);
3031 VisitForAccumulatorValue(args->at(0));
3033 Label materialize_true, materialize_false;
3034 Label* if_true = NULL;
3035 Label* if_false = NULL;
3036 Label* fall_through = NULL;
3037 context()->PrepareTest(&materialize_true, &materialize_false,
3038 &if_true, &if_false, &fall_through);
3040 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
3042 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3043 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
3045 context()->Plug(if_true, if_false);
3049 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3050 ZoneList<Expression*>* args = expr->arguments();
3051 DCHECK(args->length() == 1);
3053 VisitForAccumulatorValue(args->at(0));
3055 Label materialize_true, materialize_false;
3056 Label* if_true = NULL;
3057 Label* if_false = NULL;
3058 Label* fall_through = NULL;
3059 context()->PrepareTest(&materialize_true, &materialize_false,
3060 &if_true, &if_false, &fall_through);
3062 __ JumpIfSmi(x0, if_false);
3063 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3064 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3065 // Undetectable objects behave like undefined when tested with typeof.
3066 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3067 __ Tbnz(x11, Map::kIsUndetectable, if_false);
3068 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
3069 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3071 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3072 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3073 Split(le, if_true, if_false, fall_through);
3075 context()->Plug(if_true, if_false);
3079 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3080 ZoneList<Expression*>* args = expr->arguments();
3081 DCHECK(args->length() == 1);
3083 VisitForAccumulatorValue(args->at(0));
3085 Label materialize_true, materialize_false;
3086 Label* if_true = NULL;
3087 Label* if_false = NULL;
3088 Label* fall_through = NULL;
3089 context()->PrepareTest(&materialize_true, &materialize_false,
3090 &if_true, &if_false, &fall_through);
3092 __ JumpIfSmi(x0, if_false);
3093 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3094 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3095 Split(ge, if_true, if_false, fall_through);
3097 context()->Plug(if_true, if_false);
3101 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3102 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
3103 ZoneList<Expression*>* args = expr->arguments();
3104 DCHECK(args->length() == 1);
3106 VisitForAccumulatorValue(args->at(0));
3108 Label materialize_true, materialize_false;
3109 Label* if_true = NULL;
3110 Label* if_false = NULL;
3111 Label* fall_through = NULL;
3112 context()->PrepareTest(&materialize_true, &materialize_false,
3113 &if_true, &if_false, &fall_through);
3115 __ JumpIfSmi(x0, if_false);
3116 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3117 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3118 __ Tst(x11, 1 << Map::kIsUndetectable);
3119 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3120 Split(ne, if_true, if_false, fall_through);
3122 context()->Plug(if_true, if_false);
3126 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3127 CallRuntime* expr) {
3128 ZoneList<Expression*>* args = expr->arguments();
3129 DCHECK(args->length() == 1);
3130 VisitForAccumulatorValue(args->at(0));
3132 Label materialize_true, materialize_false, skip_lookup;
3133 Label* if_true = NULL;
3134 Label* if_false = NULL;
3135 Label* fall_through = NULL;
3136 context()->PrepareTest(&materialize_true, &materialize_false,
3137 &if_true, &if_false, &fall_through);
3139 Register object = x0;
3140 __ AssertNotSmi(object);
3143 Register bitfield2 = x11;
3144 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3145 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3146 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3148 // Check for fast case object. Generate false result for slow case object.
3149 Register props = x12;
3150 Register props_map = x12;
3151 Register hash_table_map = x13;
3152 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3153 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
3154 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
3155 __ Cmp(props_map, hash_table_map);
3158 // Look for valueOf name in the descriptor array, and indicate false if found.
3159 // Since we omit an enumeration index check, if it is added via a transition
3160 // that shares its descriptor array, this is a false positive.
3163 // Skip loop if no descriptors are valid.
3164 Register descriptors = x12;
3165 Register descriptors_length = x13;
3166 __ NumberOfOwnDescriptors(descriptors_length, map);
3167 __ Cbz(descriptors_length, &done);
3169 __ LoadInstanceDescriptors(map, descriptors);
3171 // Calculate the end of the descriptor array.
3172 Register descriptors_end = x14;
3173 __ Mov(x15, DescriptorArray::kDescriptorSize);
3174 __ Mul(descriptors_length, descriptors_length, x15);
3175 // Calculate location of the first key name.
3176 __ Add(descriptors, descriptors,
3177 DescriptorArray::kFirstOffset - kHeapObjectTag);
3178 // Calculate the end of the descriptor array.
3179 __ Add(descriptors_end, descriptors,
3180 Operand(descriptors_length, LSL, kPointerSizeLog2));
3182 // Loop through all the keys in the descriptor array. If one of these is the
3183 // string "valueOf" the result is false.
3184 Register valueof_string = x1;
3185 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
3186 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3188 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
3189 __ Cmp(x15, valueof_string);
3191 __ Cmp(descriptors, descriptors_end);
3196 // Set the bit in the map to indicate that there is no local valueOf field.
3197 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3198 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3199 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3201 __ Bind(&skip_lookup);
3203 // If a valueOf property is not found on the object check that its prototype
3204 // is the unmodified String prototype. If not result is false.
3205 Register prototype = x1;
3206 Register global_idx = x2;
3207 Register native_context = x2;
3208 Register string_proto = x3;
3209 Register proto_map = x4;
3210 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3211 __ JumpIfSmi(prototype, if_false);
3212 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
3213 __ Ldr(global_idx, GlobalObjectMemOperand());
3214 __ Ldr(native_context,
3215 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
3216 __ Ldr(string_proto,
3217 ContextMemOperand(native_context,
3218 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3219 __ Cmp(proto_map, string_proto);
3221 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3222 Split(eq, if_true, if_false, fall_through);
3224 context()->Plug(if_true, if_false);
3228 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3229 ZoneList<Expression*>* args = expr->arguments();
3230 DCHECK(args->length() == 1);
3232 VisitForAccumulatorValue(args->at(0));
3234 Label materialize_true, materialize_false;
3235 Label* if_true = NULL;
3236 Label* if_false = NULL;
3237 Label* fall_through = NULL;
3238 context()->PrepareTest(&materialize_true, &materialize_false,
3239 &if_true, &if_false, &fall_through);
3241 __ JumpIfSmi(x0, if_false);
3242 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3243 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3244 Split(eq, if_true, if_false, fall_through);
3246 context()->Plug(if_true, if_false);
3250 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3251 ZoneList<Expression*>* args = expr->arguments();
3252 DCHECK(args->length() == 1);
3254 VisitForAccumulatorValue(args->at(0));
3256 Label materialize_true, materialize_false;
3257 Label* if_true = NULL;
3258 Label* if_false = NULL;
3259 Label* fall_through = NULL;
3260 context()->PrepareTest(&materialize_true, &materialize_false,
3261 &if_true, &if_false, &fall_through);
3263 // Only a HeapNumber can be -0.0, so return false if we have something else.
3264 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3266 // Test the bit pattern.
3267 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3268 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
3270 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3271 Split(vs, if_true, if_false, fall_through);
3273 context()->Plug(if_true, if_false);
3277 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3278 ZoneList<Expression*>* args = expr->arguments();
3279 DCHECK(args->length() == 1);
3281 VisitForAccumulatorValue(args->at(0));
3283 Label materialize_true, materialize_false;
3284 Label* if_true = NULL;
3285 Label* if_false = NULL;
3286 Label* fall_through = NULL;
3287 context()->PrepareTest(&materialize_true, &materialize_false,
3288 &if_true, &if_false, &fall_through);
3290 __ JumpIfSmi(x0, if_false);
3291 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3292 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3293 Split(eq, if_true, if_false, fall_through);
3295 context()->Plug(if_true, if_false);
3299 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3300 ZoneList<Expression*>* args = expr->arguments();
3301 DCHECK(args->length() == 1);
3303 VisitForAccumulatorValue(args->at(0));
3305 Label materialize_true, materialize_false;
3306 Label* if_true = NULL;
3307 Label* if_false = NULL;
3308 Label* fall_through = NULL;
3309 context()->PrepareTest(&materialize_true, &materialize_false,
3310 &if_true, &if_false, &fall_through);
3312 __ JumpIfSmi(x0, if_false);
3313 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3314 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3315 Split(eq, if_true, if_false, fall_through);
3317 context()->Plug(if_true, if_false);
3321 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3322 ZoneList<Expression*>* args = expr->arguments();
3323 DCHECK(args->length() == 1);
3325 VisitForAccumulatorValue(args->at(0));
3327 Label materialize_true, materialize_false;
3328 Label* if_true = NULL;
3329 Label* if_false = NULL;
3330 Label* fall_through = NULL;
3331 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3332 &if_false, &fall_through);
3334 __ JumpIfSmi(x0, if_false);
3336 Register type_reg = x11;
3337 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3338 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3339 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3340 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3341 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3342 Split(ls, if_true, if_false, fall_through);
3344 context()->Plug(if_true, if_false);
3348 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3349 DCHECK(expr->arguments()->length() == 0);
3351 Label materialize_true, materialize_false;
3352 Label* if_true = NULL;
3353 Label* if_false = NULL;
3354 Label* fall_through = NULL;
3355 context()->PrepareTest(&materialize_true, &materialize_false,
3356 &if_true, &if_false, &fall_through);
3358 // Get the frame pointer for the calling frame.
3359 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3361 // Skip the arguments adaptor frame if it exists.
3362 Label check_frame_marker;
3363 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3364 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3365 __ B(ne, &check_frame_marker);
3366 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3368 // Check the marker in the calling frame.
3369 __ Bind(&check_frame_marker);
3370 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3371 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3372 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3373 Split(eq, if_true, if_false, fall_through);
3375 context()->Plug(if_true, if_false);
3379 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3380 ZoneList<Expression*>* args = expr->arguments();
3381 DCHECK(args->length() == 2);
3383 // Load the two objects into registers and perform the comparison.
3384 VisitForStackValue(args->at(0));
3385 VisitForAccumulatorValue(args->at(1));
3387 Label materialize_true, materialize_false;
3388 Label* if_true = NULL;
3389 Label* if_false = NULL;
3390 Label* fall_through = NULL;
3391 context()->PrepareTest(&materialize_true, &materialize_false,
3392 &if_true, &if_false, &fall_through);
3396 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3397 Split(eq, if_true, if_false, fall_through);
3399 context()->Plug(if_true, if_false);
3403 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3404 ZoneList<Expression*>* args = expr->arguments();
3405 DCHECK(args->length() == 1);
3407 // ArgumentsAccessStub expects the key in x1.
3408 VisitForAccumulatorValue(args->at(0));
3410 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3411 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3413 context()->Plug(x0);
3417 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3418 DCHECK(expr->arguments()->length() == 0);
3420 // Get the number of formal parameters.
3421 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3423 // Check if the calling frame is an arguments adaptor frame.
3424 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3425 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3426 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3429 // Arguments adaptor case: Read the arguments length from the
3431 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3434 context()->Plug(x0);
3438 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3439 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3440 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK(args->length() == 1);
3442 Label done, null, function, non_function_constructor;
3444 VisitForAccumulatorValue(args->at(0));
3446 // If the object is a smi, we return null.
3447 __ JumpIfSmi(x0, &null);
3449 // Check that the object is a JS object but take special care of JS
3450 // functions to make sure they have 'Function' as their class.
3451 // Assume that there are only two callable types, and one of them is at
3452 // either end of the type range for JS object types. Saves extra comparisons.
3453 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3454 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3455 // x10: object's map.
3456 // x11: object's type.
3458 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3459 FIRST_SPEC_OBJECT_TYPE + 1);
3460 __ B(eq, &function);
3462 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3463 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3464 LAST_SPEC_OBJECT_TYPE - 1);
3465 __ B(eq, &function);
3466 // Assume that there is no larger type.
3467 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3469 // Check if the constructor in the map is a JS function.
3470 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3471 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3472 &non_function_constructor);
3474 // x12 now contains the constructor function. Grab the
3475 // instance class name from there.
3476 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3478 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3481 // Functions have class 'Function'.
3483 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3486 // Objects with a non-function constructor have class 'Object'.
3487 __ Bind(&non_function_constructor);
3488 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3491 // Non-JS objects have class null.
3493 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3498 context()->Plug(x0);
3502 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3503 // Load the arguments on the stack and call the stub.
3504 SubStringStub stub(isolate());
3505 ZoneList<Expression*>* args = expr->arguments();
3506 DCHECK(args->length() == 3);
3507 VisitForStackValue(args->at(0));
3508 VisitForStackValue(args->at(1));
3509 VisitForStackValue(args->at(2));
3511 context()->Plug(x0);
3515 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3516 // Load the arguments on the stack and call the stub.
3517 RegExpExecStub stub(isolate());
3518 ZoneList<Expression*>* args = expr->arguments();
3519 DCHECK(args->length() == 4);
3520 VisitForStackValue(args->at(0));
3521 VisitForStackValue(args->at(1));
3522 VisitForStackValue(args->at(2));
3523 VisitForStackValue(args->at(3));
3525 context()->Plug(x0);
3529 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3530 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3531 ZoneList<Expression*>* args = expr->arguments();
3532 DCHECK(args->length() == 1);
3533 VisitForAccumulatorValue(args->at(0)); // Load the object.
3536 // If the object is a smi return the object.
3537 __ JumpIfSmi(x0, &done);
3538 // If the object is not a value type, return the object.
3539 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3540 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3543 context()->Plug(x0);
3547 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3548 ZoneList<Expression*>* args = expr->arguments();
3549 DCHECK(args->length() == 2);
3550 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3551 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3553 VisitForAccumulatorValue(args->at(0)); // Load the object.
3555 Label runtime, done, not_date_object;
3556 Register object = x0;
3557 Register result = x0;
3558 Register stamp_addr = x10;
3559 Register stamp_cache = x11;
3561 __ JumpIfSmi(object, ¬_date_object);
3562 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, ¬_date_object);
3564 if (index->value() == 0) {
3565 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3568 if (index->value() < JSDate::kFirstUncachedField) {
3569 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3571 __ Ldr(stamp_addr, MemOperand(x10));
3572 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3573 __ Cmp(stamp_addr, stamp_cache);
3575 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3576 kPointerSize * index->value()));
3582 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3586 __ Bind(¬_date_object);
3587 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3589 context()->Plug(x0);
3593 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3594 ZoneList<Expression*>* args = expr->arguments();
3595 DCHECK_EQ(3, args->length());
3597 Register string = x0;
3598 Register index = x1;
3599 Register value = x2;
3600 Register scratch = x10;
3602 VisitForStackValue(args->at(0)); // index
3603 VisitForStackValue(args->at(1)); // value
3604 VisitForAccumulatorValue(args->at(2)); // string
3605 __ Pop(value, index);
3607 if (FLAG_debug_code) {
3608 __ AssertSmi(value, kNonSmiValue);
3609 __ AssertSmi(index, kNonSmiIndex);
3610 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3611 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3615 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3618 __ Strb(value, MemOperand(scratch, index));
3619 context()->Plug(string);
3623 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3624 ZoneList<Expression*>* args = expr->arguments();
3625 DCHECK_EQ(3, args->length());
3627 Register string = x0;
3628 Register index = x1;
3629 Register value = x2;
3630 Register scratch = x10;
3632 VisitForStackValue(args->at(0)); // index
3633 VisitForStackValue(args->at(1)); // value
3634 VisitForAccumulatorValue(args->at(2)); // string
3635 __ Pop(value, index);
3637 if (FLAG_debug_code) {
3638 __ AssertSmi(value, kNonSmiValue);
3639 __ AssertSmi(index, kNonSmiIndex);
3640 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3641 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3645 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3648 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3649 context()->Plug(string);
3653 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3654 // Load the arguments on the stack and call the MathPow stub.
3655 ZoneList<Expression*>* args = expr->arguments();
3656 DCHECK(args->length() == 2);
3657 VisitForStackValue(args->at(0));
3658 VisitForStackValue(args->at(1));
3659 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3661 context()->Plug(x0);
3665 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3666 ZoneList<Expression*>* args = expr->arguments();
3667 DCHECK(args->length() == 2);
3668 VisitForStackValue(args->at(0)); // Load the object.
3669 VisitForAccumulatorValue(args->at(1)); // Load the value.
3675 // If the object is a smi, return the value.
3676 __ JumpIfSmi(x1, &done);
3678 // If the object is not a value type, return the value.
3679 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3682 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3683 // Update the write barrier. Save the value as it will be
3684 // overwritten by the write barrier code and is needed afterward.
3686 __ RecordWriteField(
3687 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3690 context()->Plug(x0);
3694 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3695 ZoneList<Expression*>* args = expr->arguments();
3696 DCHECK_EQ(args->length(), 1);
3698 // Load the argument into x0 and call the stub.
3699 VisitForAccumulatorValue(args->at(0));
3701 NumberToStringStub stub(isolate());
3703 context()->Plug(x0);
3707 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3708 ZoneList<Expression*>* args = expr->arguments();
3709 DCHECK(args->length() == 1);
3711 VisitForAccumulatorValue(args->at(0));
3715 Register result = x1;
3717 StringCharFromCodeGenerator generator(code, result);
3718 generator.GenerateFast(masm_);
3721 NopRuntimeCallHelper call_helper;
3722 generator.GenerateSlow(masm_, call_helper);
3725 context()->Plug(result);
3729 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3730 ZoneList<Expression*>* args = expr->arguments();
3731 DCHECK(args->length() == 2);
3733 VisitForStackValue(args->at(0));
3734 VisitForAccumulatorValue(args->at(1));
3736 Register object = x1;
3737 Register index = x0;
3738 Register result = x3;
3742 Label need_conversion;
3743 Label index_out_of_range;
3745 StringCharCodeAtGenerator generator(object,
3750 &index_out_of_range,
3751 STRING_INDEX_IS_NUMBER);
3752 generator.GenerateFast(masm_);
3755 __ Bind(&index_out_of_range);
3756 // When the index is out of range, the spec requires us to return NaN.
3757 __ LoadRoot(result, Heap::kNanValueRootIndex);
3760 __ Bind(&need_conversion);
3761 // Load the undefined value into the result register, which will
3762 // trigger conversion.
3763 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3766 NopRuntimeCallHelper call_helper;
3767 generator.GenerateSlow(masm_, call_helper);
3770 context()->Plug(result);
3774 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3775 ZoneList<Expression*>* args = expr->arguments();
3776 DCHECK(args->length() == 2);
3778 VisitForStackValue(args->at(0));
3779 VisitForAccumulatorValue(args->at(1));
3781 Register object = x1;
3782 Register index = x0;
3783 Register result = x0;
3787 Label need_conversion;
3788 Label index_out_of_range;
3790 StringCharAtGenerator generator(object,
3796 &index_out_of_range,
3797 STRING_INDEX_IS_NUMBER);
3798 generator.GenerateFast(masm_);
3801 __ Bind(&index_out_of_range);
3802 // When the index is out of range, the spec requires us to return
3803 // the empty string.
3804 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3807 __ Bind(&need_conversion);
3808 // Move smi zero into the result register, which will trigger conversion.
3809 __ Mov(result, Smi::FromInt(0));
3812 NopRuntimeCallHelper call_helper;
3813 generator.GenerateSlow(masm_, call_helper);
3816 context()->Plug(result);
3820 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3821 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3822 ZoneList<Expression*>* args = expr->arguments();
3823 DCHECK_EQ(2, args->length());
3825 VisitForStackValue(args->at(0));
3826 VisitForAccumulatorValue(args->at(1));
3829 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3832 context()->Plug(x0);
3836 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3837 ZoneList<Expression*>* args = expr->arguments();
3838 DCHECK_EQ(2, args->length());
3839 VisitForStackValue(args->at(0));
3840 VisitForStackValue(args->at(1));
3842 StringCompareStub stub(isolate());
3844 context()->Plug(x0);
3848 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3849 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3850 ZoneList<Expression*>* args = expr->arguments();
3851 DCHECK(args->length() >= 2);
3853 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3854 for (int i = 0; i < arg_count + 1; i++) {
3855 VisitForStackValue(args->at(i));
3857 VisitForAccumulatorValue(args->last()); // Function.
3859 Label runtime, done;
3860 // Check for non-function argument (including proxy).
3861 __ JumpIfSmi(x0, &runtime);
3862 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3864 // InvokeFunction requires the function in x1. Move it in there.
3866 ParameterCount count(arg_count);
3867 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3868 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3873 __ CallRuntime(Runtime::kCall, args->length());
3876 context()->Plug(x0);
3880 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3881 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3882 GetVar(result_register(), new_target_var);
3883 __ Push(result_register());
3885 EmitLoadSuperConstructor();
3886 __ Push(result_register());
3888 // Check if the calling frame is an arguments adaptor frame.
3889 Label adaptor_frame, args_set_up, runtime;
3890 __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3891 __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
3892 __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3893 __ B(eq, &adaptor_frame);
3894 // default constructor has no arguments, so no adaptor frame means no args.
3895 __ Mov(x0, Operand(0));
3898 // Copy arguments from adaptor frame.
3900 __ bind(&adaptor_frame);
3901 __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
3902 __ SmiUntag(x1, x1);
3904 // Subtract 1 from arguments count, for new.target.
3905 __ Sub(x1, x1, Operand(1));
3908 // Get arguments pointer in x11.
3909 __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
3910 __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
3913 // Pre-decrement x11 with kPointerSize on each iteration.
3914 // Pre-decrement in order to skip receiver.
3915 __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
3917 __ Sub(x1, x1, Operand(1));
3921 __ bind(&args_set_up);
3922 __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
3923 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
3925 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
3926 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3930 context()->Plug(result_register());
3934 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3935 RegExpConstructResultStub stub(isolate());
3936 ZoneList<Expression*>* args = expr->arguments();
3937 DCHECK(args->length() == 3);
3938 VisitForStackValue(args->at(0));
3939 VisitForStackValue(args->at(1));
3940 VisitForAccumulatorValue(args->at(2));
3943 context()->Plug(x0);
3947 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3948 ZoneList<Expression*>* args = expr->arguments();
3949 DCHECK_EQ(2, args->length());
3950 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
3951 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3953 Handle<FixedArray> jsfunction_result_caches(
3954 isolate()->native_context()->jsfunction_result_caches());
3955 if (jsfunction_result_caches->length() <= cache_id) {
3956 __ Abort(kAttemptToUseUndefinedCache);
3957 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3958 context()->Plug(x0);
3962 VisitForAccumulatorValue(args->at(1));
3965 Register cache = x1;
3966 __ Ldr(cache, GlobalObjectMemOperand());
3967 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3968 __ Ldr(cache, ContextMemOperand(cache,
3969 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3971 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3974 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3975 JSFunctionResultCache::kFingerOffset));
3976 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3977 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3979 // Load the key and data from the cache.
3980 __ Ldp(x2, x3, MemOperand(x3));
3983 __ CmovX(x0, x3, eq);
3986 // Call runtime to perform the lookup.
3987 __ Push(cache, key);
3988 __ CallRuntime(Runtime::kGetFromCache, 2);
3991 context()->Plug(x0);
3995 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3996 ZoneList<Expression*>* args = expr->arguments();
3997 VisitForAccumulatorValue(args->at(0));
3999 Label materialize_true, materialize_false;
4000 Label* if_true = NULL;
4001 Label* if_false = NULL;
4002 Label* fall_through = NULL;
4003 context()->PrepareTest(&materialize_true, &materialize_false,
4004 &if_true, &if_false, &fall_through);
4006 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4007 __ Tst(x10, String::kContainsCachedArrayIndexMask);
4008 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4009 Split(eq, if_true, if_false, fall_through);
4011 context()->Plug(if_true, if_false);
4015 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4016 ZoneList<Expression*>* args = expr->arguments();
4017 DCHECK(args->length() == 1);
4018 VisitForAccumulatorValue(args->at(0));
4020 __ AssertString(x0);
4022 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4023 __ IndexFromHash(x10, x0);
4025 context()->Plug(x0);
4029 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4030 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
4032 ZoneList<Expression*>* args = expr->arguments();
4033 DCHECK(args->length() == 2);
4034 VisitForStackValue(args->at(1));
4035 VisitForAccumulatorValue(args->at(0));
4037 Register array = x0;
4038 Register result = x0;
4039 Register elements = x1;
4040 Register element = x2;
4041 Register separator = x3;
4042 Register array_length = x4;
4043 Register result_pos = x5;
4045 Register string_length = x10;
4046 Register elements_end = x11;
4047 Register string = x12;
4048 Register scratch1 = x13;
4049 Register scratch2 = x14;
4050 Register scratch3 = x7;
4051 Register separator_length = x15;
4053 Label bailout, done, one_char_separator, long_separator,
4054 non_trivial_array, not_size_one_array, loop,
4055 empty_separator_loop, one_char_separator_loop,
4056 one_char_separator_loop_entry, long_separator_loop;
4058 // The separator operand is on the stack.
4061 // Check that the array is a JSArray.
4062 __ JumpIfSmi(array, &bailout);
4063 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4065 // Check that the array has fast elements.
4066 __ CheckFastElements(map, scratch1, &bailout);
4068 // If the array has length zero, return the empty string.
4069 // Load and untag the length of the array.
4070 // It is an unsigned value, so we can skip sign extension.
4071 // We assume little endianness.
4072 __ Ldrsw(array_length,
4073 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
4074 __ Cbnz(array_length, &non_trivial_array);
4075 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4078 __ Bind(&non_trivial_array);
4079 // Get the FixedArray containing array's elements.
4080 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4082 // Check that all array elements are sequential one-byte strings, and
4083 // accumulate the sum of their lengths.
4084 __ Mov(string_length, 0);
4085 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4086 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4087 // Loop condition: while (element < elements_end).
4088 // Live values in registers:
4089 // elements: Fixed array of strings.
4090 // array_length: Length of the fixed array of strings (not smi)
4091 // separator: Separator string
4092 // string_length: Accumulated sum of string lengths (not smi).
4093 // element: Current array element.
4094 // elements_end: Array end.
4095 if (FLAG_debug_code) {
4096 __ Cmp(array_length, 0);
4097 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4100 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4101 __ JumpIfSmi(string, &bailout);
4102 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4103 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4104 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4106 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
4107 __ Adds(string_length, string_length, scratch1);
4109 __ Cmp(element, elements_end);
4112 // If array_length is 1, return elements[0], a string.
4113 __ Cmp(array_length, 1);
4114 __ B(ne, ¬_size_one_array);
4115 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
4118 __ Bind(¬_size_one_array);
4120 // Live values in registers:
4121 // separator: Separator string
4122 // array_length: Length of the array (not smi).
4123 // string_length: Sum of string lengths (not smi).
4124 // elements: FixedArray of strings.
4126 // Check that the separator is a flat one-byte string.
4127 __ JumpIfSmi(separator, &bailout);
4128 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4129 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4130 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4132 // Add (separator length times array_length) - separator length to the
4133 // string_length to get the length of the result string.
4134 // Load the separator length as untagged.
4135 // We assume little endianness, and that the length is positive.
4136 __ Ldrsw(separator_length,
4137 UntagSmiFieldMemOperand(separator,
4138 SeqOneByteString::kLengthOffset));
4139 __ Sub(string_length, string_length, separator_length);
4140 __ Umaddl(string_length, array_length.W(), separator_length.W(),
4143 // Get first element in the array.
4144 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4145 // Live values in registers:
4146 // element: First array element
4147 // separator: Separator string
4148 // string_length: Length of result string (not smi)
4149 // array_length: Length of the array (not smi).
4150 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
4153 // Prepare for looping. Set up elements_end to end of the array. Set
4154 // result_pos to the position of the result where to write the first
4156 // TODO(all): useless unless AllocateOneByteString trashes the register.
4157 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4158 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4160 // Check the length of the separator.
4161 __ Cmp(separator_length, 1);
4162 __ B(eq, &one_char_separator);
4163 __ B(gt, &long_separator);
4165 // Empty separator case
4166 __ Bind(&empty_separator_loop);
4167 // Live values in registers:
4168 // result_pos: the position to which we are currently copying characters.
4169 // element: Current array element.
4170 // elements_end: Array end.
4172 // Copy next array element to the result.
4173 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4174 __ Ldrsw(string_length,
4175 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4176 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4177 __ CopyBytes(result_pos, string, string_length, scratch1);
4178 __ Cmp(element, elements_end);
4179 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4182 // One-character separator case
4183 __ Bind(&one_char_separator);
4184 // Replace separator with its one-byte character value.
4185 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4186 // Jump into the loop after the code that copies the separator, so the first
4187 // element is not preceded by a separator
4188 __ B(&one_char_separator_loop_entry);
4190 __ Bind(&one_char_separator_loop);
4191 // Live values in registers:
4192 // result_pos: the position to which we are currently copying characters.
4193 // element: Current array element.
4194 // elements_end: Array end.
4195 // separator: Single separator one-byte char (in lower byte).
4197 // Copy the separator character to the result.
4198 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4200 // Copy next array element to the result.
4201 __ Bind(&one_char_separator_loop_entry);
4202 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4203 __ Ldrsw(string_length,
4204 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4205 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4206 __ CopyBytes(result_pos, string, string_length, scratch1);
4207 __ Cmp(element, elements_end);
4208 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4211 // Long separator case (separator is more than one character). Entry is at the
4212 // label long_separator below.
4213 __ Bind(&long_separator_loop);
4214 // Live values in registers:
4215 // result_pos: the position to which we are currently copying characters.
4216 // element: Current array element.
4217 // elements_end: Array end.
4218 // separator: Separator string.
4220 // Copy the separator to the result.
4221 // TODO(all): hoist next two instructions.
4222 __ Ldrsw(string_length,
4223 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4224 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4225 __ CopyBytes(result_pos, string, string_length, scratch1);
4227 __ Bind(&long_separator);
4228 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4229 __ Ldrsw(string_length,
4230 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4231 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4232 __ CopyBytes(result_pos, string, string_length, scratch1);
4233 __ Cmp(element, elements_end);
4234 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4238 // Returning undefined will force slower code to handle it.
4239 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4241 context()->Plug(result);
4245 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4246 DCHECK(expr->arguments()->length() == 0);
4247 ExternalReference debug_is_active =
4248 ExternalReference::debug_is_active_address(isolate());
4249 __ Mov(x10, debug_is_active);
4250 __ Ldrb(x0, MemOperand(x10));
4252 context()->Plug(x0);
4256 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4257 if (expr->function() != NULL &&
4258 expr->function()->intrinsic_type == Runtime::INLINE) {
4259 Comment cmnt(masm_, "[ InlineRuntimeCall");
4260 EmitInlineRuntimeCall(expr);
4264 Comment cmnt(masm_, "[ CallRunTime");
4265 ZoneList<Expression*>* args = expr->arguments();
4266 int arg_count = args->length();
4268 if (expr->is_jsruntime()) {
4269 // Push the builtins object as the receiver.
4270 __ Ldr(x10, GlobalObjectMemOperand());
4271 __ Ldr(LoadDescriptor::ReceiverRegister(),
4272 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4273 __ Push(LoadDescriptor::ReceiverRegister());
4275 // Load the function from the receiver.
4276 Handle<String> name = expr->name();
4277 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4278 if (FLAG_vector_ics) {
4279 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4280 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4281 CallLoadIC(NOT_CONTEXTUAL);
4283 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4286 // Push the target function under the receiver.
4290 int arg_count = args->length();
4291 for (int i = 0; i < arg_count; i++) {
4292 VisitForStackValue(args->at(i));
4295 // Record source position of the IC call.
4296 SetSourcePosition(expr->position());
4297 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4298 __ Peek(x1, (arg_count + 1) * kPointerSize);
4301 // Restore context register.
4302 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4304 context()->DropAndPlug(1, x0);
4306 // Push the arguments ("left-to-right").
4307 for (int i = 0; i < arg_count; i++) {
4308 VisitForStackValue(args->at(i));
4311 // Call the C runtime function.
4312 __ CallRuntime(expr->function(), arg_count);
4313 context()->Plug(x0);
4318 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4319 switch (expr->op()) {
4320 case Token::DELETE: {
4321 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4322 Property* property = expr->expression()->AsProperty();
4323 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4325 if (property != NULL) {
4326 VisitForStackValue(property->obj());
4327 VisitForStackValue(property->key());
4328 __ Mov(x10, Smi::FromInt(language_mode()));
4330 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4331 context()->Plug(x0);
4332 } else if (proxy != NULL) {
4333 Variable* var = proxy->var();
4334 // Delete of an unqualified identifier is disallowed in strict mode
4335 // but "delete this" is allowed.
4336 DCHECK(is_sloppy(language_mode()) || var->is_this());
4337 if (var->IsUnallocated()) {
4338 __ Ldr(x12, GlobalObjectMemOperand());
4339 __ Mov(x11, Operand(var->name()));
4340 __ Mov(x10, Smi::FromInt(SLOPPY));
4341 __ Push(x12, x11, x10);
4342 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4343 context()->Plug(x0);
4344 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4345 // Result of deleting non-global, non-dynamic variables is false.
4346 // The subexpression does not have side effects.
4347 context()->Plug(var->is_this());
4349 // Non-global variable. Call the runtime to try to delete from the
4350 // context where the variable was introduced.
4351 __ Mov(x2, Operand(var->name()));
4352 __ Push(context_register(), x2);
4353 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4354 context()->Plug(x0);
4357 // Result of deleting non-property, non-variable reference is true.
4358 // The subexpression may have side effects.
4359 VisitForEffect(expr->expression());
4360 context()->Plug(true);
4366 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4367 VisitForEffect(expr->expression());
4368 context()->Plug(Heap::kUndefinedValueRootIndex);
4372 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4373 if (context()->IsEffect()) {
4374 // Unary NOT has no side effects so it's only necessary to visit the
4375 // subexpression. Match the optimizing compiler by not branching.
4376 VisitForEffect(expr->expression());
4377 } else if (context()->IsTest()) {
4378 const TestContext* test = TestContext::cast(context());
4379 // The labels are swapped for the recursive call.
4380 VisitForControl(expr->expression(),
4381 test->false_label(),
4383 test->fall_through());
4384 context()->Plug(test->true_label(), test->false_label());
4386 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4387 // TODO(jbramley): This could be much more efficient using (for
4388 // example) the CSEL instruction.
4389 Label materialize_true, materialize_false, done;
4390 VisitForControl(expr->expression(),
4395 __ Bind(&materialize_true);
4396 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4397 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4400 __ Bind(&materialize_false);
4401 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4402 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4406 if (context()->IsStackValue()) {
4407 __ Push(result_register());
4412 case Token::TYPEOF: {
4413 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4415 StackValueContext context(this);
4416 VisitForTypeofValue(expr->expression());
4418 __ CallRuntime(Runtime::kTypeof, 1);
4419 context()->Plug(x0);
4428 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4429 DCHECK(expr->expression()->IsValidReferenceExpression());
4431 Comment cmnt(masm_, "[ CountOperation");
4432 SetSourcePosition(expr->position());
4434 Property* prop = expr->expression()->AsProperty();
4435 LhsKind assign_type = GetAssignType(prop);
4437 // Evaluate expression and get value.
4438 if (assign_type == VARIABLE) {
4439 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4440 AccumulatorValueContext context(this);
4441 EmitVariableLoad(expr->expression()->AsVariableProxy());
4443 // Reserve space for result of postfix operation.
4444 if (expr->is_postfix() && !context()->IsEffect()) {
4447 switch (assign_type) {
4448 case NAMED_PROPERTY: {
4449 // Put the object both on the stack and in the register.
4450 VisitForStackValue(prop->obj());
4451 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4452 EmitNamedPropertyLoad(prop);
4456 case NAMED_SUPER_PROPERTY: {
4457 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4458 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4459 __ Push(result_register());
4460 const Register scratch = x10;
4461 __ Peek(scratch, kPointerSize);
4462 __ Push(scratch, result_register());
4463 EmitNamedSuperPropertyLoad(prop);
4467 case KEYED_SUPER_PROPERTY: {
4468 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4469 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4470 __ Push(result_register());
4471 VisitForAccumulatorValue(prop->key());
4472 __ Push(result_register());
4473 const Register scratch1 = x10;
4474 const Register scratch2 = x11;
4475 __ Peek(scratch1, 2 * kPointerSize);
4476 __ Peek(scratch2, kPointerSize);
4477 __ Push(scratch1, scratch2, result_register());
4478 EmitKeyedSuperPropertyLoad(prop);
4482 case KEYED_PROPERTY: {
4483 VisitForStackValue(prop->obj());
4484 VisitForStackValue(prop->key());
4485 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4486 __ Peek(LoadDescriptor::NameRegister(), 0);
4487 EmitKeyedPropertyLoad(prop);
4496 // We need a second deoptimization point after loading the value
4497 // in case evaluating the property load my have a side effect.
4498 if (assign_type == VARIABLE) {
4499 PrepareForBailout(expr->expression(), TOS_REG);
4501 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4504 // Inline smi case if we are in a loop.
4505 Label stub_call, done;
4506 JumpPatchSite patch_site(masm_);
4508 int count_value = expr->op() == Token::INC ? 1 : -1;
4509 if (ShouldInlineSmiCase(expr->op())) {
4511 patch_site.EmitJumpIfNotSmi(x0, &slow);
4513 // Save result for postfix expressions.
4514 if (expr->is_postfix()) {
4515 if (!context()->IsEffect()) {
4516 // Save the result on the stack. If we have a named or keyed property we
4517 // store the result under the receiver that is currently on top of the
4519 switch (assign_type) {
4523 case NAMED_PROPERTY:
4524 __ Poke(x0, kPointerSize);
4526 case NAMED_SUPER_PROPERTY:
4527 __ Poke(x0, kPointerSize * 2);
4529 case KEYED_PROPERTY:
4530 __ Poke(x0, kPointerSize * 2);
4532 case KEYED_SUPER_PROPERTY:
4533 __ Poke(x0, kPointerSize * 3);
4539 __ Adds(x0, x0, Smi::FromInt(count_value));
4541 // Call stub. Undo operation first.
4542 __ Sub(x0, x0, Smi::FromInt(count_value));
4546 ToNumberStub convert_stub(isolate());
4547 __ CallStub(&convert_stub);
4548 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4550 // Save result for postfix expressions.
4551 if (expr->is_postfix()) {
4552 if (!context()->IsEffect()) {
4553 // Save the result on the stack. If we have a named or keyed property
4554 // we store the result under the receiver that is currently on top
4556 switch (assign_type) {
4560 case NAMED_PROPERTY:
4561 __ Poke(x0, kXRegSize);
4563 case NAMED_SUPER_PROPERTY:
4564 __ Poke(x0, 2 * kXRegSize);
4566 case KEYED_PROPERTY:
4567 __ Poke(x0, 2 * kXRegSize);
4569 case KEYED_SUPER_PROPERTY:
4570 __ Poke(x0, 3 * kXRegSize);
4576 __ Bind(&stub_call);
4578 __ Mov(x0, Smi::FromInt(count_value));
4580 // Record position before stub call.
4581 SetSourcePosition(expr->position());
4584 Assembler::BlockPoolsScope scope(masm_);
4585 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4586 CallIC(code, expr->CountBinOpFeedbackId());
4587 patch_site.EmitPatchInfo();
4591 // Store the value returned in x0.
4592 switch (assign_type) {
4594 if (expr->is_postfix()) {
4595 { EffectContext context(this);
4596 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4598 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4601 // For all contexts except EffectConstant We have the result on
4602 // top of the stack.
4603 if (!context()->IsEffect()) {
4604 context()->PlugTOS();
4607 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4609 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4610 context()->Plug(x0);
4613 case NAMED_PROPERTY: {
4614 __ Mov(StoreDescriptor::NameRegister(),
4615 Operand(prop->key()->AsLiteral()->value()));
4616 __ Pop(StoreDescriptor::ReceiverRegister());
4617 CallStoreIC(expr->CountStoreFeedbackId());
4618 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4619 if (expr->is_postfix()) {
4620 if (!context()->IsEffect()) {
4621 context()->PlugTOS();
4624 context()->Plug(x0);
4628 case NAMED_SUPER_PROPERTY: {
4629 EmitNamedSuperPropertyStore(prop);
4630 if (expr->is_postfix()) {
4631 if (!context()->IsEffect()) {
4632 context()->PlugTOS();
4635 context()->Plug(x0);
4639 case KEYED_SUPER_PROPERTY: {
4640 EmitKeyedSuperPropertyStore(prop);
4641 if (expr->is_postfix()) {
4642 if (!context()->IsEffect()) {
4643 context()->PlugTOS();
4646 context()->Plug(x0);
4650 case KEYED_PROPERTY: {
4651 __ Pop(StoreDescriptor::NameRegister());
4652 __ Pop(StoreDescriptor::ReceiverRegister());
4654 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4655 CallIC(ic, expr->CountStoreFeedbackId());
4656 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4657 if (expr->is_postfix()) {
4658 if (!context()->IsEffect()) {
4659 context()->PlugTOS();
4662 context()->Plug(x0);
4670 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4671 DCHECK(!context()->IsEffect());
4672 DCHECK(!context()->IsTest());
4673 VariableProxy* proxy = expr->AsVariableProxy();
4674 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4675 Comment cmnt(masm_, "Global variable");
4676 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
4677 __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4678 if (FLAG_vector_ics) {
4679 __ Mov(VectorLoadICDescriptor::SlotRegister(),
4680 SmiFromSlot(proxy->VariableFeedbackSlot()));
4682 // Use a regular load, not a contextual load, to avoid a reference
4684 CallLoadIC(NOT_CONTEXTUAL);
4685 PrepareForBailout(expr, TOS_REG);
4686 context()->Plug(x0);
4687 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4690 // Generate code for loading from variables potentially shadowed
4691 // by eval-introduced variables.
4692 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4695 __ Mov(x0, Operand(proxy->name()));
4697 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4698 PrepareForBailout(expr, TOS_REG);
4701 context()->Plug(x0);
4703 // This expression cannot throw a reference error at the top level.
4704 VisitInDuplicateContext(expr);
4709 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4710 Expression* sub_expr,
4711 Handle<String> check) {
4712 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4713 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4714 Label materialize_true, materialize_false;
4715 Label* if_true = NULL;
4716 Label* if_false = NULL;
4717 Label* fall_through = NULL;
4718 context()->PrepareTest(&materialize_true, &materialize_false,
4719 &if_true, &if_false, &fall_through);
4721 { AccumulatorValueContext context(this);
4722 VisitForTypeofValue(sub_expr);
4724 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4726 Factory* factory = isolate()->factory();
4727 if (String::Equals(check, factory->number_string())) {
4728 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4729 __ JumpIfSmi(x0, if_true);
4730 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4731 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4732 Split(eq, if_true, if_false, fall_through);
4733 } else if (String::Equals(check, factory->string_string())) {
4734 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4735 __ JumpIfSmi(x0, if_false);
4736 // Check for undetectable objects => false.
4737 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4738 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4739 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4741 } else if (String::Equals(check, factory->symbol_string())) {
4742 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4743 __ JumpIfSmi(x0, if_false);
4744 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4745 Split(eq, if_true, if_false, fall_through);
4746 } else if (String::Equals(check, factory->boolean_string())) {
4747 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4748 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4749 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4750 Split(eq, if_true, if_false, fall_through);
4751 } else if (String::Equals(check, factory->undefined_string())) {
4753 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4754 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4755 __ JumpIfSmi(x0, if_false);
4756 // Check for undetectable objects => true.
4757 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4758 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4759 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4761 } else if (String::Equals(check, factory->function_string())) {
4762 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4763 __ JumpIfSmi(x0, if_false);
4764 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4765 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4766 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4769 } else if (String::Equals(check, factory->object_string())) {
4770 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4771 __ JumpIfSmi(x0, if_false);
4772 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4773 // Check for JS objects => true.
4775 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4777 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4779 // Check for undetectable objects => false.
4780 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4782 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4786 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4787 if (if_false != fall_through) __ B(if_false);
4789 context()->Plug(if_true, if_false);
4793 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4794 Comment cmnt(masm_, "[ CompareOperation");
4795 SetSourcePosition(expr->position());
4797 // Try to generate an optimized comparison with a literal value.
4798 // TODO(jbramley): This only checks common values like NaN or undefined.
4799 // Should it also handle ARM64 immediate operands?
4800 if (TryLiteralCompare(expr)) {
4804 // Assign labels according to context()->PrepareTest.
4805 Label materialize_true;
4806 Label materialize_false;
4807 Label* if_true = NULL;
4808 Label* if_false = NULL;
4809 Label* fall_through = NULL;
4810 context()->PrepareTest(&materialize_true, &materialize_false,
4811 &if_true, &if_false, &fall_through);
4813 Token::Value op = expr->op();
4814 VisitForStackValue(expr->left());
4817 VisitForStackValue(expr->right());
4818 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4819 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4820 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4821 Split(eq, if_true, if_false, fall_through);
4824 case Token::INSTANCEOF: {
4825 VisitForStackValue(expr->right());
4826 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4828 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4829 // The stub returns 0 for true.
4830 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4835 VisitForAccumulatorValue(expr->right());
4836 Condition cond = CompareIC::ComputeCondition(op);
4838 // Pop the stack value.
4841 JumpPatchSite patch_site(masm_);
4842 if (ShouldInlineSmiCase(op)) {
4844 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4846 Split(cond, if_true, if_false, NULL);
4847 __ Bind(&slow_case);
4850 // Record position and call the compare IC.
4851 SetSourcePosition(expr->position());
4852 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4853 CallIC(ic, expr->CompareOperationFeedbackId());
4854 patch_site.EmitPatchInfo();
4855 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4856 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4860 // Convert the result of the comparison into one expected for this
4861 // expression's context.
4862 context()->Plug(if_true, if_false);
4866 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4867 Expression* sub_expr,
4869 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4870 Label materialize_true, materialize_false;
4871 Label* if_true = NULL;
4872 Label* if_false = NULL;
4873 Label* fall_through = NULL;
4874 context()->PrepareTest(&materialize_true, &materialize_false,
4875 &if_true, &if_false, &fall_through);
4877 VisitForAccumulatorValue(sub_expr);
4878 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4880 if (expr->op() == Token::EQ_STRICT) {
4881 Heap::RootListIndex nil_value = nil == kNullValue ?
4882 Heap::kNullValueRootIndex :
4883 Heap::kUndefinedValueRootIndex;
4884 __ CompareRoot(x0, nil_value);
4885 Split(eq, if_true, if_false, fall_through);
4887 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4888 CallIC(ic, expr->CompareOperationFeedbackId());
4889 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4892 context()->Plug(if_true, if_false);
4896 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4897 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4898 context()->Plug(x0);
4902 void FullCodeGenerator::VisitYield(Yield* expr) {
4903 Comment cmnt(masm_, "[ Yield");
4904 // Evaluate yielded value first; the initial iterator definition depends on
4905 // this. It stays on the stack while we update the iterator.
4906 VisitForStackValue(expr->expression());
4908 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4909 // and suchlike. The implementation changes a little by bleeding_edge so I
4910 // don't want to spend too much time on it now.
4912 switch (expr->yield_kind()) {
4913 case Yield::kSuspend:
4914 // Pop value from top-of-stack slot; box result into result register.
4915 EmitCreateIteratorResult(false);
4916 __ Push(result_register());
4918 case Yield::kInitial: {
4919 Label suspend, continuation, post_runtime, resume;
4923 // TODO(jbramley): This label is bound here because the following code
4924 // looks at its pos(). Is it possible to do something more efficient here,
4925 // perhaps using Adr?
4926 __ Bind(&continuation);
4930 VisitForAccumulatorValue(expr->generator_object());
4931 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4932 __ Mov(x1, Smi::FromInt(continuation.pos()));
4933 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4934 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4936 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4937 kLRHasBeenSaved, kDontSaveFPRegs);
4938 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4939 __ Cmp(__ StackPointer(), x1);
4940 __ B(eq, &post_runtime);
4941 __ Push(x0); // generator object
4942 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4943 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4944 __ Bind(&post_runtime);
4945 __ Pop(result_register());
4946 EmitReturnSequence();
4949 context()->Plug(result_register());
4953 case Yield::kFinal: {
4954 VisitForAccumulatorValue(expr->generator_object());
4955 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4956 __ Str(x1, FieldMemOperand(result_register(),
4957 JSGeneratorObject::kContinuationOffset));
4958 // Pop value from top-of-stack slot, box result into result register.
4959 EmitCreateIteratorResult(true);
4960 EmitUnwindBeforeReturn();
4961 EmitReturnSequence();
4965 case Yield::kDelegating: {
4966 VisitForStackValue(expr->generator_object());
4968 // Initial stack layout is as follows:
4969 // [sp + 1 * kPointerSize] iter
4970 // [sp + 0 * kPointerSize] g
4972 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4973 Label l_next, l_call, l_loop;
4974 Register load_receiver = LoadDescriptor::ReceiverRegister();
4975 Register load_name = LoadDescriptor::NameRegister();
4977 // Initial send value is undefined.
4978 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4981 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4983 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4984 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
4985 __ Peek(x3, 1 * kPointerSize); // iter
4986 __ Push(load_name, x3, x0); // "throw", iter, except
4989 // try { received = %yield result }
4990 // Shuffle the received result above a try handler and yield it without
4993 __ Pop(x0); // result
4994 __ PushTryHandler(StackHandler::CATCH, expr->index());
4995 const int handler_size = StackHandlerConstants::kSize;
4996 __ Push(x0); // result
4999 // TODO(jbramley): This label is bound here because the following code
5000 // looks at its pos(). Is it possible to do something more efficient here,
5001 // perhaps using Adr?
5002 __ Bind(&l_continuation);
5005 __ Bind(&l_suspend);
5006 const int generator_object_depth = kPointerSize + handler_size;
5007 __ Peek(x0, generator_object_depth);
5009 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
5010 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
5011 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
5012 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
5014 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
5015 kLRHasBeenSaved, kDontSaveFPRegs);
5016 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
5017 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5018 __ Pop(x0); // result
5019 EmitReturnSequence();
5020 __ Bind(&l_resume); // received in x0
5023 // receiver = iter; f = 'next'; arg = received;
5026 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
5027 __ Peek(x3, 1 * kPointerSize); // iter
5028 __ Push(load_name, x3, x0); // "next", iter, received
5030 // result = receiver[f](arg);
5032 __ Peek(load_receiver, 1 * kPointerSize);
5033 __ Peek(load_name, 2 * kPointerSize);
5034 if (FLAG_vector_ics) {
5035 __ Mov(VectorLoadICDescriptor::SlotRegister(),
5036 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
5038 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
5039 CallIC(ic, TypeFeedbackId::None());
5041 __ Poke(x1, 2 * kPointerSize);
5042 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
5045 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5046 __ Drop(1); // The function is still on the stack; drop it.
5048 // if (!result.done) goto l_try;
5050 __ Move(load_receiver, x0);
5052 __ Push(load_receiver); // save result
5053 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
5054 if (FLAG_vector_ics) {
5055 __ Mov(VectorLoadICDescriptor::SlotRegister(),
5056 SmiFromSlot(expr->DoneFeedbackSlot()));
5058 CallLoadIC(NOT_CONTEXTUAL); // x0=result.done
5059 // The ToBooleanStub argument (result.done) is in x0.
5060 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
5065 __ Pop(load_receiver); // result
5066 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
5067 if (FLAG_vector_ics) {
5068 __ Mov(VectorLoadICDescriptor::SlotRegister(),
5069 SmiFromSlot(expr->ValueFeedbackSlot()));
5071 CallLoadIC(NOT_CONTEXTUAL); // x0=result.value
5072 context()->DropAndPlug(2, x0); // drop iter and g
5079 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
5081 JSGeneratorObject::ResumeMode resume_mode) {
5082 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
5083 Register generator_object = x1;
5084 Register the_hole = x2;
5085 Register operand_stack_size = w3;
5086 Register function = x4;
5088 // The value stays in x0, and is ultimately read by the resumed generator, as
5089 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
5090 // is read to throw the value when the resumed generator is already closed. x1
5091 // will hold the generator object until the activation has been resumed.
5092 VisitForStackValue(generator);
5093 VisitForAccumulatorValue(value);
5094 __ Pop(generator_object);
5096 // Load suspended function and context.
5097 __ Ldr(cp, FieldMemOperand(generator_object,
5098 JSGeneratorObject::kContextOffset));
5099 __ Ldr(function, FieldMemOperand(generator_object,
5100 JSGeneratorObject::kFunctionOffset));
5102 // Load receiver and store as the first argument.
5103 __ Ldr(x10, FieldMemOperand(generator_object,
5104 JSGeneratorObject::kReceiverOffset));
5107 // Push holes for the rest of the arguments to the generator function.
5108 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
5110 // The number of arguments is stored as an int32_t, and -1 is a marker
5111 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
5112 // extension to correctly handle it. However, in this case, we operate on
5113 // 32-bit W registers, so extension isn't required.
5114 __ Ldr(w10, FieldMemOperand(x10,
5115 SharedFunctionInfo::kFormalParameterCountOffset));
5116 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
5117 __ PushMultipleTimes(the_hole, w10);
5119 // Enter a new JavaScript frame, and initialize its slots as they were when
5120 // the generator was suspended.
5121 Label resume_frame, done;
5122 __ Bl(&resume_frame);
5125 __ Bind(&resume_frame);
5126 __ Push(lr, // Return address.
5127 fp, // Caller's frame pointer.
5128 cp, // Callee's context.
5129 function); // Callee's JS Function.
5130 __ Add(fp, __ StackPointer(), kPointerSize * 2);
5132 // Load and untag the operand stack size.
5133 __ Ldr(x10, FieldMemOperand(generator_object,
5134 JSGeneratorObject::kOperandStackOffset));
5135 __ Ldr(operand_stack_size,
5136 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
5138 // If we are sending a value and there is no operand stack, we can jump back
5140 if (resume_mode == JSGeneratorObject::NEXT) {
5142 __ Cbnz(operand_stack_size, &slow_resume);
5143 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
5145 UntagSmiFieldMemOperand(generator_object,
5146 JSGeneratorObject::kContinuationOffset));
5147 __ Add(x10, x10, x11);
5148 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
5149 __ Str(x12, FieldMemOperand(generator_object,
5150 JSGeneratorObject::kContinuationOffset));
5153 __ Bind(&slow_resume);
5156 // Otherwise, we push holes for the operand stack and call the runtime to fix
5157 // up the stack and the handlers.
5158 __ PushMultipleTimes(the_hole, operand_stack_size);
5160 __ Mov(x10, Smi::FromInt(resume_mode));
5161 __ Push(generator_object, result_register(), x10);
5162 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
5163 // Not reached: the runtime call returns elsewhere.
5167 context()->Plug(result_register());
5171 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5175 const int instance_size = 5 * kPointerSize;
5176 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
5179 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5181 Register result = x0;
5182 __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
5185 __ Bind(&gc_required);
5186 __ Push(Smi::FromInt(instance_size));
5187 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5188 __ Ldr(context_register(),
5189 MemOperand(fp, StandardFrameConstants::kContextOffset));
5191 __ Bind(&allocated);
5192 Register map_reg = x1;
5193 Register result_value = x2;
5194 Register boolean_done = x3;
5195 Register empty_fixed_array = x4;
5196 Register untagged_result = x5;
5197 __ Ldr(map_reg, GlobalObjectMemOperand());
5198 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5200 ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5201 __ Pop(result_value);
5202 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5203 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5204 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5205 JSObject::kElementsOffset);
5206 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5207 JSGeneratorObject::kResultDonePropertyOffset);
5208 __ ObjectUntag(untagged_result, result);
5209 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5210 __ Stp(empty_fixed_array, empty_fixed_array,
5211 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5212 __ Stp(result_value, boolean_done,
5213 MemOperand(untagged_result,
5214 JSGeneratorObject::kResultValuePropertyOffset));
5216 // Only the value field needs a write barrier, as the other values are in the
5218 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
5219 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
5223 // TODO(all): I don't like this method.
5224 // It seems to me that in too many places x0 is used in place of this.
5225 // Also, this function is not suitable for all places where x0 should be
5226 // abstracted (eg. when used as an argument). But some places assume that the
5227 // first argument register is x0, and use this function instead.
5228 // Considering that most of the register allocation is hard-coded in the
5229 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5230 // that abstracting the allocation through functions would not yield any
5231 // performance benefit, I think the existence of this function is debatable.
5232 Register FullCodeGenerator::result_register() {
5237 Register FullCodeGenerator::context_register() {
5242 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5243 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5244 __ Str(value, MemOperand(fp, frame_offset));
5248 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5249 __ Ldr(dst, ContextMemOperand(cp, context_index));
5253 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5254 Scope* declaration_scope = scope()->DeclarationScope();
5255 if (declaration_scope->is_script_scope() ||
5256 declaration_scope->is_module_scope()) {
5257 // Contexts nested in the native context have a canonical empty function
5258 // as their closure, not the anonymous closure containing the global
5259 // code. Pass a smi sentinel and let the runtime look up the empty
5261 DCHECK(kSmiTag == 0);
5263 } else if (declaration_scope->is_eval_scope()) {
5264 // Contexts created by a call to eval have the same closure as the
5265 // context calling eval, not the anonymous closure containing the eval
5266 // code. Fetch it from the context.
5267 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
5270 DCHECK(declaration_scope->is_function_scope());
5271 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5277 void FullCodeGenerator::EnterFinallyBlock() {
5278 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5279 DCHECK(!result_register().is(x10));
5280 // Preserve the result register while executing finally block.
5281 // Also cook the return address in lr to the stack (smi encoded Code* delta).
5282 __ Sub(x10, lr, Operand(masm_->CodeObject()));
5284 __ Push(result_register(), x10);
5286 // Store pending message while executing finally block.
5287 ExternalReference pending_message_obj =
5288 ExternalReference::address_of_pending_message_obj(isolate());
5289 __ Mov(x10, pending_message_obj);
5290 __ Ldr(x10, MemOperand(x10));
5292 ExternalReference has_pending_message =
5293 ExternalReference::address_of_has_pending_message(isolate());
5294 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
5295 __ Mov(x11, has_pending_message);
5296 __ Ldrb(x11, MemOperand(x11));
5301 ExternalReference pending_message_script =
5302 ExternalReference::address_of_pending_message_script(isolate());
5303 __ Mov(x10, pending_message_script);
5304 __ Ldr(x10, MemOperand(x10));
5309 void FullCodeGenerator::ExitFinallyBlock() {
5310 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5311 DCHECK(!result_register().is(x10));
5313 // Restore pending message from stack.
5314 __ Pop(x10, x11, x12);
5315 ExternalReference pending_message_script =
5316 ExternalReference::address_of_pending_message_script(isolate());
5317 __ Mov(x13, pending_message_script);
5318 __ Str(x10, MemOperand(x13));
5321 ExternalReference has_pending_message =
5322 ExternalReference::address_of_has_pending_message(isolate());
5323 __ Mov(x13, has_pending_message);
5324 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
5325 __ Strb(x11, MemOperand(x13));
5327 ExternalReference pending_message_obj =
5328 ExternalReference::address_of_pending_message_obj(isolate());
5329 __ Mov(x13, pending_message_obj);
5330 __ Str(x12, MemOperand(x13));
5332 // Restore result register and cooked return address from the stack.
5333 __ Pop(x10, result_register());
5335 // Uncook the return address (see EnterFinallyBlock).
5337 __ Add(x11, x10, Operand(masm_->CodeObject()));
5345 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5347 BackEdgeState target_state,
5348 Code* replacement_code) {
5349 // Turn the jump into a nop.
5350 Address branch_address = pc - 3 * kInstructionSize;
5351 PatchingAssembler patcher(branch_address, 1);
5353 DCHECK(Instruction::Cast(branch_address)
5354 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5355 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5356 Instruction::Cast(branch_address)->ImmPCOffset() ==
5357 6 * kInstructionSize));
5359 switch (target_state) {
5361 // <decrement profiling counter>
5362 // .. .. .. .. b.pl ok
5363 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5364 // .. .. .. .. blr x16
5365 // ... more instructions.
5367 // Jump offset is 6 instructions.
5370 case ON_STACK_REPLACEMENT:
5371 case OSR_AFTER_STACK_CHECK:
5372 // <decrement profiling counter>
5373 // .. .. .. .. mov x0, x0 (NOP)
5374 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5375 // .. .. .. .. blr x16
5376 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
5380 // Replace the call address.
5381 Instruction* load = Instruction::Cast(pc)->preceding(2);
5382 Address interrupt_address_pointer =
5383 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5384 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5385 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5387 ->OnStackReplacement()
5389 (Memory::uint64_at(interrupt_address_pointer) ==
5390 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5394 (Memory::uint64_at(interrupt_address_pointer) ==
5395 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5397 ->OsrAfterStackCheck()
5399 (Memory::uint64_at(interrupt_address_pointer) ==
5400 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5402 ->OnStackReplacement()
5404 Memory::uint64_at(interrupt_address_pointer) =
5405 reinterpret_cast<uint64_t>(replacement_code->entry());
5407 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5408 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5412 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5414 Code* unoptimized_code,
5416 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5417 // back-end), but this function is gone in bleeding_edge so it might not
5419 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5421 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5422 Instruction* load = Instruction::Cast(pc)->preceding(2);
5423 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5424 load->ImmPCOffset());
5425 if (entry == reinterpret_cast<uint64_t>(
5426 isolate->builtins()->OnStackReplacement()->entry())) {
5427 return ON_STACK_REPLACEMENT;
5428 } else if (entry == reinterpret_cast<uint64_t>(
5429 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5430 return OSR_AFTER_STACK_CHECK;
5440 #define __ ACCESS_MASM(masm())
5443 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5445 int* context_length) {
5446 ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
5447 // The macros used here must preserve the result register.
5449 // Because the handler block contains the context of the finally
5450 // code, we can restore it directly from there for the finally code
5451 // rather than iteratively unwinding contexts via their previous
5453 __ Drop(*stack_depth); // Down to the handler block.
5454 if (*context_length > 0) {
5455 // Restore the context to its dedicated register and the stack.
5456 __ Peek(cp, StackHandlerConstants::kContextOffset);
5457 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5460 __ Bl(finally_entry_);
5463 *context_length = 0;
5471 } } // namespace v8::internal
5473 #endif // V8_TARGET_ARCH_ARM64