// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }
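
  // PatchInlinedSmiCode (in ic-arm64.cc) rewrites the tb(n)z emitted above:
  // it swaps the opcode (tbz <-> tbnz) and replaces xzr with the recorded
  // register, turning the always/never-taken branch into a real test of the
  // smi tag bit.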

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }
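
  // InlineSmiCheckInfo::Emit records the patch site as inline data (roughly:
  // the code of reg_ packed together with the distance back to patch_site_),
  // which is how the IC system later finds the instruction to rewrite.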

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
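
// Typical use of JumpPatchSite (see VisitSwitchStatement below): emit the
// inlined smi check, call the compare IC, then emit the patch info so the IC
// can later toggle the check:
//
//   JumpPatchSite patch_site(masm_);
//   patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
//   ...
//   CallIC(ic, clause->CompareId());
//   patch_site.EmitPatchInfo();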

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
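          // E.g. for 70 locals: two loop iterations push 2 * 32 undefined
          // values, and the remainder path below pushes the final 6.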
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    SetVar(this_function_var, x1, x0, x2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    // new.target is parameter -2.
    int offset =
        2 * kXRegSize + (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ldr(x0, MemOperand(fp, offset));
    SetVar(new_target_var, x0, x2, x3);
  }

  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x2, Smi::FromInt(num_parameters));
    __ Mov(x1, Smi::FromInt(rest_index));
    __ Mov(x0, Smi::FromInt(language_mode()));
    __ Push(x3, x2, x1, x0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load the function again if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
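
// Note: the Subs in EmitProfilingCounterDecrement leaves the condition flags
// set, so callers branch on pl (counter still non-negative) to decide whether
// the interrupt check can be skipped.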


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
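  // E.g. with a hypothetical kCodeSizeMultiplier of 100, a raw distance of
  // 149 gives (149 + 50) / 100 = 1 while 150 gives (150 + 50) / 100 = 2,
  // i.e. round-to-nearest rather than floor.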
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);
  } else {
    __ Bind(&return_label_);

    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }

    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocation::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSReturnSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the
      // size of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16 bytes alignment here.
      DCHECK(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
      __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
      __ add(current_sp, current_sp, ip0);
      __ ret();
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      __ dc64(kXRegSize * arg_count);
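      // The ldr_pcrel above targets this dc64: the drop amount sits exactly
      // three instructions (ldr_pcrel, add, ret) past the load, so the
      // constant lives inline in the code stream.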
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             language_mode()).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);              // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);
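  // Loop state on the stack, from the top: index (initially zero), length,
  // the fixed array of keys, the smi(1)-or-zero slow-check flag, and the
  // enumerable object beneath them; the Peek offsets below read these slots.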

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
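  // On arm64 a smi keeps its 32-bit payload in the high word, so
  // UntagSmiAndScale folds (index >> kSmiShift) << kPointerSizeLog2 into a
  // single operand.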
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    __ Mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(proxy->VariableFeedbackSlot()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
      __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(proxy->VariableFeedbackSlot()));
      CallGlobalLoadIC(var->name());
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(x0, var);
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of
  // slots and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Mov(StoreDescriptor::ReceiverRegister(), x0);
              __ Mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ Peek(StoreDescriptor::ValueRegister(), 0);
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode.
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result
  // to reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }

    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Mov(x0, Smi::FromInt(NONE));
            __ Push(x0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }

  // Verify that compilation exactly consumed the number of store ic slots
  // that the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Mov(x1, Smi::FromInt(expr->literal_index()));
      __ Push(x0, x1);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (has_fast_elements) {
      int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(array_index));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part is the part after the first spread
  // expression (inclusive), and these elements get appended to the array. Note
  // that the number of elements an iterable produces is unknown ahead of time.
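  //
  // For example (illustrative), in
  //     var a = [0, x, ...iter, 2];
  // the elements before ...iter are stored into the boilerplate clone above,
  // while ...iter and every element after it are appended one at a time
  // below, since an iterable can produce any number of elements.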
  if (array_index < length && result_saved) {
    __ Drop(1);  // literal index
    __ Pop(x0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(x0);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        __ Push(scratch1, scratch2, result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
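  // For example (illustrative), a compound assignment such as
  //     o.x += 1;
  // loads o.x here (with its own bailout point) before the addition is
  // performed and the result is stored back.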
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ Push(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallLoadIC(NOT_CONTEXTUAL);
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in x0 and x1.
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallIC(ic);
}


void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetSourcePosition(prop->position());

  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);

  // Perform combined smi check on both operands.
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code = CodeFactory::BinaryOpIC(
      isolate(), op, language_mode()).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?

  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
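  //
  // Illustrative note (assuming the 64-bit smi layout, kSmiShift == 32): a
  // smi n is the tagged value n << 32, so Adds/Subs on two tagged smis
  // operate on the payloads exactly and report smi overflow via the V flag.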
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable in
      // a signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ Pop(x1);
  Handle<Code> code = CodeFactory::BinaryOpIC(
      isolate(), op, language_mode()).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  // Constructor is in x0.
  DCHECK(lit != NULL);
  __ push(x0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = x1;
  __ Ldr(scratch,
         FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);

  // store_slot_index points to the vector IC slot for the next store IC used.
  // ClassLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ Peek(scratch, kPointerSize);  // constructor
    } else {
      __ Peek(scratch, 0);  // prototype
    }
    __ Push(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read-only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it so we do
    // not need to do this for every property.
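    // For example (illustrative), a class literal such as
    //     class C { static ["prototype"]() { } }
    // reaches this check and must throw, because "prototype" is an own
    // read-only property of the constructor.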
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
      __ Push(x0);
    }

    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2,
                              lit->SlotForHomeObject(value, &store_slot_index));

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;

      case ObjectLiteral::Property::SETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;

      default:
        UNREACHABLE();
    }
  }

  // prototype
  __ CallRuntime(Runtime::kToFastProperties, 1);

  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ClassLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
}

void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorICSlot slot) {
  DCHECK(expr->IsValidReferenceExpression());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; x0: home_object
      Register scratch = x10;
      Register scratch2 = x11;
      __ mov(scratch, result_register());  // home_object
      __ Peek(x0, kPointerSize);           // value
      __ Peek(scratch2, 0);                // this
      __ Poke(scratch2, kPointerSize);     // this
      __ Poke(scratch, 0);                 // home_object
      // stack: this, home_object; x0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = x10;
      Register scratch2 = x11;
      __ Peek(scratch2, 2 * kPointerSize);  // value
      // stack: value, this, home_object; x0: key, x11: value
      __ Peek(scratch, kPointerSize);  // this
      __ Poke(scratch, 2 * kPointerSize);
      __ Peek(scratch, 0);  // home_object
      __ Poke(scratch, kPointerSize);
      __ Poke(x0, 0);
      __ Move(x0, scratch2);
      // stack: this, home_object, key; x0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::ValueRegister());
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorICSlot slot) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to a let variable needs a write barrier.
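    // For example (illustrative), in
    //     { x = 1; let x; }
    // the store happens while x still holds the hole, so a ReferenceError
    // must be thrown instead of performing the assignment.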
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT_CONST) {
    // Assignment to a const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ Bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Mov(x11, Operand(var->name()));
      __ Mov(x10, Smi::FromInt(language_mode()));
      // jssp[0]  : mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
      __ Push(x0, cp, x11, x10);
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(var->mode() == CONST_LEGACY);
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Mov(x1, Operand(var->name()));
      __ Push(x0, cp, x1);
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError, 0);
    }
    // Silently ignore store in sloppy mode.
  }
}

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  __ Pop(StoreDescriptor::ReceiverRegister());
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallStoreIC();
  } else {
    CallStoreIC(expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // x0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(x0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // x0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(x0);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // TODO(all): Could we pass this in registers rather than on the stack?
  __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallIC(ic);
  } else {
    CallIC(ic, expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), x0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), x0);
      __ Pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    {
      UseScratchRegisterScope temps(masm_);
      Register temp = temps.AcquireX();
      __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
      __ Push(temp);
    }
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
  }

  EmitCall(expr, call_type);
}

void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  __ Push(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());

  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }

  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  // Prepare to push the receiver of the enclosing function.
  Variable* this_var = scope()->LookupThis();
  DCHECK_NOT_NULL(this_var);
  __ Ldr(x11, VarOperand(this_var, x11));

  // Prepare to push the language mode.
  __ Mov(x12, Smi::FromInt(language_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x13, Smi::FromInt(scope()->start_position()));

  // Push.
  __ Push(x9, x10, x11, x12, x13);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}

void FullCodeGenerator::EmitInitializeThisAfterSuper(
    SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
  Variable* this_var = super_ref->this_var()->var();
  GetVar(x1, this_var);
  Label uninitialized_this;
  __ JumpIfRoot(x1, Heap::kTheHoleValueRootIndex, &uninitialized_this);
  __ Mov(x0, Operand(this_var->name()));
  __ Push(x0);
  __ CallRuntime(Runtime::kThrowReferenceError, 1);
  __ bind(&uninitialized_this);

  EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
}

void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
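    //
    // For example (illustrative), for
    //     function f(a) { return eval("a"); }
    // the call below resolves "eval" at the call site so that a direct eval
    // can see f's scope, receiver and language mode.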
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    {
      PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      __ Push(x10);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ Peek(x10, (arg_count + 1) * kPointerSize);
      __ Push(x10);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in x0 (function) and
      // x1 (receiver). Touch up the stack with the right values.
      __ PokePair(x1, x0, arg_count * kPointerSize);

      PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());

    // Call the evaluated function.
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, x0);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Mov(x10, Operand(proxy->name()));
    __ Push(context_register(), x10);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(x0, x1);  // Receiver, function.
    PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);

  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }

  } else if (call_type == Call::SUPER_CALL) {
    EmitSuperConstructorCall(expr);

  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  VariableProxy* new_target_proxy = super_call_ref->new_target_var();
  VisitForStackValue(new_target_proxy);

  EmitLoadSuperConstructor(super_call_ref);
  __ push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));

  CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  RecordJSReturnSite(expr);

  EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
  context()->Plug(x0);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
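  // Illustrative note: with kSmiShift == 32 and kSmiValueSize == 32 this is
  // bit 63, the sign bit of the tagged value, so the combined test below
  // passes only for values that are both smis and non-negative.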
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tbnz(x11, Map::kIsUndetectable, if_false);
  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tst(x11, 1 << Map::kIsUndetectable);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Register object = x0;
  __ AssertNotSmi(object);

  Register map = x10;
  Register bitfield2 = x11;
  __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  Register props = x12;
  Register props_map = x12;
  Register hash_table_map = x13;
  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);

  // Look for the valueOf name in the descriptor array, and indicate false if
  // it is found. Since we omit an enumeration index check, if valueOf is added
  // via a transition that shares its descriptor array, this is a false
  // positive.
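  //
  // For example (illustrative): if another map transitions to this one and
  // shares the descriptor array, a "valueOf" added by the transition is seen
  // here even though this object never had it; we then conservatively answer
  // false.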
  Label loop, done;

  // Skip the loop if no descriptors are valid.
  Register descriptors = x12;
  Register descriptors_length = x13;
  __ NumberOfOwnDescriptors(descriptors_length, map);
  __ Cbz(descriptors_length, &done);

  __ LoadInstanceDescriptors(map, descriptors);

  // Calculate the end of the descriptor array.
  Register descriptors_end = x14;
  __ Mov(x15, DescriptorArray::kDescriptorSize);
  __ Mul(descriptors_length, descriptors_length, x15);
  // Calculate location of the first key name.
  __ Add(descriptors, descriptors,
         DescriptorArray::kFirstOffset - kHeapObjectTag);
  // Calculate the end of the descriptor array.
  __ Add(descriptors_end, descriptors,
         Operand(descriptors_length, LSL, kPointerSizeLog2));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  Register valueof_string = x1;
  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);

  __ Bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));

  __ Bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
  Register prototype = x1;
  Register global_idx = x2;
  Register native_context = x2;
  Register string_proto = x3;
  Register proto_map = x4;
  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
  __ JumpIfSmi(prototype, if_false);
  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
  __ Ldr(global_idx, GlobalObjectMemOperand());
  __ Ldr(native_context,
         FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
  __ Ldr(string_proto,
         ContextMemOperand(native_context,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Cmp(proto_map, string_proto);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Only a HeapNumber can be -0.0, so return false if we have something else.
  __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);

  // Test the bit pattern.
  __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
  __ Cmp(x10, 1);  // Set V on 0x8000000000000000.
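  // Illustrative note: -0.0 is the only double whose bit pattern is
  // 0x8000000000000000 (INT64_MIN), and INT64_MIN is the only value for
  // which subtracting 1 overflows, so the V flag identifies it exactly.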
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(vs, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Register map = x10;
  Register type_reg = x11;
  __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
  __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ls, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &check_frame_marker);
  __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ Bind(&check_frame_marker);
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in x1.
  VisitForAccumulatorValue(args->at(0));
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;

  // Get the number of formal parameters.
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(x0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ B(eq, &function);

  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ B(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  Register instance_type = x14;
  __ GetMapConstructor(x12, x10, x13, instance_type);
  __ Cmp(instance_type, JS_FUNCTION_TYPE);
  __ B(ne, &non_function_constructor);

  // x12 now contains the constructor function. Grab the
  // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
  __ B(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);

  context()->Plug(x0);
}

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(x0, &done);
  // If the object is not a value type, return the object.
  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done, not_date_object;
  Register object = x0;
  Register result = x0;

  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfObjectType(object, x10, x10, JS_DATE_TYPE, &done);
  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ Bind(&done);

  context()->Plug(result);
}



void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NOT_NULL(args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Register object = x0;
  Register result = x0;
  Register stamp_addr = x10;
  Register stamp_cache = x11;

  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(stamp_addr, stamp);
      __ Ldr(stamp_addr, MemOperand(stamp_addr));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ B(&done);
    }

    __ Bind(&runtime);
    __ Mov(x1, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ Bind(&done);
  }

  context()->Plug(result);
}
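
// The fast path above relies on the global date cache stamp: a cached field
// load is only valid while the object's stamp matches the isolate-wide stamp.
// Roughly (a sketch in C-like pseudocode, not the actual V8 API):
//
//   if (index == 0)                 return date->value;          // never cached
//   if (date->cache_stamp == global_date_cache_stamp)
//                                   return date->fields[index];  // cache hit
//   return GetDateField(date, index);                            // C call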


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }

  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}
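
// The store above computes the untagged element address by hand. Roughly:
//
//   address = string + SeqOneByteString::kHeaderSize - kHeapObjectTag + index
//
// where both the value and the index are first untagged from their smi
// representations (the SmiUntag instructions above).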


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }

  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the MathPow stub.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(x1, &done);

  // If the object is not a value type, return the value.
  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);

  // Store the value.
  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into x0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}
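
// JS-level behaviour of the intrinsic above, as a rough sketch:
//
//   %_StringCharCodeAt(s, i) ~= (0 <= i < s.length) ? s.charCodeAt(i) : NaN
//
// The need_conversion path materializes undefined so that the slow case
// re-enters with a value that forces number conversion of the index.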


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x0;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ Pop(x1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(x0, &runtime);
  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);

  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, x0);
  ParameterCount count(arg_count);
  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ Bind(&done);

  context()->Plug(x0);
}
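
// Rough JS-level semantics of the intrinsic above (a sketch):
//
//   %_CallFunction(receiver, arg1, ..., argN, func)
//       ~= func.call(receiver, arg1, ..., argN)
//
// with a runtime fallback (Runtime::kCall) when func is not a plain
// JSFunction, e.g. for function proxies.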


void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // new.target
  VisitForStackValue(args->at(0));

  // .this_function
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kGetPrototype, 1);
  __ Push(result_register());

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, args_set_up, runtime;
  __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
  __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(eq, &adaptor_frame);
  // The default constructor has no arguments, so no adaptor frame means no
  // args.
  __ Mov(x0, Operand(0));
  __ B(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(x1, x1);

    // Subtract 1 from arguments count, for new.target.
    __ Sub(x1, x1, Operand(1));
    __ Mov(x0, x1);

    // Get arguments pointer in x11.
    __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
    __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
    Label loop;
    __ bind(&loop);
    // Pre-decrement x11 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
    __ Push(x10);
    __ Sub(x1, x1, Operand(1));
    __ Cbnz(x1, &loop);
  }

  __ bind(&args_set_up);
  __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(x1, x2);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  DCHECK_NOT_NULL(args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    context()->Plug(x0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = x0;
  Register cache = x1;
  __ Ldr(cache, GlobalObjectMemOperand());
  __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ Ldr(cache, ContextMemOperand(cache,
                                  Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ Ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done;
  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
                                       JSFunctionResultCache::kFingerOffset));
  __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));

  // Load the key and data from the cache.
  __ Ldp(x2, x3, MemOperand(x3));

  __ Cmp(key, x2);
  __ CmovX(x0, x3, eq);
  __ B(eq, &done);

  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCacheRT, 2);

  __ Bind(&done);
  context()->Plug(x0);
}
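
// The JSFunctionResultCache probed above is a FixedArray with a moving
// "finger" that points at the most recently used key/value pair. As a rough
// sketch (not the exact collection semantics):
//
//   finger = cache[kFingerIndex];
//   if (cache[finger] == key) return cache[finger + 1];  // hit: value slot
//   return %GetFromCacheRT(cache, key);                  // miss: runtime fills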


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ Tst(x10, String::kContainsCachedArrayIndexMask);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(x0);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ IndexFromHash(x10, x0);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  Register array = x0;
  Register result = x0;
  Register elements = x1;
  Register element = x2;
  Register separator = x3;
  Register array_length = x4;
  Register result_pos = x5;
  Register map = x6;
  Register string_length = x10;
  Register elements_end = x11;
  Register string = x12;
  Register scratch1 = x13;
  Register scratch2 = x14;
  Register scratch3 = x7;
  Register separator_length = x15;

  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;

  // The separator operand is on the stack.
  __ Pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(map, scratch1, &bailout);

  // If the array has length zero, return the empty string.
  // Load and untag the length of the array.
  // It is an unsigned value, so we can skip sign extension.
  // We assume little endianness.
  __ Ldrsw(array_length,
           UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
  __ Cbnz(array_length, &non_trivial_array);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&non_trivial_array);
  // Get the FixedArray containing array's elements.
  __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths.
  __ Mov(string_length, 0);
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (not smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (FLAG_debug_code) {
    __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ Bind(&loop);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ Ldrsw(scratch1,
           UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ Adds(string_length, string_length, scratch1);
  __ B(vs, &bailout);
  __ Cmp(element, elements_end);
  __ B(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ B(&done);

  __ Bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array (not smi).
  //   string_length: Sum of string lengths (not smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.
  // Load the separator length as untagged.
  // We assume little endianness, and that the length is positive.
  __ Ldrsw(separator_length,
           UntagSmiFieldMemOperand(separator,
                                   SeqOneByteString::kLengthOffset));
  __ Sub(string_length, string_length, separator_length);
  __ Umaddl(string_length, array_length.W(), separator_length.W(),
            string_length);

  // Get first element in the array.
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array (not smi).
  __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
                           &bailout);

  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  // TODO(all): useless unless AllocateOneByteString trashes the register.
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);

  // Check the length of the separator.
  __ Cmp(separator_length, 1);
  __ B(eq, &one_char_separator);
  __ B(gt, &long_separator);

  // Empty separator case
  __ Bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  // One-character separator case
  __ Bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ B(&one_char_separator_loop_entry);

  __ Bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ Bind(&one_char_separator_loop_entry);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ Bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  // TODO(all): hoist next two instructions.
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(separator, String::kLengthOffset));
  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ Bind(&done);
  context()->Plug(result);
}
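
// The fast path above is roughly equivalent to this JS sketch, bailing out
// to the generic Array.prototype.join for anything but flat one-byte strings:
//
//   function FastJoin(array, separator) {
//     if (array.length == 0) return "";
//     if (array.length == 1) return array[0];
//     var length = (sum of element lengths)
//                + separator.length * (array.length - 1);
//     var result = AllocateOneByteString(length);
//     // copy elements, interleaving separator copies; three loop variants
//     // cover the empty, single-character and long separator cases.
//     return result;
//   }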


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
  // Assert: expr === CallRuntime("ReflectConstruct")
  DCHECK_EQ(1, expr->arguments()->length());
  CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();

  ZoneList<Expression*>* args = call->arguments();
  DCHECK_EQ(3, args->length());

  SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Load ReflectConstruct function.
  EmitLoadJSRuntimeFunction(call);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  // Push super constructor.
  EmitLoadSuperConstructor(super_call_ref);
  __ Push(result_register());

  // Push arguments array.
  VisitForStackValue(args->at(1));

  // Push NewTarget.
  DCHECK(args->at(2)->IsVariableProxy());
  VisitForStackValue(args->at(2));

  EmitCallJSRuntimeFunction(call);

  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);

  // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
  EmitInitializeThisAfterSuper(super_call_ref);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push the builtins object as the receiver.
  __ Ldr(x10, GlobalObjectMemOperand());
  __ Ldr(LoadDescriptor::ReceiverRegister(),
         FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
  __ Push(LoadDescriptor::ReceiverRegister());

  // Load the function from the receiver.
  Handle<String> name = expr->name();
  __ Mov(LoadDescriptor::NameRegister(), Operand(name));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
  CallLoadIC(NOT_CONTEXTUAL);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ CallStub(&stub);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRunTime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);

    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, x0);
  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(x0);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Mov(x10, Smi::FromInt(language_mode()));
        __ Push(x10);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(is_sloppy(language_mode()) || var->is_this());
        if (var->IsUnallocated()) {
          __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Mov(x10, Smi::FromInt(SLOPPY));
          __ Push(x12, x11, x10);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ Mov(x3, x0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        __ Push(result_register());
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        __ Push(scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top of
        // the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case NAMED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 3);
            break;
        }
      }
    }

    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case NAMED_SUPER_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_SUPER_PROPERTY:
          __ Poke(x0, 3 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code = CodeFactory::BinaryOpIC(
        isolate(), Token::ADD, language_mode()).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ Pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::NameRegister());
      __ Pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}
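
// The inlined smi case above exploits tagged arithmetic: adding the smi
// constant for +1/-1 with Adds sets the overflow flag if the result leaves
// smi range, in which case the operation is undone and the BinaryOpIC stub
// performs the generic (possibly heap-number) increment. As a sketch:
//
//   Adds x0, x0, Smi(count_value)   // tagged add, sets flags
//   B(vc, &done)                    // no overflow: result is a valid smi
//   Sub  x0, x0, Smi(count_value)   // undo, then fall through to the stub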


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    __ Mov(LoadDescriptor::SlotRegister(),
           SmiFromSlot(proxy->VariableFeedbackSlot()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(x0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    __ Mov(x0, Operand(proxy->name()));
    __ Push(cp, x0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ Bind(&done);

    context()->Plug(x0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => false.
    __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);

  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));

    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}
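
// Each branch above handles one literal typeof result. As a quick reference
// (a sketch of the dispatch, not the full spec table):
//
//   "number"    smi, or heap object with the heap-number map
//   "string"    instance type below FIRST_NONSTRING_TYPE, not undetectable
//   "symbol"    SYMBOL_TYPE
//   "boolean"   the true or false root value
//   "undefined" the undefined root, or an undetectable object
//   "function"  JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//   "object"    null, or a non-callable JS object that is not undetectable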


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic =
          CodeFactory::CompareIC(isolate(), op, language_mode()).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);
      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&continuation);
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                         // iter
      __ Push(load_name, x3, x0);                     // "throw", iter, except
      __ B(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);                                            // result
      EnterTryBlock(expr->index(), &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ Push(x0);                                           // result

      __ B(&l_suspend);
      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&l_continuation);
      __ B(&l_resume);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + try_block_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);                                           // g
      __ Push(Smi::FromInt(expr->index()));                  // handler-index
      DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Pop(x0);                                            // result
      EmitReturnSequence();
      __ Bind(&l_resume);                                    // received in x0
      ExitTryBlock(expr->index());

      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ Peek(x3, 1 * kPointerSize);                        // iter
      __ Push(load_name, x3, x0);                    // "next", iter, received

      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(load_receiver, 1 * kPointerSize);
      __ Peek(load_name, 2 * kPointerSize);
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Move(load_receiver, x0);

      __ Push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->DoneFeedbackSlot()));
      CallLoadIC(NOT_CONTEXTUAL);                           // x0=result.done
      // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Cbz(x0, &l_try);

      // result.value
      __ Pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->ValueFeedbackSlot()));
      CallLoadIC(NOT_CONTEXTUAL);                            // x0=result.value
      context()->DropAndPlug(2, x0);                         // drop iter and g
      break;
    }
  }
}
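
// The delegating case above is the assembly form of the usual yield*
// desugaring. As a rough JS sketch (the real code forwards the result object
// without re-boxing, as noted in the comments above):
//
//   let received = undefined, f = 'next';
//   while (true) {
//     let result;
//     try { result = iter[f](received); }      // l_next / l_call
//     catch (e) { f = 'throw'; received = e; continue; }  // l_catch
//     if (result.done) break;                  // l_loop
//     received = yield result;                 // l_suspend / l_resume
//     f = 'next';
//   }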


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  // The value stays in x0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // x1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ Pop(generator_object);

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(generator_object,
                             JSGeneratorObject::kContextOffset));
  __ Ldr(function, FieldMemOperand(generator_object,
                                   JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);

  // Push holes for the rest of the arguments to the generator function.
  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));

  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
  // extension to correctly handle it. However, in this case, we operate on
  // 32-bit W registers, so extension isn't required.
  __ Ldr(w10, FieldMemOperand(x10,
                              SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
  __ Push(lr,           // Return address.
          fp,           // Caller's frame pointer.
          cp,           // Callee's context.
          function);    // Callee's JS Function.
  __ Add(fp, __ StackPointer(), kPointerSize * 2);

  // Load and untag the operand stack size.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kOperandStackOffset));
  __ Ldr(operand_stack_size,
         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
    __ Br(x10);

    __ Bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ Mov(x10, Smi::FromInt(resume_mode));
  __ Push(generator_object, result_register(), x10);
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  __ Bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Ldr(context_register(),
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ Bind(&allocated);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ Ldr(map_reg, GlobalObjectMemOperand());
  __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
  __ Ldr(map_reg,
         ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
  __ Pop(result_value);
  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
                JSGeneratorObject::kResultDonePropertyOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result,
                    JSGeneratorObject::kResultValuePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
}
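
// The two Stp stores above lay the object out as (per the STATIC_ASSERTs):
//
//   [ map | properties | elements | value | done ]
//
// i.e. a five-word JSObject with exactly the iterator-result shape
// { value: VAL, done: DONE }, with properties and elements both pointing at
// the empty fixed array.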


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (e.g. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that we are unlikely to need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_script_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    DCHECK(kSmiTag == 0);
    __ Push(xzr);
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
    __ Push(x10);
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(x10);
  }
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  DCHECK(!result_register().is(x10));
  // Preserve the result register while executing finally block.
  // Also cook the return address in lr to the stack (smi encoded Code* delta).
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  DCHECK(!result_register().is(x10));

  // Restore the pending message from the stack.
  __ Pop(x10);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));

  // Restore the result register and the cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}
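
// The pops above mirror the pushes in EnterFinallyBlock: the pending message
// is popped first, then the cooked return address and the saved result
// register, leaving the stack as it was before the finally block was entered.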


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(x10));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
  DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
}
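
// The slot index is passed smi-tagged, in the register that the vector store
// IC trampoline's call descriptor reserves for it, so the IC can find its
// entry in the feedback vector.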


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  PatchingAssembler patcher(branch_address, 1);

  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));
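
  // A back-edge site consists of three instructions, with pc pointing just
  // past the last one:
  //   pc - 3 * kInstructionSize:  b.pl <ok>  (or a marker nop once patched)
  //   pc - 2 * kInstructionSize:  ldr x16, <literal pool entry>
  //   pc - 1 * kInstructionSize:  blr x16
  // branch_address therefore names the patchable instruction, and the DCHECK
  // above checks that it is in one of its two valid states.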

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..  b.pl ok
      //  .. .. .. ..  ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..  blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  .. .. .. ..  mov x0, x0 (NOP)
      //  .. .. .. ..  ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..  blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}
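
// Note that PatchAt never re-encodes an instruction to change the call
// target: it overwrites the 64-bit address in the literal pool slot that the
// ldr reads, then notifies incremental marking of the new code target so the
// patch is not missed by the write barrier.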


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
                     isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else if (entry == reinterpret_cast<uint64_t>(
                   isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    }
  }

  return INTERRUPT;
}
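
// In other words: if the instruction three instructions before the back-edge
// call is still the conditional branch, the site is in its unpatched
// INTERRUPT state; once it has been replaced by the marker nop, the literal
// pool entry identifies which OSR builtin the site was patched to call.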

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64