// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
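
// Within this file, "__" expands to emission calls on masm_ via ACCESS_MASM,
// the usual V8 shorthand for writing assembler sequences.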

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
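
// How the patchable smi checks above work: EmitJumpIfNotSmi and EmitJumpIfSmi
// emit a single tbz/tbnz on bit 0 of xzr, which statically always
// (respectively never) takes the branch. EmitPatchInfo records the checked
// register and the patch site, so that PatchInlinedSmiCode (in ic-arm64.cc)
// can later rewrite that one instruction to test the smi tag bit of the
// recorded register, enabling or inverting the inline check in place.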

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }
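
  // For example, with locals_count == 100 and --optimize-for-size off, the
  // loop above pushes 3 * 32 slots and the tail pushes the remaining
  // 100 % 32 == 4 slots, so all 100 locals are seeded with undefined.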

  bool function_in_register_x1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    // Get the frame pointer for the calling frame.
    __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    Label check_frame_marker;
    __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
    __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
    __ B(ne, &check_frame_marker);
    __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
    __ Bind(&check_frame_marker);
    __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
    __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));

    Label non_construct_frame, done;

    __ B(ne, &non_construct_frame);
    __ Ldr(x0,
           MemOperand(x2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ B(&done);

    __ Bind(&non_construct_frame);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);

    __ Bind(&done);

    SetVar(new_target_var, x0, x2, x3);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x2, Smi::FromInt(num_parameters));
    __ Mov(x1, Smi::FromInt(rest_index));
    __ Mov(x0, Smi::FromInt(language_mode()));
    __ Push(x3, x2, x1, x0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (literal()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
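
// Note: Subs (rather than Sub) leaves the condition flags set by the
// decrement, so callers can branch on pl immediately afterwards to test
// whether the counter is still non-negative (see EmitBackEdgeBookkeeping
// and EmitReturnSequence below).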

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
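
// Worked example for the weight computation above: a back edge whose body
// generated 10 * kCodeSizeMultiplier bytes of code yields weight 10
// (clamped to [1, kMaxBackEdgeWeight]), so larger loop bodies drain the
// interrupt budget faster and reach the InterruptCheck builtin sooner.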

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16-byte alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    int no_frame_start = masm_->pc_offset();
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);

    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
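
// The epilogue above ends with a small pc-relative literal trick: the
// ldr_pcrel loads a 64-bit constant placed three instructions ahead
// (emitted by dc64, just past the Ret), and that constant is the byte size
// of the arguments plus receiver. Adding it to the stack pointer drops them
// without needing the count in a register at return time.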

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}

void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}

void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}

// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
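
// Sign convention above: stack locals live below fp at negative offsets
// starting at kLocal0Offset, while parameters live above fp on the caller's
// side of the frame; the (num_parameters + 1) slot adjustment accounts for
// the parameters plus the receiver.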

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(x2, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(x2, xzr);
      }
      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,
                     2);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Push(x2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
      break;
    }
  }
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.
}

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
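
// Note how the inline fast case above cooperates with JumpPatchSite: the
// unpatched check always branches to slow_case, so every comparison first
// goes through the CompareIC. Once the IC records smi feedback,
// PatchInlinedSmiCode rewrites the check into a real smi-tag test, and
// smi-vs-smi comparisons then take the inline Cmp/B path without calling
// the stub again.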

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x10.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}
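
// Stack discipline for the loop above, from the bottom up: the enumerable
// object, then a smi flag (or the map when the enum-cache fast path is
// taken), the array of keys, its length as a smi, and the current index as
// a smi on top. That is five slots in total, which is why the break path
// ends with Drop(5).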

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    __ Mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s && !s->is_script_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}
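
// The extension checks above exist because a context introduced by a
// sloppy-mode eval can install an extension object whose properties shadow
// outer bindings; if any context on the chain has a non-NULL extension, the
// fast path cannot be trusted and control bails out through *slow to a full
// runtime lookup.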

void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}

void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    }
  } else {
    __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ Mov(LoadDescriptor::SlotRegister(),
           SmiFromSlot(proxy->VariableFeedbackSlot()));
    CallLoadIC(typeof_mode);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->has_literal() &&
                (info_->literal()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(x0, var);
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ Bind(&done);
      context()->Plug(x0);
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}
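
// A sketch of the caching scheme above: the function's literals array slot
// for this literal starts out undefined; the first execution calls the
// runtime to materialize the JSRegExp and store it there, and every
// execution then allocates a fresh shallow copy of the materialized object,
// so each evaluation of the literal yields a distinct JSRegExp that shares
// the compiled pattern data.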

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Mov(StoreDescriptor::ReceiverRegister(), x0);
              __ Mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ Peek(StoreDescriptor::ValueRegister(), 0);
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode.
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }

    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Mov(x0, Smi::FromInt(NONE));
            __ Push(x0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }

  // Verify that compilation exactly consumed the number of store ic slots
  // that the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then we can
    // turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal, it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Mov(x1, Smi::FromInt(expr->literal_index()));
      __ Push(x0, x1);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (has_fast_elements) {
      int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(array_index));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array which has a literal index and is
  // handled above. The second part starts at the first spread expression
  // (inclusive); these elements get appended to the array. Note that the
  // number of elements an iterable produces is unknown ahead of time.
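  // Illustrative example (not from the original source): in
  //
  //   var a = [1, 2, ...xs, 3];
  //
  // the elements 1 and 2 are stored into the cloned boilerplate above, while
  // ...xs and 3 are appended one at a time by the loop below.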
  if (array_index < length && result_saved) {
    __ Drop(1);  // literal index
    __ Pop(x0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(x0);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        __ Push(scratch1, scratch2, result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
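  // (A compound assignment is one like "o.x += 1", where the target must be
  // loaded and combined with the right-hand side before being stored back.)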
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ Push(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
}

void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadFromSuper, 4);
}

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  // Call keyed load IC. It has arguments key and receiver in x0 and x1.
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallIC(ic);
}

void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);

  // Perform combined smi check on both operands.
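  // (Smis are tagged with a zero low bit, so the Orr of two values is itself
  // smi-tagged iff both values are smis.)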
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable in
      // a signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
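    // Note: on arm64 a smi keeps its 32-bit payload in the upper word, so the
    // tagged additions and subtractions below are plain 64-bit Adds/Subs with
    // an overflow check that falls back to the stub.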
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ Pop(x1);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
                                                  int* used_store_slots) {
  // Constructor is in x0.
  DCHECK(lit != NULL);
  __ Push(x0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = x1;
  __ Ldr(scratch,
         FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ Peek(scratch, kPointerSize);  // constructor
    } else {
      __ Peek(scratch, 0);  // prototype
    }
    __ Push(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read-only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here so
    // we do not need to do the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
      __ Push(x0);
    }

    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2,
                              lit->SlotForHomeObject(value, used_store_slots));

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;

      case ObjectLiteral::Property::SETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;

      default:
        UNREACHABLE();
    }
  }

  // Set both the prototype and constructor to have fast properties, and also
  // freeze them in strong mode.
  __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
}

void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorICSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; x0: home_object
      Register scratch = x10;
      Register scratch2 = x11;
      __ mov(scratch, result_register());  // home_object
      __ Peek(x0, kPointerSize);           // value
      __ Peek(scratch2, 0);                // this
      __ Poke(scratch2, kPointerSize);     // this
      __ Poke(scratch, 0);                 // home_object
      // stack: this, home_object; x0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = x10;
      Register scratch2 = x11;
      __ Peek(scratch2, 2 * kPointerSize);  // value
      // stack: value, this, home_object; x0: key, x11: value
      __ Peek(scratch, kPointerSize);  // this
      __ Poke(scratch, 2 * kPointerSize);
      __ Peek(scratch, 0);  // home_object
      __ Poke(scratch, kPointerSize);
      __ Poke(x0, 0);
      __ Move(x0, scratch2);
      // stack: this, home_object, key; x0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::ValueRegister());
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorICSlot slot) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->IsGlobalSlot()) {
    // Global var, const, or let.
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
      __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
      DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
      StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ Push(x0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy,
                     2);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT_CONST) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ Bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);

  } else if (var->is_this() && op == Token::INIT_CONST) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
    __ Mov(x0, Operand(var->name()));
    __ Push(x0);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Mov(x11, Operand(var->name()));
      __ Mov(x10, Smi::FromInt(language_mode()));
      // jssp[0]  : mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
      __ Push(x0, cp, x11, x10);
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(var->mode() == CONST_LEGACY);
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Mov(x1, Operand(var->name()));
      __ Push(x0, cp, x1);
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError, 0);
    }
    // Silently ignore store in sloppy mode.
  }
}

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ Mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  __ Pop(StoreDescriptor::ReceiverRegister());
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallStoreIC();
  } else {
    CallStoreIC(expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // x0 : value
  // stack : receiver ('this'), home_object
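  // (Reached for stores such as "super.prop = value" inside a method.)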
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(x0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}

void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // x0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(x0);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // TODO(all): Could we pass this in registers rather than on the stack?
  __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallIC(ic);
  } else {
    CallIC(ic, expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), x0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), x0);
      __ Pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    {
      UseScratchRegisterScope temps(masm_);
      Register temp = temps.AcquireX();
      __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
      __ Push(temp);
    }
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
  }

  EmitCall(expr, call_type);
}

void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 4);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  VisitForStackValue(prop->key());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  SetCallPosition(expr, arg_count);

  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }

  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // Prepare to push the language mode.
  __ Mov(x11, Smi::FromInt(language_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x12, Smi::FromInt(scope()->start_position()));
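  // (Together with the copy of the function pushed by the caller, these four
  // values form the five arguments consumed by the runtime call below.)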

  __ Push(x9, x10, x11, x12);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}

// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Mov(x10, Operand(callee->name()));
    __ Push(context_register(), x10);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(x0, x1);  // Receiver, function.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      __ Push(x0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x1);
      __ Bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    __ Push(x10);  // Reserved receiver slot.
  }
}

void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call. Then we call the resolved
    // function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    PushCalleeAndWithBaseObject(expr);

    // Push the arguments.
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Push a copy of the function (found below the arguments) and
    // resolve eval.
    __ Peek(x10, (arg_count + 1) * kPointerSize);
    __ Push(x10);
    EmitResolvePossiblyDirectEval(arg_count);

    // Touch up the stack with the resolved function.
    __ Poke(x0, (arg_count + 1) * kPointerSize);

    PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

    // Record source position for debugger.
    SetCallPosition(expr, arg_count);

    // Call the evaluated function.
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, x0);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    PushCalleeAndWithBaseObject(expr);
    EmitCall(expr);

  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      VisitForStackValue(property->obj());
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }

  } else if (call_type == Call::SUPER_CALL) {
    EmitSuperConstructorCall(expr);

  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    VisitForStackValue(callee);
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  EmitLoadSuperConstructor(super_call_ref);
  __ push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load original constructor into x4.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ Mov(x4, result_register());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));

  CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  RecordJSReturnSite(expr);

  context()->Plug(x0);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
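  // (TestAndSplit branches to if_true when all of the mask bits are clear,
  // i.e. when (x0 & kSmiTagMask) == 0 and the value is therefore a smi.)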
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tbnz(x11, Map::kIsUndetectable, if_false);
  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, SIMD128_VALUE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Register object = x0;
  __ AssertNotSmi(object);

  Register map = x10;
  Register bitfield2 = x11;
  __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  Register props = x12;
  Register props_map = x12;
  Register hash_table_map = x13;
  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);

  // Look for the valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label loop, done;

  // Skip the loop if no descriptors are valid.
  Register descriptors = x12;
  Register descriptors_length = x13;
  __ NumberOfOwnDescriptors(descriptors_length, map);
  __ Cbz(descriptors_length, &done);

  __ LoadInstanceDescriptors(map, descriptors);

  // Calculate the end of the descriptor array.
  Register descriptors_end = x14;
  __ Mov(x15, DescriptorArray::kDescriptorSize);
  __ Mul(descriptors_length, descriptors_length, x15);
  // Calculate location of the first key name.
  __ Add(descriptors, descriptors,
         DescriptorArray::kFirstOffset - kHeapObjectTag);
  // Calculate the end of the descriptor array.
  __ Add(descriptors_end, descriptors,
         Operand(descriptors_length, LSL, kPointerSizeLog2));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  Register valueof_string = x1;
  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);

  __ Bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));

  __ Bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
  Register prototype = x1;
  Register global_idx = x2;
  Register native_context = x2;
  Register string_proto = x3;
  Register proto_map = x4;
  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
  __ JumpIfSmi(prototype, if_false);
  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
  __ Ldr(global_idx, GlobalObjectMemOperand());
  __ Ldr(native_context,
         FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
  __ Ldr(string_proto,
         ContextMemOperand(native_context,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Cmp(proto_map, string_proto);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Only a HeapNumber can be -0.0, so return false if we have something else.
  __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);

  // Test the bit pattern.
  __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
  __ Cmp(x10, 1);  // Set V on 0x8000000000000000.
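  // (The bit pattern of -0.0 is 0x8000000000000000, i.e. INT64_MIN. Comparing
  // against 1 computes x10 - 1, which signed-overflows and sets V for exactly
  // that value.)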

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(vs, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Register map = x10;
  Register type_reg = x11;
  __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
  __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ls, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &check_frame_marker);
  __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ Bind(&check_frame_marker);
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in x1.
  VisitForAccumulatorValue(args->at(0));
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(x0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ B(eq, &function);

  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ B(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  Register instance_type = x14;
  __ GetMapConstructor(x12, x10, x13, instance_type);
  __ Cmp(instance_type, JS_FUNCTION_TYPE);
  __ B(ne, &non_function_constructor);

  // x12 now contains the constructor function. Grab the
  // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
  __ B(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);

  context()->Plug(x0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(x0, &done);
  // If the object is not a value type, return the object.
  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NOT_NULL(args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Register object = x0;
  Register result = x0;
  Register stamp_addr = x10;
  Register stamp_cache = x11;

  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(stamp_addr, stamp);
      __ Ldr(stamp_addr, MemOperand(stamp_addr));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ B(&done);
    }

    __ Bind(&runtime);
    __ Mov(x1, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ Bind(&done);
  }

  context()->Plug(result);
}

void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }

  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}
3650 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3651 ZoneList<Expression*>* args = expr->arguments();
3652 DCHECK_EQ(3, args->length());
3654 Register string = x0;
3655 Register index = x1;
3656 Register value = x2;
3657 Register scratch = x10;
3659 VisitForStackValue(args->at(0)); // index
3660 VisitForStackValue(args->at(1)); // value
3661 VisitForAccumulatorValue(args->at(2)); // string
3662 __ Pop(value, index);
  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }

  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}
3680 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3681 ZoneList<Expression*>* args = expr->arguments();
3682 DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(x1, &done);

  // If the object is not a value type, return the value.
  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);

  // Store the value.
  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}
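// Note (added): the value is copied into x10 before RecordWriteField because
// the write barrier is allowed to clobber the register holding the stored
// value, while x0 must survive as the expression's result.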
3709 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3710 ZoneList<Expression*>* args = expr->arguments();
3711 DCHECK_EQ(args->length(), 1);
3713 // Load the argument into x0 and call the stub.
3714 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}
3722 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3723 ZoneList<Expression*>* args = expr->arguments();
3724 DCHECK_EQ(1, args->length());
3726 // Load the argument into x0 and convert it.
3727 VisitForAccumulatorValue(args->at(0));
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}
3735 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3736 ZoneList<Expression*>* args = expr->arguments();
3737 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}
3757 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3758 ZoneList<Expression*>* args = expr->arguments();
3759 DCHECK(args->length() == 2);
3761 VisitForStackValue(args->at(0));
3762 VisitForAccumulatorValue(args->at(1));
  Register object = x1;
  Register index = x0;
  Register result = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;

  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}
3802 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3803 ZoneList<Expression*>* args = expr->arguments();
3804 DCHECK(args->length() == 2);
3806 VisitForStackValue(args->at(0));
3807 VisitForAccumulatorValue(args->at(1));
  Register object = x1;
  Register index = x0;
  Register result = x0;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;

  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}
3848 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3849 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3850 ZoneList<Expression*>* args = expr->arguments();
3851 DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ Pop(x1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);

  context()->Plug(x0);
}
3864 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3865 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3866 ZoneList<Expression*>* args = expr->arguments();
3867 DCHECK(args->length() >= 2);
  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(x0, &runtime);
  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);

  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, x0);
  ParameterCount count(arg_count);
  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ Bind(&done);

  context()->Plug(x0);
}
3896 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3897 ZoneList<Expression*>* args = expr->arguments();
3898 DCHECK(args->length() == 2);
  // new.target
  VisitForStackValue(args->at(0));

  // .this_function
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kGetPrototype, 1);
  __ Push(result_register());

  // Load original constructor into x4.
  __ Peek(x4, 1 * kPointerSize);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, args_set_up, runtime;
  __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
  __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(eq, &adaptor_frame);
  // The default constructor has no arguments, so no adaptor frame means no
  // arguments either.
  __ Mov(x0, Operand(0));
  __ B(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(x1, x1);
    __ Mov(x0, x1);

    // Get arguments pointer in x11.
    __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
    __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);

    Label loop;
    __ bind(&loop);
    // Pre-decrement x11 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
    __ Push(x10);
    __ Sub(x1, x1, Operand(1));
    __ Cbnz(x1, &loop);
  }

  __ bind(&args_set_up);
  __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}
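// Sketch (added; not in the original source): the adaptor-frame walk above
// behaves like this C-style loop, copying the n adapted arguments onto the
// current stack while the pre-decrement skips the receiver slot:
//
//   intptr_t* p = caller_fp + kCallerSPOffset + n * kPointerSize;
//   for (int i = n; i > 0; i--) push(*--p);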
3955 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3956 RegExpConstructResultStub stub(isolate());
3957 ZoneList<Expression*>* args = expr->arguments();
3958 DCHECK(args->length() == 3);
3959 VisitForStackValue(args->at(0));
3960 VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(x1, x2);
  __ CallStub(&stub);
  context()->Plug(x0);
}
3968 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3969 ZoneList<Expression*>* args = expr->arguments();
3970 VisitForAccumulatorValue(args->at(0));
3972 Label materialize_true, materialize_false;
3973 Label* if_true = NULL;
3974 Label* if_false = NULL;
3975 Label* fall_through = NULL;
3976 context()->PrepareTest(&materialize_true, &materialize_false,
3977 &if_true, &if_false, &fall_through);
3979 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3980 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3981 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3982 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3988 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3989 ZoneList<Expression*>* args = expr->arguments();
3990 DCHECK(args->length() == 1);
3991 VisitForAccumulatorValue(args->at(0));
3993 __ AssertString(x0);
3995 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3996 __ IndexFromHash(x10, x0);
  context()->Plug(x0);
}
4002 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4003 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
4005 ZoneList<Expression*>* args = expr->arguments();
4006 DCHECK(args->length() == 2);
4007 VisitForStackValue(args->at(1));
4008 VisitForAccumulatorValue(args->at(0));
4010 Register array = x0;
4011 Register result = x0;
4012 Register elements = x1;
4013 Register element = x2;
4014 Register separator = x3;
4015 Register array_length = x4;
  Register result_pos = x5;
  Register map = x6;
  Register string_length = x10;
4019 Register elements_end = x11;
4020 Register string = x12;
4021 Register scratch1 = x13;
4022 Register scratch2 = x14;
4023 Register scratch3 = x7;
4024 Register separator_length = x15;
4026 Label bailout, done, one_char_separator, long_separator,
4027 non_trivial_array, not_size_one_array, loop,
4028 empty_separator_loop, one_char_separator_loop,
4029 one_char_separator_loop_entry, long_separator_loop;
  // The separator operand is on the stack.
  __ Pop(separator);
4034 // Check that the array is a JSArray.
4035 __ JumpIfSmi(array, &bailout);
4036 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4038 // Check that the array has fast elements.
4039 __ CheckFastElements(map, scratch1, &bailout);
4041 // If the array has length zero, return the empty string.
4042 // Load and untag the length of the array.
4043 // It is an unsigned value, so we can skip sign extension.
4044 // We assume little endianness.
4045 __ Ldrsw(array_length,
4046 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
4047 __ Cbnz(array_length, &non_trivial_array);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);
4051 __ Bind(&non_trivial_array);
4052 // Get the FixedArray containing array's elements.
4053 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4055 // Check that all array elements are sequential one-byte strings, and
4056 // accumulate the sum of their lengths.
4057 __ Mov(string_length, 0);
4058 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4059 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4060 // Loop condition: while (element < elements_end).
4061 // Live values in registers:
4062 // elements: Fixed array of strings.
4063 // array_length: Length of the fixed array of strings (not smi)
4064 // separator: Separator string
4065 // string_length: Accumulated sum of string lengths (not smi).
4066 // element: Current array element.
4067 // elements_end: Array end.
4068 if (FLAG_debug_code) {
4069 __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }

  __ Bind(&loop);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4074 __ JumpIfSmi(string, &bailout);
4075 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4076 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4077 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ Ldrsw(scratch1,
           UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ Adds(string_length, string_length, scratch1);
  __ B(vs, &bailout);
  __ Cmp(element, elements_end);
  __ B(lt, &loop);
4085 // If array_length is 1, return elements[0], a string.
4086 __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ B(&done);

  __ Bind(&not_size_one_array);
4093 // Live values in registers:
4094 // separator: Separator string
4095 // array_length: Length of the array (not smi).
4096 // string_length: Sum of string lengths (not smi).
4097 // elements: FixedArray of strings.
4099 // Check that the separator is a flat one-byte string.
4100 __ JumpIfSmi(separator, &bailout);
4101 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4102 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4103 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4105 // Add (separator length times array_length) - separator length to the
4106 // string_length to get the length of the result string.
4107 // Load the separator length as untagged.
4108 // We assume little endianness, and that the length is positive.
4109 __ Ldrsw(separator_length,
4110 UntagSmiFieldMemOperand(separator,
4111 SeqOneByteString::kLengthOffset));
4112 __ Sub(string_length, string_length, separator_length);
  __ Umaddl(string_length, array_length.W(), separator_length.W(),
            string_length);
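  // At this point string_length holds sum(len_i) - separator_length, so the
  // Umaddl above yields array_length * separator_length + sum(len_i) -
  // separator_length, i.e. sum(len_i) + (array_length - 1) * separator_length:
  // the exact length of the joined string.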
4116 // Get first element in the array.
4117 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4118 // Live values in registers:
4119 // element: First array element
4120 // separator: Separator string
4121 // string_length: Length of result string (not smi)
4122 // array_length: Length of the array (not smi).
  __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
                           &bailout);
4126 // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
4129 // TODO(all): useless unless AllocateOneByteString trashes the register.
4130 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4131 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4133 // Check the length of the separator.
4134 __ Cmp(separator_length, 1);
4135 __ B(eq, &one_char_separator);
4136 __ B(gt, &long_separator);
4138 // Empty separator case
4139 __ Bind(&empty_separator_loop);
4140 // Live values in registers:
4141 // result_pos: the position to which we are currently copying characters.
4142 // element: Current array element.
4143 // elements_end: Array end.
4145 // Copy next array element to the result.
4146 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4147 __ Ldrsw(string_length,
4148 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4149 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4150 __ CopyBytes(result_pos, string, string_length, scratch1);
4151 __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);
4155 // One-character separator case
4156 __ Bind(&one_char_separator);
4157 // Replace separator with its one-byte character value.
4158 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4159 // Jump into the loop after the code that copies the separator, so the first
4160 // element is not preceded by a separator
4161 __ B(&one_char_separator_loop_entry);
4163 __ Bind(&one_char_separator_loop);
4164 // Live values in registers:
4165 // result_pos: the position to which we are currently copying characters.
4166 // element: Current array element.
4167 // elements_end: Array end.
4168 // separator: Single separator one-byte char (in lower byte).
4170 // Copy the separator character to the result.
4171 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4173 // Copy next array element to the result.
4174 __ Bind(&one_char_separator_loop_entry);
4175 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4176 __ Ldrsw(string_length,
4177 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4178 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4179 __ CopyBytes(result_pos, string, string_length, scratch1);
4180 __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);
4184 // Long separator case (separator is more than one character). Entry is at the
4185 // label long_separator below.
4186 __ Bind(&long_separator_loop);
4187 // Live values in registers:
4188 // result_pos: the position to which we are currently copying characters.
4189 // element: Current array element.
4190 // elements_end: Array end.
4191 // separator: Separator string.
4193 // Copy the separator to the result.
4194 // TODO(all): hoist next two instructions.
4195 __ Ldrsw(string_length,
4196 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4197 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4198 __ CopyBytes(result_pos, string, string_length, scratch1);
4200 __ Bind(&long_separator);
4201 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4202 __ Ldrsw(string_length,
4203 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4204 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4205 __ CopyBytes(result_pos, string, string_length, scratch1);
4206 __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  __ Bind(&done);
  context()->Plug(result);
}
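// High-level sketch (added; not part of the original source) of what the
// generated code does:
//
//   if (!is_fast_js_array(array)) goto bailout;   // runtime handles it
//   int total = 0;
//   for (String* s : elements) {
//     if (!is_flat_one_byte(s)) goto bailout;     // all parts must be
//     total += s->length();                       // sequential one-byte
//   }
//   char* out = AllocateOneByteString(total + (n - 1) * sep->length());
//   for (int i = 0; i < n; i++) {
//     if (i > 0) copy(out, sep);    // specialized loops for empty, one-char
//     copy(out, elements[i]);       // and longer separators
//   }
//
// bailout: returning undefined makes the caller fall back to the runtime.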
4218 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4219 DCHECK(expr->arguments()->length() == 0);
4220 ExternalReference debug_is_active =
4221 ExternalReference::debug_is_active_address(isolate());
4222 __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}
4229 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4230 // Push the builtins object as the receiver.
4231 __ Ldr(x10, GlobalObjectMemOperand());
4232 __ Ldr(LoadDescriptor::ReceiverRegister(),
4233 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4234 __ Push(LoadDescriptor::ReceiverRegister());
4236 // Load the function from the receiver.
4237 Handle<String> name = expr->name();
4238 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4239 __ Mov(LoadDescriptor::SlotRegister(),
4240 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
  CallLoadIC(NOT_INSIDE_TYPEOF);
}
4245 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4246 ZoneList<Expression*>* args = expr->arguments();
4247 int arg_count = args->length();
4249 SetCallPosition(expr, arg_count);
4250 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ CallStub(&stub);
}
4256 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4257 ZoneList<Expression*>* args = expr->arguments();
4258 int arg_count = args->length();
4260 if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
4262 EmitLoadJSRuntimeFunction(expr);
    // Push the target function under the receiver.
    __ Peek(x10, 0);
    __ Push(x10);
    __ Poke(x1, kPointerSize);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
4272 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4273 EmitCallJSRuntimeFunction(expr);
4275 // Restore context register.
4276 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, x0);
  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
4283 #define CALL_INTRINSIC_GENERATOR(Name) \
4284 case Runtime::kInline##Name: { \
4285 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4292 // Push the arguments ("left-to-right").
4293 for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(x0);
      }
    }
  }
}
4307 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4308 switch (expr->op()) {
4309 case Token::DELETE: {
4310 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4311 Property* property = expr->expression()->AsProperty();
4312 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4314 if (property != NULL) {
4315 VisitForStackValue(property->obj());
4316 VisitForStackValue(property->key());
        __ CallRuntime(is_strict(language_mode())
                           ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy,
                       2);
        context()->Plug(x0);
4322 } else if (proxy != NULL) {
4323 Variable* var = proxy->var();
4324 // Delete of an unqualified identifier is disallowed in strict mode but
4325 // "delete this" is allowed.
4326 bool is_this = var->HasThisName(isolate());
4327 DCHECK(is_sloppy(language_mode()) || is_this);
4328 if (var->IsUnallocatedOrGlobalSlot()) {
4329 __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Push(x12, x11);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4333 context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4363 if (context()->IsEffect()) {
4364 // Unary NOT has no side effects so it's only necessary to visit the
4365 // subexpression. Match the optimizing compiler by not branching.
4366 VisitForEffect(expr->expression());
4367 } else if (context()->IsTest()) {
4368 const TestContext* test = TestContext::cast(context());
4369 // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
4374 context()->Plug(test->true_label(), test->false_label());
4376 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4377 // TODO(jbramley): This could be much more efficient using (for
4378 // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
4402 case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ Mov(x3, x0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}
4420 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4421 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4423 Comment cmnt(masm_, "[ CountOperation");
4425 Property* prop = expr->expression()->AsProperty();
4426 LhsKind assign_type = Property::GetAssignType(prop);
4428 // Evaluate expression and get value.
4429 if (assign_type == VARIABLE) {
4430 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4431 AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    switch (assign_type) {
4439 case NAMED_PROPERTY: {
4440 // Put the object both on the stack and in the register.
4441 VisitForStackValue(prop->obj());
4442 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
4448 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4449 VisitForAccumulatorValue(
4450 prop->obj()->AsSuperPropertyReference()->home_object());
4451 __ Push(result_register());
4452 const Register scratch = x10;
4453 __ Peek(scratch, kPointerSize);
4454 __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
4460 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
4463 VisitForAccumulatorValue(prop->key());
4464 __ Push(result_register());
4465 const Register scratch1 = x10;
4466 const Register scratch2 = x11;
4467 __ Peek(scratch1, 2 * kPointerSize);
4468 __ Peek(scratch2, kPointerSize);
4469 __ Push(scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
4475 VisitForStackValue(prop->obj());
4476 VisitForStackValue(prop->key());
4477 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4478 __ Peek(LoadDescriptor::NameRegister(), 0);
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
4490 if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }
4496 // Inline smi case if we are in a loop.
4497 Label stub_call, done;
4498 JumpPatchSite patch_site(masm_);
4500 int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);
4505 // Save result for postfix expressions.
4506 if (expr->is_postfix()) {
4507 if (!context()->IsEffect()) {
4508 // Save the result on the stack. If we have a named or keyed property we
        // store the result under the receiver that is currently on top of the
        // stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case NAMED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 3);
            break;
        }
      }
    }
    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }

  if (!is_strong(language_mode())) {
4539 ToNumberStub convert_stub(isolate());
4540 __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
4545 if (expr->is_postfix()) {
4546 if (!context()->IsEffect()) {
4547 // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case NAMED_SUPER_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_SUPER_PROPERTY:
          __ Poke(x0, 3 * kXRegSize);
          break;
      }
    }
  }
  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));
4574 SetExpressionPosition(expr);
  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code =
        CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                strength(language_mode())).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
4593 { EffectContext context(this);
4594 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4595 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
4611 case NAMED_PROPERTY: {
4612 __ Mov(StoreDescriptor::NameRegister(),
4613 Operand(prop->key()->AsLiteral()->value()));
4614 __ Pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
4631 case NAMED_SUPER_PROPERTY: {
4632 EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
4642 case KEYED_SUPER_PROPERTY: {
4643 EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
4653 case KEYED_PROPERTY: {
4654 __ Pop(StoreDescriptor::NameRegister());
4655 __ Pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}
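// Note on the inline smi fast path above (added): an arm64 smi keeps its
// 32-bit value in the upper word of the tagged register, so adding the tagged
// constant Smi::FromInt(count_value) with Adds increments the value directly;
// the overflow flag (V) is set exactly when the result leaves the smi range,
// which is why the fast path exits with B(vc, &done) and everything else
// falls back to the BinaryOpIC stub.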
4678 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4679 Expression* sub_expr,
4680 Handle<String> check) {
4681 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4682 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4683 Label materialize_true, materialize_false;
4684 Label* if_true = NULL;
4685 Label* if_false = NULL;
4686 Label* fall_through = NULL;
4687 context()->PrepareTest(&materialize_true, &materialize_false,
4688 &if_true, &if_false, &fall_through);
4690 { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
4693 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4695 Factory* factory = isolate()->factory();
4696 if (String::Equals(check, factory->number_string())) {
4697 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4698 __ JumpIfSmi(x0, if_true);
4699 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4700 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4701 Split(eq, if_true, if_false, fall_through);
4702 } else if (String::Equals(check, factory->string_string())) {
4703 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4704 __ JumpIfSmi(x0, if_false);
4705 __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
4706 Split(lt, if_true, if_false, fall_through);
4707 } else if (String::Equals(check, factory->symbol_string())) {
4708 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4709 __ JumpIfSmi(x0, if_false);
4710 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4711 Split(eq, if_true, if_false, fall_through);
4712 } else if (String::Equals(check, factory->boolean_string())) {
4713 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4714 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4715 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4716 Split(eq, if_true, if_false, fall_through);
4717 } else if (String::Equals(check, factory->undefined_string())) {
4719 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4720 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4721 __ JumpIfSmi(x0, if_false);
4722 // Check for undetectable objects => true.
4723 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4724 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
4727 } else if (String::Equals(check, factory->function_string())) {
4728 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4729 __ JumpIfSmi(x0, if_false);
4730 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4731 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);
4734 } else if (String::Equals(check, factory->object_string())) {
4735 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4736 __ JumpIfSmi(x0, if_false);
4737 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4738 // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
4750 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
4751 } else if (String::Equals(check, factory->type##_string())) { \
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
                 #type "_string");                              \
4754 __ JumpIfSmi(x0, if_true); \
4755 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); \
4756 __ CompareRoot(x0, Heap::k##Type##MapRootIndex); \
4757 Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}
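// Summary (added): each literal `typeof x == "<string>"` comparison compiles
// to a direct type test instead of materializing the typeof result, e.g. for
// "number" it is `is_smi(x) || map(x) == heap_number_map`, branching straight
// to the true/false labels of the enclosing test context.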
4769 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4770 Comment cmnt(masm_, "[ CompareOperation");
4771 SetExpressionPosition(expr);
4773 // Try to generate an optimized comparison with a literal value.
4774 // TODO(jbramley): This only checks common values like NaN or undefined.
4775 // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }
4780 // Assign labels according to context()->PrepareTest.
4781 Label materialize_true;
4782 Label materialize_false;
4783 Label* if_true = NULL;
4784 Label* if_false = NULL;
4785 Label* fall_through = NULL;
4786 context()->PrepareTest(&materialize_true, &materialize_false,
4787 &if_true, &if_false, &fall_through);
4789 Token::Value op = expr->op();
4790 VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
4794 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4795 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4796 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
4800 case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ Pop(x1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }
    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
          isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }
4836 // Convert the result of the comparison into one expected for this
4837 // expression's context.
4838 context()->Plug(if_true, if_false);
4842 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
4845 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4846 Label materialize_true, materialize_false;
4847 Label* if_true = NULL;
4848 Label* if_false = NULL;
4849 Label* fall_through = NULL;
4850 context()->PrepareTest(&materialize_true, &materialize_false,
4851 &if_true, &if_false, &fall_through);
4853 VisitForAccumulatorValue(sub_expr);
4854 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4856 if (expr->op() == Token::EQ_STRICT) {
4857 Heap::RootListIndex nil_value = nil == kNullValue ?
4858 Heap::kNullValueRootIndex :
4859 Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
4872 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4873 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}
4878 void FullCodeGenerator::VisitYield(Yield* expr) {
4879 Comment cmnt(masm_, "[ Yield");
4880 SetExpressionPosition(expr);
4882 // Evaluate yielded value first; the initial iterator definition depends on
4883 // this. It stays on the stack while we update the iterator.
4884 VisitForStackValue(expr->expression());
4886 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4887 // and suchlike. The implementation changes a little by bleeding_edge so I
4888 // don't want to spend too much time on it now.
4890 switch (expr->yield_kind()) {
4891 case Yield::kSuspend:
4892 // Pop value from top-of-stack slot; box result into result register.
4893 EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);
      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&continuation);
      __ RecordGeneratorContinuation();
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
4909 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4910 __ Mov(x1, Smi::FromInt(continuation.pos()));
4911 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4912 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
4916 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4917 __ Cmp(__ StackPointer(), x1);
4918 __ B(eq, &post_runtime);
4919 __ Push(x0); // generator object
4920 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4921 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4922 __ Bind(&post_runtime);
4923 __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }
4931 case Yield::kFinal: {
4932 VisitForAccumulatorValue(expr->generator_object());
4933 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4934 __ Str(x1, FieldMemOperand(result_register(),
4935 JSGeneratorObject::kContinuationOffset));
4936 // Pop value from top-of-stack slot, box result into result register.
4937 EmitCreateIteratorResult(true);
4938 EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
4943 case Yield::kDelegating: {
4944 VisitForStackValue(expr->generator_object());
4946 // Initial stack layout is as follows:
4947 // [sp + 1 * kPointerSize] iter
4948 // [sp + 0 * kPointerSize] g
4950 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4951 Label l_next, l_call, l_loop;
4952 Register load_receiver = LoadDescriptor::ReceiverRegister();
4953 Register load_name = LoadDescriptor::NameRegister();
4955 // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                         // iter
      __ Push(load_name, x3, x0);                            // "throw", iter, except
      __ B(&l_call);
4966 // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);                                        // result
      int handler_index = NewHandlerTableEntry();
      EnterTryBlock(handler_index, &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ Push(x0);                                       // result
      __ B(&l_suspend);
4977 // TODO(jbramley): This label is bound here because the following code
4978 // looks at its pos(). Is it possible to do something more efficient here,
4979 // perhaps using Adr?
      __ Bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ B(&l_resume);
4984 __ Bind(&l_suspend);
4985 const int generator_object_depth = kPointerSize + try_block_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);                                       // g
      __ Push(Smi::FromInt(handler_index));              // handler-index
4989 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4990 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4991 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4992 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
4996 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
4997 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4998 __ Pop(x0); // result
4999 EmitReturnSequence();
5000 __ Bind(&l_resume); // received in x0
5001 ExitTryBlock(handler_index);
      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
5007 __ Peek(x3, 1 * kPointerSize); // iter
5008 __ Push(load_name, x3, x0); // "next", iter, received
      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(load_receiver, 1 * kPointerSize);
5013 __ Peek(load_name, 2 * kPointerSize);
5014 __ Mov(LoadDescriptor::SlotRegister(),
5015 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
5016 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
5017 CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      SetCallPosition(expr, 1);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5025 __ Drop(1); // The function is still on the stack; drop it.
      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Move(load_receiver, x0);
5031 __ Push(load_receiver); // save result
5032 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
5033 __ Mov(LoadDescriptor::SlotRegister(),
5034 SmiFromSlot(expr->DoneFeedbackSlot()));
5035 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.done
5036 // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Cbz(x0, &l_try);

      // result.value
      __ Pop(load_receiver);                                 // result
5043 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
5044 __ Mov(LoadDescriptor::SlotRegister(),
5045 SmiFromSlot(expr->ValueFeedbackSlot()));
5046 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.value
      context()->DropAndPlug(2, x0);                         // drop iter and g
      break;
    }
  }
}
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
5057 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
5058 Register generator_object = x1;
5059 Register the_hole = x2;
5060 Register operand_stack_size = w3;
5061 Register function = x4;
5063 // The value stays in x0, and is ultimately read by the resumed generator, as
5064 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
5065 // is read to throw the value when the resumed generator is already closed. x1
5066 // will hold the generator object until the activation has been resumed.
5067 VisitForStackValue(generator);
5068 VisitForAccumulatorValue(value);
5069 __ Pop(generator_object);
5071 // Load suspended function and context.
5072 __ Ldr(cp, FieldMemOperand(generator_object,
5073 JSGeneratorObject::kContextOffset));
5074 __ Ldr(function, FieldMemOperand(generator_object,
5075 JSGeneratorObject::kFunctionOffset));
5077 // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);
5082 // Push holes for the rest of the arguments to the generator function.
5083 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
5085 // The number of arguments is stored as an int32_t, and -1 is a marker
5086 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
5087 // extension to correctly handle it. However, in this case, we operate on
5088 // 32-bit W registers, so extension isn't required.
5089 __ Ldr(w10, FieldMemOperand(x10,
5090 SharedFunctionInfo::kFormalParameterCountOffset));
5091 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
5092 __ PushMultipleTimes(the_hole, w10);
5094 // Enter a new JavaScript frame, and initialize its slots as they were when
5095 // the generator was suspended.
5096 Label resume_frame, done;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
5101 __ Push(lr, // Return address.
5102 fp, // Caller's frame pointer.
5103 cp, // Callee's context.
5104 function); // Callee's JS Function.
5105 __ Add(fp, __ StackPointer(), kPointerSize * 2);
5107 // Load and untag the operand stack size.
5108 __ Ldr(x10, FieldMemOperand(generator_object,
5109 JSGeneratorObject::kOperandStackOffset));
5110 __ Ldr(operand_stack_size,
5111 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
    __ Br(x10);

    __ Bind(&slow_resume);
  }
5131 // Otherwise, we push holes for the operand stack and call the runtime to fix
5132 // up the stack and the handlers.
5133 __ PushMultipleTimes(the_hole, operand_stack_size);
5135 __ Mov(x10, Smi::FromInt(resume_mode));
5136 __ Push(generator_object, result_register(), x10);
5137 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  __ Bind(&done);
  context()->Plug(result_register());
}
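// Fast-resume sketch (added): for a NEXT resume with an empty operand stack,
// the code above computes the continuation address directly,
//
//   resume_pc = function->code_entry + untag(generator->continuation);
//
// marks the generator as executing, and branches there, bypassing the
// Runtime::kResumeJSGeneratorObject slow path.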
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
  __ B(&allocated);
5160 __ Bind(&gc_required);
5161 __ Push(Smi::FromInt(instance_size));
5162 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5163 __ Ldr(context_register(),
5164 MemOperand(fp, StandardFrameConstants::kContextOffset));
5166 __ Bind(&allocated);
5167 Register map_reg = x1;
5168 Register result_value = x2;
5169 Register boolean_done = x3;
5170 Register empty_fixed_array = x4;
5171 Register untagged_result = x5;
5172 __ Ldr(map_reg, GlobalObjectMemOperand());
5173 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
  __ Ldr(map_reg,
         ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5176 __ Pop(result_value);
5177 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5178 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5179 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5180 JSObject::kElementsOffset);
5181 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5182 JSGeneratorObject::kResultDonePropertyOffset);
5183 __ ObjectUntag(untagged_result, result);
5184 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5185 __ Stp(empty_fixed_array, empty_fixed_array,
5186 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5187 __ Stp(result_value, boolean_done,
5188 MemOperand(untagged_result,
5189 JSGeneratorObject::kResultValuePropertyOffset));
  // Only the value field needs a write barrier, as the other values are in the
  // root set.
5193 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  context()->Plug(result);
}
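// Layout note (added): the 5 * kPointerSize allocation is filled as
//
//   [0] map         (iterator result map from the native context)
//   [1] properties  (empty_fixed_array)  \  written as one Stp pair
//   [2] elements    (empty_fixed_array)  /
//   [3] value       (VAL)                \  written as one Stp pair
//   [4] done        (true/false)         /
//
// which is what the STATIC_ASSERTs on adjacent field offsets guarantee.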
5198 // TODO(all): I don't like this method.
5199 // It seems to me that in too many places x0 is used in place of this.
5200 // Also, this function is not suitable for all places where x0 should be
5201 // abstracted (eg. when used as an argument). But some places assume that the
5202 // first argument register is x0, and use this function instead.
5203 // Considering that most of the register allocation is hard-coded in the
5204 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5205 // that abstracting the allocation through functions would not yield any
5206 // performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}
Register FullCodeGenerator::context_register() {
  return cp;
}
5217 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5218 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}
5223 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}
5228 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5229 Scope* closure_scope = scope()->ClosureScope();
5230 if (closure_scope->is_script_scope() ||
5231 closure_scope->is_module_scope()) {
5232 // Contexts nested in the native context have a canonical empty function
5233 // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    DCHECK(kSmiTag == 0);
    __ Push(xzr);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
    __ Push(x10);
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(x10);
  }
}
5252 void FullCodeGenerator::EnterFinallyBlock() {
5253 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5254 DCHECK(!result_register().is(x10));
5255 // Preserve the result register while executing finally block.
5256 // Also cook the return address in lr to the stack (smi encoded Code* delta).
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);

  ClearPendingMessage();
}
5272 void FullCodeGenerator::ExitFinallyBlock() {
5273 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5274 DCHECK(!result_register().is(x10));
  // Restore pending message from stack.
  __ Pop(x10);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));

  // Restore result register and cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}
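// Cooking sketch (added): across a finally block the return address is kept
// as a code-relative smi instead of a raw pointer, so a moving GC cannot
// leave a stale address on the stack:
//
//   cooked = SmiTag(lr - code_object_start);        // EnterFinallyBlock
//   lr     = SmiUntag(cooked) + code_object_start;  // ExitFinallyBlock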
5293 void FullCodeGenerator::ClearPendingMessage() {
5294 DCHECK(!result_register().is(x10));
5295 ExternalReference pending_message_obj =
5296 ExternalReference::address_of_pending_message_obj(isolate());
5297 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
5298 __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}
5303 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5304 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
}


#undef __
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
5316 // Turn the jump into a nop.
5317 Address branch_address = pc - 3 * kInstructionSize;
5318 PatchingAssembler patcher(branch_address, 1);
5320 DCHECK(Instruction::Cast(branch_address)
5321 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5322 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5323 Instruction::Cast(branch_address)->ImmPCOffset() ==
5324 6 * kInstructionSize));
  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..       b.pl ok
      //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..       blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  .. .. .. ..       mov x0, x0 (NOP)
      //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..       blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }
5347 // Replace the call address.
5348 Instruction* load = Instruction::Cast(pc)->preceding(2);
5349 Address interrupt_address_pointer =
5350 reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}
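// Patch-site sketch (added): every back edge ends with the three-instruction
// sequence
//
//   b.pl ok  /  nop      <- toggled between branch (INTERRUPT) and nop (OSR)
//   ldr  x16, <literal>  <- pc-relative literal holding the stub entry
//   blr  x16
//
// PatchAt rewrites the first instruction and the literal; GetBackEdgeState
// below reads them back to classify the site.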
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
5383 // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // be a problem.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5388 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5389 Instruction* load = Instruction::Cast(pc)->preceding(2);
5390 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5391 load->ImmPCOffset());
5392 if (entry == reinterpret_cast<uint64_t>(
5393 isolate->builtins()->OnStackReplacement()->entry())) {
5394 return ON_STACK_REPLACEMENT;
5395 } else if (entry == reinterpret_cast<uint64_t>(
5396 isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}
}  // namespace internal
}  // namespace v8
5410 #endif // V8_TARGET_ARCH_ARM64