1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_ARM64
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug/debug.h"
14 #include "src/full-codegen/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
19 #include "src/arm64/code-stubs-arm64.h"
20 #include "src/arm64/macro-assembler-arm64.h"
25 #define __ ACCESS_MASM(masm_)
27 class JumpPatchSite BASE_EMBEDDED {
29 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
31 info_emitted_ = false;
36 if (patch_site_.is_bound()) {
37 DCHECK(info_emitted_);
39 DCHECK(reg_.IsNone());
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
45 InstructionAccurateScope scope(masm_, 1);
46 DCHECK(!info_emitted_);
47 DCHECK(reg.Is64Bits());
50 __ bind(&patch_site_);
51 __ tbz(xzr, 0, target); // Always taken before patched.
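// Before patching, the tbz above tests bit 0 of xzr, which is always
// clear, so the branch is always taken. Smis are tagged with a 0 in
// bit 0, so once type feedback is available the patcher can rewrite
// this instruction to test the tag bit of the recorded value register
// instead, turning it into a real smi check.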
54 void EmitJumpIfSmi(Register reg, Label* target) {
55 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
56 InstructionAccurateScope scope(masm_, 1);
57 DCHECK(!info_emitted_);
58 DCHECK(reg.Is64Bits());
61 __ bind(&patch_site_);
62 __ tbnz(xzr, 0, target); // Never taken before patched.
65 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
66 UseScratchRegisterScope temps(masm_);
67 Register temp = temps.AcquireX();
68 __ Orr(temp, reg1, reg2);
69 EmitJumpIfNotSmi(temp, target);
72 void EmitPatchInfo() {
73 Assembler::BlockPoolsScope scope(masm_);
74 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
81 MacroAssembler* masm_;
90 // Generate code for a JS function. On entry to the function the receiver
91 // and arguments have been pushed on the stack left to right. The actual
92 // argument count matches the formal parameter count expected by the function.
95 // The live registers are:
96 // - x1: the JS function object being called (i.e. ourselves).
98 // - fp: our caller's frame pointer.
99 // - jssp: stack pointer.
100 // - lr: return address.
102 // The function builds a JS frame. See JavaScriptFrameConstants in
103 // frames-arm64.h for its layout.
104 void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(function());
109 Comment cmnt(masm_, "[ Function compiled by full code generator");
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
114 if (strlen(FLAG_stop_at) > 0 &&
115 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
116 __ Debug("stop-at", __LINE__, BREAK);
120 // Sloppy mode functions and builtins need to replace the receiver with the
121 // global proxy when called as functions (without an explicit receiver object).
123 if (is_sloppy(info->language_mode()) && !info->is_native() &&
124 info->MayUseThis() && info->scope()->has_this_declaration()) {
126 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
127 __ Peek(x10, receiver_offset);
128 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
130 __ Ldr(x10, GlobalObjectMemOperand());
131 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
132 __ Poke(x10, receiver_offset);
138 // Open a frame scope to indicate that there is a frame on the stack.
139 // The MANUAL indicates that the scope shouldn't actually generate code
140 // to set up the frame because we do it manually below.
141 FrameScope frame_scope(masm_, StackFrame::MANUAL);
143 // This call emits the following sequence in a way that can be patched for
144 // code ageing support:
145 // Push(lr, fp, cp, x1);
146 // Add(fp, jssp, 2 * kPointerSize);
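// After that sequence jssp has dropped by four slots and fp points at the
// saved caller fp, with the saved lr immediately above it and cp and the
// function object (x1) in the two slots below fp.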
147 info->set_prologue_offset(masm_->pc_offset());
148 __ Prologue(info->IsCodePreAgingActive());
149 info->AddNoFrameRange(0, masm_->pc_offset());
151 // Reserve space on the stack for locals.
152 { Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots.
155 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
157 if (locals_count > 0) {
158 if (locals_count >= 128) {
160 DCHECK(jssp.Is(__ StackPointer()));
161 __ Sub(x10, jssp, locals_count * kPointerSize);
162 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
164 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
167 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
168 if (FLAG_optimize_for_size) {
169 __ PushMultipleTimes(x10, locals_count);
171 const int kMaxPushes = 32;
172 if (locals_count >= kMaxPushes) {
173 int loop_iterations = locals_count / kMaxPushes;
174 __ Mov(x3, loop_iterations);
176 __ Bind(&loop_header);
178 __ PushMultipleTimes(x10, kMaxPushes);
180 __ B(ne, &loop_header);
182 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes.
184 __ PushMultipleTimes(x10, remaining);
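// For illustration: with 100 locals and kMaxPushes == 32, the loop above
// runs 100 / 32 == 3 times (96 pushes) and the 100 % 32 == 4 remaining
// undefined values are pushed here.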
189 bool function_in_register_x1 = true;
191 if (info->scope()->num_heap_slots() > 0) {
192 // Argument to NewContext is the function, which is still in x1.
193 Comment cmnt(masm_, "[ Allocate context");
194 bool need_write_barrier = true;
195 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
196 if (info->scope()->is_script_scope()) {
197 __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
199 __ CallRuntime(Runtime::kNewScriptContext, 2);
200 } else if (slots <= FastNewContextStub::kMaximumSlots) {
201 FastNewContextStub stub(isolate(), slots);
203 // Result of FastNewContextStub is always in new space.
204 need_write_barrier = false;
207 __ CallRuntime(Runtime::kNewFunctionContext, 1);
209 function_in_register_x1 = false;
210 // Context is returned in x0. It replaces the context passed to us.
211 // It's saved on the stack and kept live in cp.
213 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
214 // Copy any necessary parameters into the context.
215 int num_parameters = info->scope()->num_parameters();
216 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
217 for (int i = first_parameter; i < num_parameters; i++) {
218 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
219 if (var->IsContextSlot()) {
220 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
221 (num_parameters - 1 - i) * kPointerSize;
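// The last declared parameter sits closest to the caller's stack pointer,
// so with three parameters, for example, parameter 0 is found at
// kCallerSPOffset + 2 * kPointerSize and parameter 2 at kCallerSPOffset.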
222 // Load parameter from stack.
223 __ Ldr(x10, MemOperand(fp, parameter_offset));
224 // Store it in the context.
225 MemOperand target = ContextMemOperand(cp, var->index());
228 // Update the write barrier.
229 if (need_write_barrier) {
230 __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
231 x11, kLRHasBeenSaved, kDontSaveFPRegs);
232 } else if (FLAG_debug_code) {
234 __ JumpIfInNewSpace(cp, &done);
235 __ Abort(kExpectedNewSpaceObject);
242 // Possibly set up a local binding to the function itself, which is used in
243 // derived constructors with super calls.
244 Variable* this_function_var = scope()->this_function_var();
245 if (this_function_var != nullptr) {
246 Comment cmnt(masm_, "[ This function");
247 if (!function_in_register_x1) {
248 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
249 // The write barrier clobbers the register again, so keep it marked as such.
251 SetVar(this_function_var, x1, x0, x2);
254 Variable* new_target_var = scope()->new_target_var();
255 if (new_target_var != nullptr) {
256 Comment cmnt(masm_, "[ new.target");
257 // Get the frame pointer for the calling frame.
258 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
260 Label check_frame_marker;
261 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
262 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
263 __ B(ne, &check_frame_marker);
264 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
265 __ Bind(&check_frame_marker);
266 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
267 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
269 Label non_construct_frame, done;
271 __ B(ne, &non_construct_frame);
273 MemOperand(x2, ConstructFrameConstants::kOriginalConstructorOffset));
276 __ Bind(&non_construct_frame);
277 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
281 SetVar(new_target_var, x0, x2, x3);
284 // Possibly allocate RestParameters
286 Variable* rest_param = scope()->rest_parameter(&rest_index);
288 Comment cmnt(masm_, "[ Allocate rest parameter array");
290 int num_parameters = info->scope()->num_parameters();
291 int offset = num_parameters * kPointerSize;
293 __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
294 __ Mov(x2, Smi::FromInt(num_parameters));
295 __ Mov(x1, Smi::FromInt(rest_index));
296 __ Mov(x0, Smi::FromInt(language_mode()));
297 __ Push(x3, x2, x1, x0);
299 RestParamAccessStub stub(isolate());
302 SetVar(rest_param, x0, x1, x2);
305 Variable* arguments = scope()->arguments();
306 if (arguments != NULL) {
307 // Function uses arguments object.
308 Comment cmnt(masm_, "[ Allocate arguments object");
309 if (!function_in_register_x1) {
310 // Load this again, if it's used by the local context below.
311 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
315 // Receiver is just before the parameters on the caller's stack.
316 int num_parameters = info->scope()->num_parameters();
317 int offset = num_parameters * kPointerSize;
318 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
319 __ Mov(x1, Smi::FromInt(num_parameters));
322 // Arguments to ArgumentsAccessStub:
323 // function, receiver address, parameter count.
324 // The stub will rewrite receiver and parameter count if the previous
325 // stack frame was an arguments adapter frame.
326 ArgumentsAccessStub::Type type;
327 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
328 type = ArgumentsAccessStub::NEW_STRICT;
329 } else if (function()->has_duplicate_parameters()) {
330 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
332 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
334 ArgumentsAccessStub stub(isolate(), type);
337 SetVar(arguments, x0, x1, x2);
341 __ CallRuntime(Runtime::kTraceEnter, 0);
344 // Visit the declarations and body unless there is an illegal redeclaration.
346 if (scope()->HasIllegalRedeclaration()) {
347 Comment cmnt(masm_, "[ Declarations");
348 scope()->VisitIllegalRedeclaration(this);
351 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
352 { Comment cmnt(masm_, "[ Declarations");
353 VisitDeclarations(scope()->declarations());
356 // Assert that the declarations do not use ICs. Otherwise the debugger
357 // won't be able to redirect a PC at an IC to the correct IC in newly recompiled code.
359 DCHECK_EQ(0, ic_total_count_);
362 Comment cmnt(masm_, "[ Stack check");
363 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
365 DCHECK(jssp.Is(__ StackPointer()));
366 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
368 PredictableCodeSizeScope predictable(masm_,
369 Assembler::kCallSizeWithRelocation);
370 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
375 Comment cmnt(masm_, "[ Body");
376 DCHECK(loop_depth() == 0);
377 VisitStatements(function()->body());
378 DCHECK(loop_depth() == 0);
382 // Always emit a 'return undefined' in case control fell off the end of the body.
384 { Comment cmnt(masm_, "[ return <undefined>;");
385 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
387 EmitReturnSequence();
389 // Force emission of the pools, so they don't get emitted in the middle
390 // of the back edge table.
391 masm()->CheckVeneerPool(true, false);
392 masm()->CheckConstPool(true, false);
396 void FullCodeGenerator::ClearAccumulator() {
397 __ Mov(x0, Smi::FromInt(0));
401 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
402 __ Mov(x2, Operand(profiling_counter_));
403 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
404 __ Subs(x3, x3, Smi::FromInt(delta));
405 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
409 void FullCodeGenerator::EmitProfilingCounterReset() {
410 int reset_value = FLAG_interrupt_budget;
411 __ Mov(x2, Operand(profiling_counter_));
412 __ Mov(x3, Smi::FromInt(reset_value));
413 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
417 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
418 Label* back_edge_target) {
419 DCHECK(jssp.Is(__ StackPointer()));
420 Comment cmnt(masm_, "[ Back edge bookkeeping");
421 // Block literal pools whilst emitting back edge code.
422 Assembler::BlockPoolsScope block_const_pool(masm_);
425 DCHECK(back_edge_target->is_bound());
426 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
427 // to reduce the absolute error due to the integer division. To do that,
428 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to the result).
431 static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
432 kCodeSizeMultiplier / 2);
433 int weight = Min(kMaxBackEdgeWeight,
434 Max(1, distance / kCodeSizeMultiplier));
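// For example, if kCodeSizeMultiplier were 16 and the back edge were 90
// bytes away, the weight would be (90 + 8) / 16 == 6 rather than the
// floored 90 / 16 == 5, capped at kMaxBackEdgeWeight.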
435 EmitProfilingCounterDecrement(weight);
437 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
439 // Record a mapping of this PC offset to the OSR id. This is used to find
440 // the AST id from the unoptimized code in order to use it as a key into
441 // the deoptimization input data found in the optimized code.
442 RecordBackEdge(stmt->OsrEntryId());
444 EmitProfilingCounterReset();
447 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
448 // Record a mapping of the OSR id to this PC. This is used if the OSR
449 // entry becomes the target of a bailout. We don't expect it to be, but
450 // we want it to work if it is.
451 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
455 void FullCodeGenerator::EmitReturnSequence() {
456 Comment cmnt(masm_, "[ Return sequence");
458 if (return_label_.is_bound()) {
459 __ B(&return_label_);
462 __ Bind(&return_label_);
464 // Push the return value on the stack as the parameter.
465 // Runtime::TraceExit returns its parameter in x0.
466 __ Push(result_register());
467 __ CallRuntime(Runtime::kTraceExit, 1);
468 DCHECK(x0.Is(result_register()));
470 // Pretend that the exit is a backwards jump to the entry.
472 if (info_->ShouldSelfOptimize()) {
473 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
475 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
476 weight = Min(kMaxBackEdgeWeight,
477 Max(1, distance / kCodeSizeMultiplier));
479 EmitProfilingCounterDecrement(weight);
483 __ Call(isolate()->builtins()->InterruptCheck(),
484 RelocInfo::CODE_TARGET);
486 EmitProfilingCounterReset();
489 SetReturnPosition(function());
490 const Register& current_sp = __ StackPointer();
491 // Nothing ensures 16-byte alignment here.
492 DCHECK(!current_sp.Is(csp));
493 __ Mov(current_sp, fp);
494 int no_frame_start = masm_->pc_offset();
495 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
496 // Drop the arguments and receiver and return.
497 // TODO(all): This implementation is overkill as it supports 2**31+1
498 // arguments, consider how to improve it without creating a security hole.
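// The ldr_pcrel below loads, from the 64-bit literal emitted by dc64 a few
// instructions further on, the combined size of the receiver and the
// arguments (kXRegSize * arg_count); adding it to the stack pointer drops
// them all at once before the return.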
500 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
501 __ Add(current_sp, current_sp, ip0);
503 int32_t arg_count = info_->scope()->num_parameters() + 1;
504 __ dc64(kXRegSize * arg_count);
505 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
510 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
511 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
512 codegen()->GetVar(result_register(), var);
513 __ Push(result_register());
517 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
518 // Root values have no side effects.
522 void FullCodeGenerator::AccumulatorValueContext::Plug(
523 Heap::RootListIndex index) const {
524 __ LoadRoot(result_register(), index);
528 void FullCodeGenerator::StackValueContext::Plug(
529 Heap::RootListIndex index) const {
530 __ LoadRoot(result_register(), index);
531 __ Push(result_register());
535 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
536 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
538 if (index == Heap::kUndefinedValueRootIndex ||
539 index == Heap::kNullValueRootIndex ||
540 index == Heap::kFalseValueRootIndex) {
541 if (false_label_ != fall_through_) __ B(false_label_);
542 } else if (index == Heap::kTrueValueRootIndex) {
543 if (true_label_ != fall_through_) __ B(true_label_);
545 __ LoadRoot(result_register(), index);
546 codegen()->DoTest(this);
551 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
555 void FullCodeGenerator::AccumulatorValueContext::Plug(
556 Handle<Object> lit) const {
557 __ Mov(result_register(), Operand(lit));
561 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
562 // Immediates cannot be pushed directly.
563 __ Mov(result_register(), Operand(lit));
564 __ Push(result_register());
568 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
569 codegen()->PrepareForBailoutBeforeSplit(condition(),
573 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
574 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
575 if (false_label_ != fall_through_) __ B(false_label_);
576 } else if (lit->IsTrue() || lit->IsJSObject()) {
577 if (true_label_ != fall_through_) __ B(true_label_);
578 } else if (lit->IsString()) {
579 if (String::cast(*lit)->length() == 0) {
580 if (false_label_ != fall_through_) __ B(false_label_);
582 if (true_label_ != fall_through_) __ B(true_label_);
584 } else if (lit->IsSmi()) {
585 if (Smi::cast(*lit)->value() == 0) {
586 if (false_label_ != fall_through_) __ B(false_label_);
588 if (true_label_ != fall_through_) __ B(true_label_);
591 // For simplicity we always test the accumulator register.
592 __ Mov(result_register(), Operand(lit));
593 codegen()->DoTest(this);
598 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
599 Register reg) const {
605 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
607 Register reg) const {
610 __ Move(result_register(), reg);
614 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
615 Register reg) const {
617 if (count > 1) __ Drop(count - 1);
622 void FullCodeGenerator::TestContext::DropAndPlug(int count,
623 Register reg) const {
625 // For simplicity we always test the accumulator register.
627 __ Mov(result_register(), reg);
628 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
629 codegen()->DoTest(this);
633 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
634 Label* materialize_false) const {
635 DCHECK(materialize_true == materialize_false);
636 __ Bind(materialize_true);
640 void FullCodeGenerator::AccumulatorValueContext::Plug(
641 Label* materialize_true,
642 Label* materialize_false) const {
644 __ Bind(materialize_true);
645 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
647 __ Bind(materialize_false);
648 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
653 void FullCodeGenerator::StackValueContext::Plug(
654 Label* materialize_true,
655 Label* materialize_false) const {
657 __ Bind(materialize_true);
658 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
660 __ Bind(materialize_false);
661 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
667 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
668 Label* materialize_false) const {
669 DCHECK(materialize_true == true_label_);
670 DCHECK(materialize_false == false_label_);
674 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
675 Heap::RootListIndex value_root_index =
676 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
677 __ LoadRoot(result_register(), value_root_index);
681 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
682 Heap::RootListIndex value_root_index =
683 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
684 __ LoadRoot(x10, value_root_index);
689 void FullCodeGenerator::TestContext::Plug(bool flag) const {
690 codegen()->PrepareForBailoutBeforeSplit(condition(),
695 if (true_label_ != fall_through_) {
699 if (false_label_ != fall_through_) {
706 void FullCodeGenerator::DoTest(Expression* condition,
709 Label* fall_through) {
710 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
711 CallIC(ic, condition->test_id());
712 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
716 // If (cond), branch to if_true.
717 // If (!cond), branch to if_false.
718 // fall_through is used as an optimization in cases where only one branch
719 // instruction is necessary.
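// For example, when if_false is the fall-through label a single
// B(cond, if_true) suffices, and when if_true falls through a single
// branch on the negated condition (as below) does the job; only when
// neither target falls through are two branches emitted.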
720 void FullCodeGenerator::Split(Condition cond,
723 Label* fall_through) {
724 if (if_false == fall_through) {
726 } else if (if_true == fall_through) {
727 DCHECK(if_false != fall_through);
728 __ B(NegateCondition(cond), if_false);
736 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
737 // Offset is negative because higher indexes are at lower addresses.
738 int offset = -var->index() * kXRegSize;
739 // Adjust by a (parameter or local) base offset.
740 if (var->IsParameter()) {
741 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
743 offset += JavaScriptFrameConstants::kLocal0Offset;
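// For a stack local with index 2, for instance, this yields
// MemOperand(fp, JavaScriptFrameConstants::kLocal0Offset - 2 * kXRegSize).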
745 return MemOperand(fp, offset);
749 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
750 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
751 if (var->IsContextSlot()) {
752 int context_chain_length = scope()->ContextChainLength(var->scope());
753 __ LoadContext(scratch, context_chain_length);
754 return ContextMemOperand(scratch, var->index());
756 return StackOperand(var);
761 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
762 // Use destination as scratch.
763 MemOperand location = VarOperand(var, dest);
764 __ Ldr(dest, location);
768 void FullCodeGenerator::SetVar(Variable* var,
772 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
773 DCHECK(!AreAliased(src, scratch0, scratch1));
774 MemOperand location = VarOperand(var, scratch0);
775 __ Str(src, location);
777 // Emit the write barrier code if the location is in the heap.
778 if (var->IsContextSlot()) {
779 // scratch0 contains the correct context.
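// Contexts are heap objects, so storing a pointer into one of their slots
// may create a reference the generational or incremental GC has not seen;
// the record-write call below tells the GC about it. Stack slots need no
// such barrier.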
780 __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
781 src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
786 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
787 bool should_normalize,
790 // Only prepare for bailouts before splits if we're in a test
791 // context. Otherwise, we let the Visit function deal with the
792 // preparation to avoid preparing with the same AST id twice.
793 if (!context()->IsTest() || !info_->IsOptimizable()) return;
795 // TODO(all): Investigate to see if there is something to work on here.
797 if (should_normalize) {
800 PrepareForBailout(expr, TOS_REG);
801 if (should_normalize) {
802 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
803 Split(eq, if_true, if_false, NULL);
809 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
810 // The variable in the declaration always resides in the current function context.
812 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
813 if (generate_debug_code_) {
814 // Check that we're not inside a with or catch context.
815 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
816 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
817 __ Check(ne, kDeclarationInWithContext);
818 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
819 __ Check(ne, kDeclarationInCatchContext);
824 void FullCodeGenerator::VisitVariableDeclaration(
825 VariableDeclaration* declaration) {
826 // If it was not possible to allocate the variable at compile time, we
827 // need to "declare" it at runtime to make sure it actually exists in the global object.
829 VariableProxy* proxy = declaration->proxy();
830 VariableMode mode = declaration->mode();
831 Variable* variable = proxy->var();
832 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
834 switch (variable->location()) {
835 case VariableLocation::GLOBAL:
836 case VariableLocation::UNALLOCATED:
837 globals_->Add(variable->name(), zone());
838 globals_->Add(variable->binding_needs_init()
839 ? isolate()->factory()->the_hole_value()
840 : isolate()->factory()->undefined_value(),
844 case VariableLocation::PARAMETER:
845 case VariableLocation::LOCAL:
847 Comment cmnt(masm_, "[ VariableDeclaration");
848 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
849 __ Str(x10, StackOperand(variable));
853 case VariableLocation::CONTEXT:
855 Comment cmnt(masm_, "[ VariableDeclaration");
856 EmitDebugCheckDeclarationContext(variable);
857 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
858 __ Str(x10, ContextMemOperand(cp, variable->index()));
859 // No write barrier since the_hole_value is in old space.
860 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
864 case VariableLocation::LOOKUP: {
865 Comment cmnt(masm_, "[ VariableDeclaration");
866 __ Mov(x2, Operand(variable->name()));
867 // Declaration nodes are always introduced in one of four modes.
868 DCHECK(IsDeclaredVariableMode(mode));
869 // Push initial value, if any.
870 // Note: For variables we must not push an initial value (such as
871 // 'undefined') because we may have a (legal) redeclaration and we
872 // must not destroy the current value.
874 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
877 // Pushing 0 (xzr) indicates no initial value.
880 __ CallRuntime(IsImmutableVariableMode(mode)
881 ? Runtime::kDeclareReadOnlyLookupSlot
882 : Runtime::kDeclareLookupSlot,
890 void FullCodeGenerator::VisitFunctionDeclaration(
891 FunctionDeclaration* declaration) {
892 VariableProxy* proxy = declaration->proxy();
893 Variable* variable = proxy->var();
894 switch (variable->location()) {
895 case VariableLocation::GLOBAL:
896 case VariableLocation::UNALLOCATED: {
897 globals_->Add(variable->name(), zone());
898 Handle<SharedFunctionInfo> function =
899 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
900 // Check for stack overflow exception.
901 if (function.is_null()) return SetStackOverflow();
902 globals_->Add(function, zone());
906 case VariableLocation::PARAMETER:
907 case VariableLocation::LOCAL: {
908 Comment cmnt(masm_, "[ Function Declaration");
909 VisitForAccumulatorValue(declaration->fun());
910 __ Str(result_register(), StackOperand(variable));
914 case VariableLocation::CONTEXT: {
915 Comment cmnt(masm_, "[ Function Declaration");
916 EmitDebugCheckDeclarationContext(variable);
917 VisitForAccumulatorValue(declaration->fun());
918 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
919 int offset = Context::SlotOffset(variable->index());
920 // We know that we have written a function, which is not a smi.
921 __ RecordWriteContextSlot(cp,
929 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
933 case VariableLocation::LOOKUP: {
934 Comment cmnt(masm_, "[ Function Declaration");
935 __ Mov(x2, Operand(variable->name()));
937 // Push initial value for function declaration.
938 VisitForStackValue(declaration->fun());
939 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
946 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
947 // Call the runtime to declare the globals.
948 __ Mov(x11, Operand(pairs));
949 Register flags = xzr;
950 if (Smi::FromInt(DeclareGlobalsFlags())) {
952 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
955 __ CallRuntime(Runtime::kDeclareGlobals, 2);
956 // Return value is ignored.
960 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
961 // Call the runtime to declare the modules.
962 __ Push(descriptions);
963 __ CallRuntime(Runtime::kDeclareModules, 1);
964 // Return value is ignored.
968 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
969 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
970 Comment cmnt(masm_, "[ SwitchStatement");
971 Breakable nested_statement(this, stmt);
972 SetStatementPosition(stmt);
974 // Keep the switch value on the stack until a case matches.
975 VisitForStackValue(stmt->tag());
976 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
978 ZoneList<CaseClause*>* clauses = stmt->cases();
979 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
981 Label next_test; // Recycled for each test.
982 // Compile all the tests with branches to their bodies.
983 for (int i = 0; i < clauses->length(); i++) {
984 CaseClause* clause = clauses->at(i);
985 clause->body_target()->Unuse();
987 // The default clause is not a test; remember it as the final fall-through.
988 if (clause->is_default()) {
989 default_clause = clause;
993 Comment cmnt(masm_, "[ Case comparison");
997 // Compile the label expression.
998 VisitForAccumulatorValue(clause->label());
1000 // Perform the comparison as if via '==='.
1001 __ Peek(x1, 0); // Switch value.
1003 JumpPatchSite patch_site(masm_);
1004 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1006 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1008 __ B(ne, &next_test);
1009 __ Drop(1); // Switch value is no longer needed.
1010 __ B(clause->body_target());
1011 __ Bind(&slow_case);
1014 // Record position before stub call for type feedback.
1015 SetExpressionPosition(clause);
1016 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1017 strength(language_mode())).code();
1018 CallIC(ic, clause->CompareId());
1019 patch_site.EmitPatchInfo();
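// EmitPatchInfo() records the register and the offset of the inline smi
// check emitted above so that, once the CompareIC has collected type
// feedback, PatchInlinedSmiCode (see the JumpPatchSite comments at the top
// of this file) can rewrite that check in place.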
1023 PrepareForBailout(clause, TOS_REG);
1024 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1026 __ B(clause->body_target());
1029 __ Cbnz(x0, &next_test);
1030 __ Drop(1); // Switch value is no longer needed.
1031 __ B(clause->body_target());
1034 // Discard the test value and jump to the default if present, otherwise to
1035 // the end of the statement.
1036 __ Bind(&next_test);
1037 __ Drop(1); // Switch value is no longer needed.
1038 if (default_clause == NULL) {
1039 __ B(nested_statement.break_label());
1041 __ B(default_clause->body_target());
1044 // Compile all the case bodies.
1045 for (int i = 0; i < clauses->length(); i++) {
1046 Comment cmnt(masm_, "[ Case body");
1047 CaseClause* clause = clauses->at(i);
1048 __ Bind(clause->body_target());
1049 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1050 VisitStatements(clause->statements());
1053 __ Bind(nested_statement.break_label());
1054 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1058 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1059 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1060 Comment cmnt(masm_, "[ ForInStatement");
1061 SetStatementPosition(stmt, SKIP_BREAK);
1063 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1065 // TODO(all): This visitor probably needs better comments and a revisit.
1068 ForIn loop_statement(this, stmt);
1069 increment_loop_depth();
1071 // Get the object to enumerate over. If the object is null or undefined, skip
1072 // over the loop. See ECMA-262 version 5, section 12.6.4.
1073 SetExpressionAsStatementPosition(stmt->enumerable());
1074 VisitForAccumulatorValue(stmt->enumerable());
1075 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1076 Register null_value = x15;
1077 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1078 __ Cmp(x0, null_value);
1081 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1083 // Convert the object to a JS object.
1084 Label convert, done_convert;
1085 __ JumpIfSmi(x0, &convert);
1086 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1088 ToObjectStub stub(isolate());
1090 __ Bind(&done_convert);
1091 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1094 // Check for proxies.
1096 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1097 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1099 // Check cache validity in generated code. This is a fast case for
1100 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1101 // guarantee cache validity, call the runtime system to check cache
1102 // validity or get the property names in a fixed array.
1103 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1105 // The enum cache is valid. Load the map of the object being
1106 // iterated over and use the cache for the iteration.
1108 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1111 // Get the set of properties to enumerate.
1112 __ Bind(&call_runtime);
1113 __ Push(x0); // Duplicate the enumerable object on the stack.
1114 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1115 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1117 // If we got a map from the runtime call, we can do a fast
1118 // modification check. Otherwise, we got a fixed array, and we have
1119 // to do a slow check.
1120 Label fixed_array, no_descriptors;
1121 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1122 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1124 // We got a map in register x0. Get the enumeration cache from it.
1125 __ Bind(&use_cache);
1127 __ EnumLengthUntagged(x1, x0);
1128 __ Cbz(x1, &no_descriptors);
1130 __ LoadInstanceDescriptors(x0, x2);
1131 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1133 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1135 // Set up the four remaining stack slots.
1137 // Map, enumeration cache, enum cache length, zero (both last as smis).
1138 __ Push(x0, x2, x1, xzr);
1141 __ Bind(&no_descriptors);
1145 // We got a fixed array in register x0. Iterate through that.
1146 __ Bind(&fixed_array);
1148 __ LoadObject(x1, FeedbackVector());
1149 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1150 int vector_index = FeedbackVector()->GetIndex(slot);
1151 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
1153 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1154 __ Peek(x10, 0); // Get enumerated object.
1155 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1156 // TODO(all): similar check was done already. Can we avoid it here?
1157 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1158 DCHECK(Smi::FromInt(0) == 0);
1159 __ CzeroX(x1, le); // Zero indicates proxy.
1160 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1161 // Smi and array, fixed array length (as smi) and initial index.
1162 __ Push(x1, x0, x2, xzr);
1164 // Generate code for doing the condition check.
1165 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1167 SetExpressionAsStatementPosition(stmt->each());
1169 // Load the current count to x0, load the length to x1.
1170 __ PeekPair(x0, x1, 0);
1171 __ Cmp(x0, x1); // Compare to the array length.
1172 __ B(hs, loop_statement.break_label());
1174 // Get the current entry of the array into register x3.
1175 __ Peek(x10, 2 * kXRegSize);
1176 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1177 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
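// x0 holds the current index as a smi; UntagSmiAndScale turns it into a
// byte offset into the fixed array (untag, then scale by kPointerSizeLog2),
// and the -kHeapObjectTag in the load compensates for the tagged array
// pointer while skipping the FixedArray header.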
1179 // Get the expected map from the stack or a smi in the
1180 // permanent slow case into register x2.
1181 __ Peek(x2, 3 * kXRegSize);
1183 // Check if the expected map still matches that of the enumerable.
1184 // If not, we may have to filter the key.
1186 __ Peek(x1, 4 * kXRegSize);
1187 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1189 __ B(eq, &update_each);
1191 // For proxies, no filtering is done.
1192 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1193 STATIC_ASSERT(kSmiTag == 0);
1194 __ Cbz(x2, &update_each);
1196 // Convert the entry to a string or (smi) 0 if it isn't a property
1197 // any more. If the property has been removed while iterating, we just skip it.
1200 __ CallRuntime(Runtime::kForInFilter, 2);
1201 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1203 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
1204 loop_statement.continue_label());
1206 // Update the 'each' property or variable from the possibly filtered
1207 // entry in register x3.
1208 __ Bind(&update_each);
1209 __ Mov(result_register(), x3);
1210 // Perform the assignment as if via '='.
1211 { EffectContext context(this);
1212 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1213 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1216 // Generate code for the body of the loop.
1217 Visit(stmt->body());
1219 // Generate code for going to the next element by incrementing
1220 // the index (smi) stored on top of the stack.
1221 __ Bind(loop_statement.continue_label());
1222 // TODO(all): We could use a callee saved register to avoid popping.
1224 __ Add(x0, x0, Smi::FromInt(1));
1227 EmitBackEdgeBookkeeping(stmt, &loop);
1230 // Remove the pointers stored on the stack.
1231 __ Bind(loop_statement.break_label());
1234 // Exit and decrement the loop depth.
1235 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1237 decrement_loop_depth();
1241 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1243 // Use the fast case closure allocation code that allocates in new space for
1244 // nested functions that don't need literals cloning. If we're running with
1245 // the --always-opt or the --prepare-always-opt flag, we need to use the
1246 // runtime function so that the new function we are creating here gets a
1247 // chance to have its code optimized and doesn't just get a copy of the
1248 // existing unoptimized code.
1249 if (!FLAG_always_opt &&
1250 !FLAG_prepare_always_opt &&
1252 scope()->is_function_scope() &&
1253 info->num_literals() == 0) {
1254 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1255 __ Mov(x2, Operand(info));
1258 __ Mov(x11, Operand(info));
1259 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1260 : Heap::kFalseValueRootIndex);
1261 __ Push(cp, x11, x10);
1262 __ CallRuntime(Runtime::kNewClosure, 3);
1264 context()->Plug(x0);
1268 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1270 FeedbackVectorICSlot slot) {
1271 if (NeedsHomeObject(initializer)) {
1272 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1273 __ Mov(StoreDescriptor::NameRegister(),
1274 Operand(isolate()->factory()->home_object_symbol()));
1275 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1276 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1282 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1283 TypeofMode typeof_mode,
1285 Register current = cp;
1286 Register next = x10;
1287 Register temp = x11;
1291 if (s->num_heap_slots() > 0) {
1292 if (s->calls_sloppy_eval()) {
1293 // Check that extension is NULL.
1294 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1295 __ Cbnz(temp, slow);
1297 // Load next context in chain.
1298 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1299 // Walk the rest of the chain without clobbering cp.
1302 // If no outer scope calls eval, we do not need to check more
1303 // context extensions.
1304 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1305 s = s->outer_scope();
1308 if (s->is_eval_scope()) {
1310 __ Mov(next, current);
1313 // Terminate at native context.
1314 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1315 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1316 // Check that extension is NULL.
1317 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1318 __ Cbnz(temp, slow);
1319 // Load next context in chain.
1320 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1325 // All extension objects were empty and it is safe to use a normal global variable load.
1327 EmitGlobalVariableLoad(proxy, typeof_mode);
1331 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1333 DCHECK(var->IsContextSlot());
1334 Register context = cp;
1335 Register next = x10;
1336 Register temp = x11;
1338 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1339 if (s->num_heap_slots() > 0) {
1340 if (s->calls_sloppy_eval()) {
1341 // Check that extension is NULL.
1342 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1343 __ Cbnz(temp, slow);
1345 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1346 // Walk the rest of the chain without clobbering cp.
1350 // Check that last extension is NULL.
1351 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1352 __ Cbnz(temp, slow);
1354 // This function is used only for loads, not stores, so it's safe to
1355 // return a cp-based operand (the write barrier cannot be allowed to
1356 // destroy the cp register).
1357 return ContextMemOperand(context, var->index());
1361 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1362 TypeofMode typeof_mode,
1363 Label* slow, Label* done) {
1364 // Generate fast-case code for variables that might be shadowed by
1365 // eval-introduced variables. Eval is used a lot without
1366 // introducing variables. In those cases, we do not want to
1367 // perform a runtime call for all variables in the scope
1368 // containing the eval.
1369 Variable* var = proxy->var();
1370 if (var->mode() == DYNAMIC_GLOBAL) {
1371 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1373 } else if (var->mode() == DYNAMIC_LOCAL) {
1374 Variable* local = var->local_if_not_shadowed();
1375 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1376 if (local->mode() == LET || local->mode() == CONST ||
1377 local->mode() == CONST_LEGACY) {
1378 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1379 if (local->mode() == CONST_LEGACY) {
1380 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1381 } else { // LET || CONST
1382 __ Mov(x0, Operand(var->name()));
1384 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1392 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1393 TypeofMode typeof_mode) {
1394 Variable* var = proxy->var();
1395 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1396 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1397 if (var->IsGlobalSlot()) {
1398 DCHECK(var->index() > 0);
1399 DCHECK(var->IsStaticGlobalObjectProperty());
1400 int const slot = var->index();
1401 int const depth = scope()->ContextChainLength(var->scope());
1402 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1403 __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
1404 LoadGlobalViaContextStub stub(isolate(), depth);
1407 __ Push(Smi::FromInt(slot));
1408 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1411 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1412 __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1413 __ Mov(LoadDescriptor::SlotRegister(),
1414 SmiFromSlot(proxy->VariableFeedbackSlot()));
1415 CallLoadIC(typeof_mode);
1420 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1421 TypeofMode typeof_mode) {
1422 // Record position before possible IC call.
1423 SetExpressionPosition(proxy);
1424 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1425 Variable* var = proxy->var();
1427 // Three cases: global variables, lookup variables, and all other types of variables.
1429 switch (var->location()) {
1430 case VariableLocation::GLOBAL:
1431 case VariableLocation::UNALLOCATED: {
1432 Comment cmnt(masm_, "Global variable");
1433 EmitGlobalVariableLoad(proxy, typeof_mode);
1434 context()->Plug(x0);
1438 case VariableLocation::PARAMETER:
1439 case VariableLocation::LOCAL:
1440 case VariableLocation::CONTEXT: {
1441 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1442 Comment cmnt(masm_, var->IsContextSlot()
1443 ? "Context variable"
1444 : "Stack variable");
1445 if (var->binding_needs_init()) {
1446 // var->scope() may be NULL when the proxy is located in eval code and
1447 // refers to a potential outside binding. Currently those bindings are
1448 // always looked up dynamically, i.e. in that case
1449 // var->location() == LOOKUP.
1451 DCHECK(var->scope() != NULL);
1453 // Check if the binding really needs an initialization check. The check
1454 // can be skipped in the following situation: we have a LET or CONST
1455 // binding in harmony mode, both the Variable and the VariableProxy have
1456 // the same declaration scope (i.e. they are both in global code, in the
1457 // same function or in the same eval code) and the VariableProxy is in
1458 // the source physically located after the initializer of the variable.
1460 // We cannot skip any initialization checks for CONST in non-harmony
1461 // mode because const variables may be declared but never initialized:
1462 // if (false) { const x; }; var y = x;
1464 // The condition on the declaration scopes is a conservative check for
1465 // nested functions that access a binding and are called before the
1466 // binding is initialized:
1467 // function() { f(); let x = 1; function f() { x = 2; } }
1469 bool skip_init_check;
1470 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1471 skip_init_check = false;
1472 } else if (var->is_this()) {
1473 CHECK(info_->function() != nullptr &&
1474 (info_->function()->kind() & kSubclassConstructor) != 0);
1475 // TODO(dslomov): implement 'this' hole check elimination.
1476 skip_init_check = false;
1478 // Check that we always have valid source position.
1479 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1480 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1481 skip_init_check = var->mode() != CONST_LEGACY &&
1482 var->initializer_position() < proxy->position();
1485 if (!skip_init_check) {
1486 // Let and const need a read barrier.
1489 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1490 if (var->mode() == LET || var->mode() == CONST) {
1491 // Throw a reference error when using an uninitialized let/const
1492 // binding in harmony mode.
1493 __ Mov(x0, Operand(var->name()));
1495 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1498 // Uninitialized const bindings outside of harmony mode are unholed.
1499 DCHECK(var->mode() == CONST_LEGACY);
1500 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1503 context()->Plug(x0);
1507 context()->Plug(var);
1511 case VariableLocation::LOOKUP: {
1513 // Generate code for loading from variables potentially shadowed by
1514 // eval-introduced variables.
1515 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1517 Comment cmnt(masm_, "Lookup variable");
1518 __ Mov(x1, Operand(var->name()));
1519 __ Push(cp, x1); // Context and name.
1520 Runtime::FunctionId function_id =
1521 typeof_mode == NOT_INSIDE_TYPEOF
1522 ? Runtime::kLoadLookupSlot
1523 : Runtime::kLoadLookupSlotNoReferenceError;
1524 __ CallRuntime(function_id, 2);
1526 context()->Plug(x0);
1533 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1534 Comment cmnt(masm_, "[ RegExpLiteral");
1536 // Registers will be used as follows:
1537 // x5 = materialized value (RegExp literal)
1538 // x4 = JS function, literals array
1539 // x3 = literal index
1540 // x2 = RegExp pattern
1541 // x1 = RegExp flags
1542 // x0 = RegExp literal clone
1543 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1544 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1545 int literal_offset =
1546 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1547 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1548 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1550 // Create regexp literal using runtime function.
1551 // Result will be in x0.
1552 __ Mov(x3, Smi::FromInt(expr->literal_index()));
1553 __ Mov(x2, Operand(expr->pattern()));
1554 __ Mov(x1, Operand(expr->flags()));
1555 __ Push(x4, x3, x2, x1);
1556 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1559 __ Bind(&materialized);
1560 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1561 Label allocated, runtime_allocate;
1562 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1565 __ Bind(&runtime_allocate);
1566 __ Mov(x10, Smi::FromInt(size));
1568 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1571 __ Bind(&allocated);
1572 // After this, registers are used as follows:
1573 // x0: Newly allocated regexp.
1574 // x5: Materialized regexp.
1575 // x10, x11, x12: temps.
1576 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1577 context()->Plug(x0);
1581 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1582 if (expression == NULL) {
1583 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1586 VisitForStackValue(expression);
1591 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1592 Comment cmnt(masm_, "[ ObjectLiteral");
1594 Handle<FixedArray> constant_properties = expr->constant_properties();
1595 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1596 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1597 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1598 __ Mov(x1, Operand(constant_properties));
1599 int flags = expr->ComputeFlags();
1600 __ Mov(x0, Smi::FromInt(flags));
1601 if (MustCreateObjectLiteralWithRuntime(expr)) {
1602 __ Push(x3, x2, x1, x0);
1603 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1605 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1608 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1610 // If result_saved is true the result is on top of the stack. If
1611 // result_saved is false the result is in x0.
1612 bool result_saved = false;
1614 AccessorTable accessor_table(zone());
1615 int property_index = 0;
1616 // store_slot_index points to the vector IC slot for the next store IC used.
1617 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1618 // and must be updated if the number of store ICs emitted here changes.
1619 int store_slot_index = 0;
1620 for (; property_index < expr->properties()->length(); property_index++) {
1621 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1622 if (property->is_computed_name()) break;
1623 if (property->IsCompileTimeValue()) continue;
1625 Literal* key = property->key()->AsLiteral();
1626 Expression* value = property->value();
1627 if (!result_saved) {
1628 __ Push(x0); // Save result on stack
1629 result_saved = true;
1631 switch (property->kind()) {
1632 case ObjectLiteral::Property::CONSTANT:
1634 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1635 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1637 case ObjectLiteral::Property::COMPUTED:
1638 // It is safe to use [[Put]] here because the boilerplate already
1639 // contains computed properties with an uninitialized value.
1640 if (key->value()->IsInternalizedString()) {
1641 if (property->emit_store()) {
1642 VisitForAccumulatorValue(value);
1643 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1644 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1645 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1646 if (FLAG_vector_stores) {
1647 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1650 CallStoreIC(key->LiteralFeedbackId());
1652 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1654 if (NeedsHomeObject(value)) {
1655 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
1656 __ Mov(StoreDescriptor::NameRegister(),
1657 Operand(isolate()->factory()->home_object_symbol()));
1658 __ Peek(StoreDescriptor::ValueRegister(), 0);
1659 if (FLAG_vector_stores) {
1660 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1665 VisitForEffect(value);
1671 VisitForStackValue(key);
1672 VisitForStackValue(value);
1673 if (property->emit_store()) {
1674 EmitSetHomeObjectIfNeeded(
1675 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1676 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1678 __ CallRuntime(Runtime::kSetProperty, 4);
1683 case ObjectLiteral::Property::PROTOTYPE:
1684 DCHECK(property->emit_store());
1685 // Duplicate receiver on stack.
1688 VisitForStackValue(value);
1689 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1691 case ObjectLiteral::Property::GETTER:
1692 if (property->emit_store()) {
1693 accessor_table.lookup(key)->second->getter = value;
1696 case ObjectLiteral::Property::SETTER:
1697 if (property->emit_store()) {
1698 accessor_table.lookup(key)->second->setter = value;
1704 // Emit code to define accessors, using only a single call to the runtime for
1705 // each pair of corresponding getters and setters.
1706 for (AccessorTable::Iterator it = accessor_table.begin();
1707 it != accessor_table.end();
1709 __ Peek(x10, 0); // Duplicate receiver.
1711 VisitForStackValue(it->first);
1712 EmitAccessor(it->second->getter);
1713 EmitSetHomeObjectIfNeeded(
1714 it->second->getter, 2,
1715 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1716 EmitAccessor(it->second->setter);
1717 EmitSetHomeObjectIfNeeded(
1718 it->second->setter, 3,
1719 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1720 __ Mov(x10, Smi::FromInt(NONE));
1722 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1725 // Object literals have two parts. The "static" part on the left contains no
1726 // computed property names, and so we can compute its map ahead of time; see
1727 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1728 // starts with the first computed property name, and continues with all
1729 // properties to its right. All the code from above initializes the static
1730 // component of the object literal, and arranges for the map of the result to
1731 // reflect the static order in which the keys appear. For the dynamic
1732 // properties, we compile them into a series of "SetOwnProperty" runtime
1733 // calls. This will preserve insertion order.
1734 for (; property_index < expr->properties()->length(); property_index++) {
1735 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1737 Expression* value = property->value();
1738 if (!result_saved) {
1739 __ Push(x0); // Save result on stack
1740 result_saved = true;
1743 __ Peek(x10, 0); // Duplicate receiver.
1746 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1747 DCHECK(!property->is_computed_name());
1748 VisitForStackValue(value);
1749 DCHECK(property->emit_store());
1750 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1752 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1753 VisitForStackValue(value);
1754 EmitSetHomeObjectIfNeeded(
1755 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1757 switch (property->kind()) {
1758 case ObjectLiteral::Property::CONSTANT:
1759 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1760 case ObjectLiteral::Property::COMPUTED:
1761 if (property->emit_store()) {
1762 __ Mov(x0, Smi::FromInt(NONE));
1764 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1770 case ObjectLiteral::Property::PROTOTYPE:
1774 case ObjectLiteral::Property::GETTER:
1775 __ Mov(x0, Smi::FromInt(NONE));
1777 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1780 case ObjectLiteral::Property::SETTER:
1781 __ Mov(x0, Smi::FromInt(NONE));
1783 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1789 if (expr->has_function()) {
1790 DCHECK(result_saved);
1793 __ CallRuntime(Runtime::kToFastProperties, 1);
1797 context()->PlugTOS();
1799 context()->Plug(x0);
1802 // Verify that compilation exactly consumed the number of store ic slots that
1803 // the ObjectLiteral node had to offer.
1804 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1808 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1809 Comment cmnt(masm_, "[ ArrayLiteral");
1811 expr->BuildConstantElements(isolate());
1812 Handle<FixedArray> constant_elements = expr->constant_elements();
1813 bool has_fast_elements =
1814 IsFastObjectElementsKind(expr->constant_elements_kind());
1816 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1817 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1818 // If the only customer of allocation sites is transitioning, then
1819 // we can turn it off if we don't have anywhere else to transition to.
1820 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1823 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1824 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1825 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1826 __ Mov(x1, Operand(constant_elements));
1827 if (MustCreateArrayLiteralWithRuntime(expr)) {
1828 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1829 __ Push(x3, x2, x1, x0);
1830 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1832 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1835 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1837 bool result_saved = false; // Is the result saved to the stack?
1838 ZoneList<Expression*>* subexprs = expr->values();
1839 int length = subexprs->length();
1841 // Emit code to evaluate all the non-constant subexpressions and to store
1842 // them into the newly cloned array.
1843 int array_index = 0;
1844 for (; array_index < length; array_index++) {
1845 Expression* subexpr = subexprs->at(array_index);
1846 if (subexpr->IsSpread()) break;
1848 // If the subexpression is a literal or a simple materialized literal it
1849 // is already set in the cloned array.
1850 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1852 if (!result_saved) {
1853 __ Mov(x1, Smi::FromInt(expr->literal_index()));
1855 result_saved = true;
1857 VisitForAccumulatorValue(subexpr);
1859 if (has_fast_elements) {
1860 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1861 __ Peek(x6, kPointerSize); // Copy of array literal.
1862 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1863 __ Str(result_register(), FieldMemOperand(x1, offset));
1864 // Update the write barrier for the array store.
1865 __ RecordWriteField(x1, offset, result_register(), x10,
1866 kLRHasBeenSaved, kDontSaveFPRegs,
1867 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1869 __ Mov(x3, Smi::FromInt(array_index));
1870 StoreArrayLiteralElementStub stub(isolate());
1874 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1877 // In case the array literal contains spread expressions, it has two parts. The
1878 // first part is the "static" array, which has a literal index and is handled
1879 // above. The second part starts at the first spread expression (inclusive),
1880 // and its elements get appended to the array. Note that the number of
1881 // elements an iterable produces is unknown ahead of time.
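// For example, in [1, 2, ...iter, 3] the elements 1 and 2 are stored into the
// cloned boilerplate above, while ...iter and 3 are appended one at a time by
// the loop below.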
1882 if (array_index < length && result_saved) {
1883 __ Drop(1); // literal index
1885 result_saved = false;
1887 for (; array_index < length; array_index++) {
1888 Expression* subexpr = subexprs->at(array_index);
1891 if (subexpr->IsSpread()) {
1892 VisitForStackValue(subexpr->AsSpread()->expression());
1893 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1895 VisitForStackValue(subexpr);
1896 __ CallRuntime(Runtime::kAppendElement, 2);
1899 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1903 __ Drop(1); // literal index
1904 context()->PlugTOS();
1906 context()->Plug(x0);
1911 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1912 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1914 Comment cmnt(masm_, "[ Assignment");
1915 SetExpressionPosition(expr, INSERT_BREAK);
1917 Property* property = expr->target()->AsProperty();
1918 LhsKind assign_type = Property::GetAssignType(property);
1920 // Evaluate LHS expression.
1921 switch (assign_type) {
1923 // Nothing to do here.
1925 case NAMED_PROPERTY:
1926 if (expr->is_compound()) {
1927 // We need the receiver both on the stack and in the register.
1928 VisitForStackValue(property->obj());
1929 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1931 VisitForStackValue(property->obj());
1934 case NAMED_SUPER_PROPERTY:
1936 property->obj()->AsSuperPropertyReference()->this_var());
1937 VisitForAccumulatorValue(
1938 property->obj()->AsSuperPropertyReference()->home_object());
1939 __ Push(result_register());
1940 if (expr->is_compound()) {
1941 const Register scratch = x10;
1942 __ Peek(scratch, kPointerSize);
1943 __ Push(scratch, result_register());
1946 case KEYED_SUPER_PROPERTY:
1948 property->obj()->AsSuperPropertyReference()->this_var());
1950 property->obj()->AsSuperPropertyReference()->home_object());
1951 VisitForAccumulatorValue(property->key());
1952 __ Push(result_register());
1953 if (expr->is_compound()) {
1954 const Register scratch1 = x10;
1955 const Register scratch2 = x11;
1956 __ Peek(scratch1, 2 * kPointerSize);
1957 __ Peek(scratch2, kPointerSize);
1958 __ Push(scratch1, scratch2, result_register());
1961 case KEYED_PROPERTY:
1962 if (expr->is_compound()) {
1963 VisitForStackValue(property->obj());
1964 VisitForStackValue(property->key());
1965 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1966 __ Peek(LoadDescriptor::NameRegister(), 0);
1968 VisitForStackValue(property->obj());
1969 VisitForStackValue(property->key());
1974 // For compound assignments we need another deoptimization point after the
1975 // variable/property load.
1976 if (expr->is_compound()) {
1977 { AccumulatorValueContext context(this);
1978 switch (assign_type) {
1980 EmitVariableLoad(expr->target()->AsVariableProxy());
1981 PrepareForBailout(expr->target(), TOS_REG);
1983 case NAMED_PROPERTY:
1984 EmitNamedPropertyLoad(property);
1985 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1987 case NAMED_SUPER_PROPERTY:
1988 EmitNamedSuperPropertyLoad(property);
1989 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1991 case KEYED_SUPER_PROPERTY:
1992 EmitKeyedSuperPropertyLoad(property);
1993 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1995 case KEYED_PROPERTY:
1996 EmitKeyedPropertyLoad(property);
1997 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2002 Token::Value op = expr->binary_op();
2003 __ Push(x0); // Left operand goes on the stack.
2004 VisitForAccumulatorValue(expr->value());
2006 AccumulatorValueContext context(this);
2007 if (ShouldInlineSmiCase(op)) {
2008 EmitInlineSmiBinaryOp(expr->binary_operation(),
2013 EmitBinaryOp(expr->binary_operation(), op);
2016 // Deoptimization point in case the binary operation may have side effects.
2017 PrepareForBailout(expr->binary_operation(), TOS_REG);
2019 VisitForAccumulatorValue(expr->value());
2022 SetExpressionPosition(expr);
2025 switch (assign_type) {
2027 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2028 expr->op(), expr->AssignmentSlot());
2029 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2030 context()->Plug(x0);
2032 case NAMED_PROPERTY:
2033 EmitNamedPropertyAssignment(expr);
2035 case NAMED_SUPER_PROPERTY:
2036 EmitNamedSuperPropertyStore(property);
2037 context()->Plug(x0);
2039 case KEYED_SUPER_PROPERTY:
2040 EmitKeyedSuperPropertyStore(property);
2041 context()->Plug(x0);
2043 case KEYED_PROPERTY:
2044 EmitKeyedPropertyAssignment(expr);
2050 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2051 SetExpressionPosition(prop);
2052 Literal* key = prop->key()->AsLiteral();
2053 DCHECK(!prop->IsSuperAccess());
2055 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2056 __ Mov(LoadDescriptor::SlotRegister(),
2057 SmiFromSlot(prop->PropertyFeedbackSlot()));
2058 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2062 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2063 // Stack: receiver, home_object.
2064 SetExpressionPosition(prop);
2065 Literal* key = prop->key()->AsLiteral();
2066 DCHECK(!key->value()->IsSmi());
2067 DCHECK(prop->IsSuperAccess());
2069 __ Push(key->value());
2070 __ Push(Smi::FromInt(language_mode()));
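// The runtime call below takes four arguments from the stack: receiver,
// home_object, the property name and the language mode.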
2071 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2075 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2076 SetExpressionPosition(prop);
2077 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2078 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2079 __ Mov(LoadDescriptor::SlotRegister(),
2080 SmiFromSlot(prop->PropertyFeedbackSlot()));
2085 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2086 // Stack: receiver, home_object, key.
2087 SetExpressionPosition(prop);
2088 __ Push(Smi::FromInt(language_mode()));
2089 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2093 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2095 Expression* left_expr,
2096 Expression* right_expr) {
2097 Label done, both_smis, stub_call;
2099 // Get the arguments.
2101 Register right = x0;
2102 Register result = x0;
2105 // Perform combined smi check on both operands.
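// Since the smi tag is zero, the OR of the two values has its tag bit clear
// only if both operands are smis.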
2106 __ Orr(x10, left, right);
2107 JumpPatchSite patch_site(masm_);
2108 patch_site.EmitJumpIfSmi(x10, &both_smis);
2110 __ Bind(&stub_call);
2113 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2115 Assembler::BlockPoolsScope scope(masm_);
2116 CallIC(code, expr->BinaryOperationFeedbackId());
2117 patch_site.EmitPatchInfo();
2121 __ Bind(&both_smis);
2122 // Smi case. This code works in the same way as the smi-smi case in the type
2123 // recording binary operation stub, see
2124 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2125 // TODO(all): That doesn't exist any more. Where are the comments?
2127 // The set of operations that needs to be supported here is controlled by
2128 // FullCodeGenerator::ShouldInlineSmiCase().
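// (Currently these are the shift, add, sub, mul and bitwise-logical cases
// handled in the switch below.)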
2131 __ Ubfx(right, right, kSmiShift, 5);
2132 __ Asr(result, left, right);
2133 __ Bic(result, result, kSmiShiftMask);
2136 __ Ubfx(right, right, kSmiShift, 5);
2137 __ Lsl(result, left, right);
2140 // If `left >>> right` >= 0x80000000, the result is not representable in a
2141 // signed 32-bit smi.
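// For example, -1 >>> 0 is 0xffffffff, which does not fit in a signed 32-bit
// smi value, so that case falls back to the stub.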
2142 __ Ubfx(right, right, kSmiShift, 5);
2143 __ Lsr(x10, left, right);
2144 __ Tbnz(x10, kXSignBit, &stub_call);
2145 __ Bic(result, x10, kSmiShiftMask);
2148 __ Adds(x10, left, right);
2149 __ B(vs, &stub_call);
2150 __ Mov(result, x10);
2153 __ Subs(x10, left, right);
2154 __ B(vs, &stub_call);
2155 __ Mov(result, x10);
2158 Label not_minus_zero, done;
2159 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2160 STATIC_ASSERT(kSmiTag == 0);
2161 __ Smulh(x10, left, right);
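// Both operands carry their value in the upper 32 bits (kSmiShift == 32), so
// the high 64 bits of the 128-bit product are exactly the untagged product.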
2162 __ Cbnz(x10, &not_minus_zero);
2163 __ Eor(x11, left, right);
2164 __ Tbnz(x11, kXSignBit, &stub_call);
2165 __ Mov(result, x10);
2167 __ Bind(&not_minus_zero);
2169 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2170 __ B(lt, &stub_call);
2171 __ SmiTag(result, x10);
2176 __ Orr(result, left, right);
2178 case Token::BIT_AND:
2179 __ And(result, left, right);
2181 case Token::BIT_XOR:
2182 __ Eor(result, left, right);
2189 context()->Plug(x0);
2193 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2196 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2197 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2199 Assembler::BlockPoolsScope scope(masm_);
2200 CallIC(code, expr->BinaryOperationFeedbackId());
2201 patch_site.EmitPatchInfo();
2203 context()->Plug(x0);
2207 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2208 int* used_store_slots) {
2209 // Constructor is in x0.
2210 DCHECK(lit != NULL);
2213 // No access check is needed here since the constructor is created by the
2215 Register scratch = x1;
2217 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2220 for (int i = 0; i < lit->properties()->length(); i++) {
2221 ObjectLiteral::Property* property = lit->properties()->at(i);
2222 Expression* value = property->value();
2224 if (property->is_static()) {
2225 __ Peek(scratch, kPointerSize); // constructor
2227 __ Peek(scratch, 0); // prototype
2230 EmitPropertyKey(property, lit->GetIdForProperty(i));
2232 // The static prototype property is read-only. We handle the non-computed
2233 // property name case in the parser. Since this is the only case where we
2234 // need to check for an own read-only property, we special-case it here so
2235 // that we do not need to repeat the check for every property.
2236 if (property->is_static() && property->is_computed_name()) {
2237 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2241 VisitForStackValue(value);
2242 EmitSetHomeObjectIfNeeded(value, 2,
2243 lit->SlotForHomeObject(value, used_store_slots));
2245 switch (property->kind()) {
2246 case ObjectLiteral::Property::CONSTANT:
2247 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2248 case ObjectLiteral::Property::PROTOTYPE:
2250 case ObjectLiteral::Property::COMPUTED:
2251 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2254 case ObjectLiteral::Property::GETTER:
2255 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2257 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2260 case ObjectLiteral::Property::SETTER:
2261 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2263 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2271 // Set both the prototype and constructor to have fast properties, and also
2272 // freeze them in strong mode.
2273 __ CallRuntime(is_strong(language_mode())
2274 ? Runtime::kFinalizeClassDefinitionStrong
2275 : Runtime::kFinalizeClassDefinition,
2280 void FullCodeGenerator::EmitAssignment(Expression* expr,
2281 FeedbackVectorICSlot slot) {
2282 DCHECK(expr->IsValidReferenceExpressionOrThis());
2284 Property* prop = expr->AsProperty();
2285 LhsKind assign_type = Property::GetAssignType(prop);
2287 switch (assign_type) {
2289 Variable* var = expr->AsVariableProxy()->var();
2290 EffectContext context(this);
2291 EmitVariableAssignment(var, Token::ASSIGN, slot);
2294 case NAMED_PROPERTY: {
2295 __ Push(x0); // Preserve value.
2296 VisitForAccumulatorValue(prop->obj());
2297 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2299 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2300 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2301 __ Mov(StoreDescriptor::NameRegister(),
2302 Operand(prop->key()->AsLiteral()->value()));
2303 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2307 case NAMED_SUPER_PROPERTY: {
2309 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2310 VisitForAccumulatorValue(
2311 prop->obj()->AsSuperPropertyReference()->home_object());
2312 // stack: value, this; x0: home_object
2313 Register scratch = x10;
2314 Register scratch2 = x11;
2315 __ mov(scratch, result_register()); // home_object
2316 __ Peek(x0, kPointerSize); // value
2317 __ Peek(scratch2, 0); // this
2318 __ Poke(scratch2, kPointerSize); // this
2319 __ Poke(scratch, 0); // home_object
2320 // stack: this, home_object; x0: value
2321 EmitNamedSuperPropertyStore(prop);
2324 case KEYED_SUPER_PROPERTY: {
2326 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2328 prop->obj()->AsSuperPropertyReference()->home_object());
2329 VisitForAccumulatorValue(prop->key());
2330 Register scratch = x10;
2331 Register scratch2 = x11;
2332 __ Peek(scratch2, 2 * kPointerSize); // value
2333 // stack: value, this, home_object; x0: key, x11: value
2334 __ Peek(scratch, kPointerSize); // this
2335 __ Poke(scratch, 2 * kPointerSize);
2336 __ Peek(scratch, 0); // home_object
2337 __ Poke(scratch, kPointerSize);
2339 __ Move(x0, scratch2);
2340 // stack: this, home_object, key; x0: value.
2341 EmitKeyedSuperPropertyStore(prop);
2344 case KEYED_PROPERTY: {
2345 __ Push(x0); // Preserve value.
2346 VisitForStackValue(prop->obj());
2347 VisitForAccumulatorValue(prop->key());
2348 __ Mov(StoreDescriptor::NameRegister(), x0);
2349 __ Pop(StoreDescriptor::ReceiverRegister(),
2350 StoreDescriptor::ValueRegister());
2351 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2353 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2358 context()->Plug(x0);
2362 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2363 Variable* var, MemOperand location) {
2364 __ Str(result_register(), location);
2365 if (var->IsContextSlot()) {
2366 // RecordWrite may destroy all its register arguments.
2367 __ Mov(x10, result_register());
2368 int offset = Context::SlotOffset(var->index());
2369 __ RecordWriteContextSlot(
2370 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2375 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2376 FeedbackVectorICSlot slot) {
2377 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2378 if (var->IsUnallocated()) {
2379 // Global var, const, or let.
2380 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2381 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2382 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2385 } else if (var->IsGlobalSlot()) {
2386 // Global var, const, or let.
2387 DCHECK(var->index() > 0);
2388 DCHECK(var->IsStaticGlobalObjectProperty());
2389 int const slot = var->index();
2390 int const depth = scope()->ContextChainLength(var->scope());
2391 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2392 __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
2393 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
2394 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2397 __ Push(Smi::FromInt(slot));
2399 __ CallRuntime(is_strict(language_mode())
2400 ? Runtime::kStoreGlobalViaContext_Strict
2401 : Runtime::kStoreGlobalViaContext_Sloppy,
2404 } else if (var->mode() == LET && op != Token::INIT_LET) {
2405 // Non-initializing assignment to let variable needs a write barrier.
2406 DCHECK(!var->IsLookupSlot());
2407 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2409 MemOperand location = VarOperand(var, x1);
2410 __ Ldr(x10, location);
2411 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2412 __ Mov(x10, Operand(var->name()));
2414 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2415 // Perform the assignment.
2417 EmitStoreToStackLocalOrContextSlot(var, location);
2419 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2420 // Assignment to const variable needs a write barrier.
2421 DCHECK(!var->IsLookupSlot());
2422 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2424 MemOperand location = VarOperand(var, x1);
2425 __ Ldr(x10, location);
2426 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2427 __ Mov(x10, Operand(var->name()));
2429 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2430 __ Bind(&const_error);
2431 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2433 } else if (var->is_this() && op == Token::INIT_CONST) {
2434 // Initializing assignment to const {this} needs a write barrier.
2435 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2436 Label uninitialized_this;
2437 MemOperand location = VarOperand(var, x1);
2438 __ Ldr(x10, location);
2439 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2440 __ Mov(x0, Operand(var->name()));
2442 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2443 __ bind(&uninitialized_this);
2444 EmitStoreToStackLocalOrContextSlot(var, location);
2446 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2447 if (var->IsLookupSlot()) {
2448 // Assignment to var.
2449 __ Mov(x11, Operand(var->name()));
2450 __ Mov(x10, Smi::FromInt(language_mode()));
2453 // jssp[16] : context.
2454 // jssp[24] : value.
2455 __ Push(x0, cp, x11, x10);
2456 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2458 // Assignment to var or initializing assignment to let/const in harmony mode.
2460 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2461 MemOperand location = VarOperand(var, x1);
2462 if (FLAG_debug_code && op == Token::INIT_LET) {
2463 __ Ldr(x10, location);
2464 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2465 __ Check(eq, kLetBindingReInitialization);
2467 EmitStoreToStackLocalOrContextSlot(var, location);
2470 } else if (op == Token::INIT_CONST_LEGACY) {
2471 // Const initializers need a write barrier.
2472 DCHECK(var->mode() == CONST_LEGACY);
2473 DCHECK(!var->IsParameter()); // No const parameters.
2474 if (var->IsLookupSlot()) {
2475 __ Mov(x1, Operand(var->name()));
2476 __ Push(x0, cp, x1);
2477 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2479 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2481 MemOperand location = VarOperand(var, x1);
2482 __ Ldr(x10, location);
2483 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2484 EmitStoreToStackLocalOrContextSlot(var, location);
2489 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2490 if (is_strict(language_mode())) {
2491 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2493 // Silently ignore store in sloppy mode.
2498 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2499 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2500 // Assignment to a property, using a named store IC.
2501 Property* prop = expr->target()->AsProperty();
2502 DCHECK(prop != NULL);
2503 DCHECK(prop->key()->IsLiteral());
2505 __ Mov(StoreDescriptor::NameRegister(),
2506 Operand(prop->key()->AsLiteral()->value()));
2507 __ Pop(StoreDescriptor::ReceiverRegister());
2508 if (FLAG_vector_stores) {
2509 EmitLoadStoreICSlot(expr->AssignmentSlot());
2512 CallStoreIC(expr->AssignmentFeedbackId());
2515 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2516 context()->Plug(x0);
2520 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2521 // Assignment to named property of super.
2523 // stack : receiver ('this'), home_object
2524 DCHECK(prop != NULL);
2525 Literal* key = prop->key()->AsLiteral();
2526 DCHECK(key != NULL);
2528 __ Push(key->value());
2530 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2531 : Runtime::kStoreToSuper_Sloppy),
2536 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2537 // Assignment to keyed property of super.
2539 // stack : receiver ('this'), home_object, key
2540 DCHECK(prop != NULL);
2544 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2545 : Runtime::kStoreKeyedToSuper_Sloppy),
2550 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2551 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2552 // Assignment to a property, using a keyed store IC.
2554 // TODO(all): Could we pass this in registers rather than on the stack?
2555 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2556 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2559 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2560 if (FLAG_vector_stores) {
2561 EmitLoadStoreICSlot(expr->AssignmentSlot());
2564 CallIC(ic, expr->AssignmentFeedbackId());
2567 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2568 context()->Plug(x0);
2572 void FullCodeGenerator::VisitProperty(Property* expr) {
2573 Comment cmnt(masm_, "[ Property");
2574 SetExpressionPosition(expr);
2575 Expression* key = expr->key();
2577 if (key->IsPropertyName()) {
2578 if (!expr->IsSuperAccess()) {
2579 VisitForAccumulatorValue(expr->obj());
2580 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2581 EmitNamedPropertyLoad(expr);
2583 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2585 expr->obj()->AsSuperPropertyReference()->home_object());
2586 EmitNamedSuperPropertyLoad(expr);
2589 if (!expr->IsSuperAccess()) {
2590 VisitForStackValue(expr->obj());
2591 VisitForAccumulatorValue(expr->key());
2592 __ Move(LoadDescriptor::NameRegister(), x0);
2593 __ Pop(LoadDescriptor::ReceiverRegister());
2594 EmitKeyedPropertyLoad(expr);
2596 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2598 expr->obj()->AsSuperPropertyReference()->home_object());
2599 VisitForStackValue(expr->key());
2600 EmitKeyedSuperPropertyLoad(expr);
2603 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2604 context()->Plug(x0);
2608 void FullCodeGenerator::CallIC(Handle<Code> code,
2609 TypeFeedbackId ast_id) {
2611 // All calls must have a predictable size in full-codegen code to ensure that
2612 // the debugger can patch them correctly.
2613 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2617 // Code common for calls using the IC.
2618 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2619 Expression* callee = expr->expression();
2621 CallICState::CallType call_type =
2622 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2624 // Get the target function.
2625 if (call_type == CallICState::FUNCTION) {
2626 { StackValueContext context(this);
2627 EmitVariableLoad(callee->AsVariableProxy());
2628 PrepareForBailout(callee, NO_REGISTERS);
2630 // Push undefined as receiver. This is patched in the method prologue if it
2631 // is a sloppy mode method.
2633 UseScratchRegisterScope temps(masm_);
2634 Register temp = temps.AcquireX();
2635 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
2639 // Load the function from the receiver.
2640 DCHECK(callee->IsProperty());
2641 DCHECK(!callee->AsProperty()->IsSuperAccess());
2642 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2643 EmitNamedPropertyLoad(callee->AsProperty());
2644 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2645 // Push the target function under the receiver.
2650 EmitCall(expr, call_type);
2654 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2655 Expression* callee = expr->expression();
2656 DCHECK(callee->IsProperty());
2657 Property* prop = callee->AsProperty();
2658 DCHECK(prop->IsSuperAccess());
2659 SetExpressionPosition(prop);
2661 Literal* key = prop->key()->AsLiteral();
2662 DCHECK(!key->value()->IsSmi());
2664 // Load the function from the receiver.
2665 const Register scratch = x10;
2666 SuperPropertyReference* super_ref =
2667 callee->AsProperty()->obj()->AsSuperPropertyReference();
2668 VisitForStackValue(super_ref->home_object());
2669 VisitForAccumulatorValue(super_ref->this_var());
2671 __ Peek(scratch, kPointerSize);
2672 __ Push(x0, scratch);
2673 __ Push(key->value());
2674 __ Push(Smi::FromInt(language_mode()));
2678 // - this (receiver)
2679 // - this (receiver) <-- LoadFromSuper will pop here and below.
2682 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2684 // Replace home_object with target function.
2685 __ Poke(x0, kPointerSize);
2688 // - target function
2689 // - this (receiver)
2690 EmitCall(expr, CallICState::METHOD);
2694 // Code common for calls using the IC.
2695 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2698 VisitForAccumulatorValue(key);
2700 Expression* callee = expr->expression();
2702 // Load the function from the receiver.
2703 DCHECK(callee->IsProperty());
2704 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2705 __ Move(LoadDescriptor::NameRegister(), x0);
2706 EmitKeyedPropertyLoad(callee->AsProperty());
2707 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2709 // Push the target function under the receiver.
2713 EmitCall(expr, CallICState::METHOD);
2717 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2718 Expression* callee = expr->expression();
2719 DCHECK(callee->IsProperty());
2720 Property* prop = callee->AsProperty();
2721 DCHECK(prop->IsSuperAccess());
2722 SetExpressionPosition(prop);
2724 // Load the function from the receiver.
2725 const Register scratch = x10;
2726 SuperPropertyReference* super_ref =
2727 callee->AsProperty()->obj()->AsSuperPropertyReference();
2728 VisitForStackValue(super_ref->home_object());
2729 VisitForAccumulatorValue(super_ref->this_var());
2731 __ Peek(scratch, kPointerSize);
2732 __ Push(x0, scratch);
2733 VisitForStackValue(prop->key());
2734 __ Push(Smi::FromInt(language_mode()));
2738 // - this (receiver)
2739 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2743 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2745 // Replace home_object with target function.
2746 __ Poke(x0, kPointerSize);
2749 // - target function
2750 // - this (receiver)
2751 EmitCall(expr, CallICState::METHOD);
2755 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2756 // Load the arguments.
2757 ZoneList<Expression*>* args = expr->arguments();
2758 int arg_count = args->length();
2759 for (int i = 0; i < arg_count; i++) {
2760 VisitForStackValue(args->at(i));
2763 SetCallPosition(expr, arg_count);
2765 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2766 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2767 __ Peek(x1, (arg_count + 1) * kXRegSize);
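// x1 now holds the target function, which was pushed below the receiver and
// the arguments.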
2768 // Don't assign a type feedback id to the IC, since type feedback is provided
2769 // by the vector above.
2772 RecordJSReturnSite(expr);
2773 // Restore context register.
2774 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2775 context()->DropAndPlug(1, x0);
2779 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2780 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2781 // Prepare to push a copy of the first argument, or undefined if it doesn't exist.
2783 if (arg_count > 0) {
2784 __ Peek(x9, arg_count * kXRegSize);
2786 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2789 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2791 // Prepare to push the language mode.
2792 __ Mov(x11, Smi::FromInt(language_mode()));
2793 // Prepare to push the start position of the scope the call resides in.
2794 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2797 __ Push(x9, x10, x11, x12);
2799 // Do the runtime call.
2800 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2804 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2805 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2806 VariableProxy* callee = expr->expression()->AsVariableProxy();
2807 if (callee->var()->IsLookupSlot()) {
2809 SetExpressionPosition(callee);
2810 // Generate code for loading from variables potentially shadowed
2811 // by eval-introduced variables.
2812 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2815 // Call the runtime to find the function to call (returned in x0)
2816 // and the object holding it (returned in x1).
2817 __ Mov(x10, Operand(callee->name()));
2818 __ Push(context_register(), x10);
2819 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2820 __ Push(x0, x1); // Receiver, function.
2821 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2823 // If fast case code has been generated, emit code to push the
2824 // function and receiver and have the slow path jump around this code.
2826 if (done.is_linked()) {
2831 // The receiver is implicitly the global receiver. Indicate this
2832 // by passing undefined to the call function stub.
2833 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2838 VisitForStackValue(callee);
2839 // refEnv.WithBaseObject()
2840 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2841 __ Push(x10); // Reserved receiver slot.
2846 void FullCodeGenerator::VisitCall(Call* expr) {
2848 // We want to verify that RecordJSReturnSite gets called on all paths
2849 // through this function. Avoid early returns.
2850 expr->return_is_recorded_ = false;
2853 Comment cmnt(masm_, "[ Call");
2854 Expression* callee = expr->expression();
2855 Call::CallType call_type = expr->GetCallType(isolate());
2857 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2858 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2859 // to resolve the function we need to call. Then we call the resolved
2860 // function using the given arguments.
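// For example, a call written as eval(src) only behaves as a direct eval if
// the resolved callee is the global eval function, which is only known at
// runtime.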
2861 ZoneList<Expression*>* args = expr->arguments();
2862 int arg_count = args->length();
2864 PushCalleeAndWithBaseObject(expr);
2866 // Push the arguments.
2867 for (int i = 0; i < arg_count; i++) {
2868 VisitForStackValue(args->at(i));
2871 // Push a copy of the function (found below the arguments) and resolve eval.
2873 __ Peek(x10, (arg_count + 1) * kPointerSize);
2875 EmitResolvePossiblyDirectEval(arg_count);
2877 // Touch up the stack with the resolved function.
2878 __ Poke(x0, (arg_count + 1) * kPointerSize);
2880 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2882 // Record source position for debugger.
2883 SetCallPosition(expr, arg_count);
2885 // Call the evaluated function.
2886 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2887 __ Peek(x1, (arg_count + 1) * kXRegSize);
2889 RecordJSReturnSite(expr);
2890 // Restore context register.
2891 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2892 context()->DropAndPlug(1, x0);
2894 } else if (call_type == Call::GLOBAL_CALL) {
2895 EmitCallWithLoadIC(expr);
2897 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2898 // Call to a lookup slot (dynamically introduced variable).
2899 PushCalleeAndWithBaseObject(expr);
2901 } else if (call_type == Call::PROPERTY_CALL) {
2902 Property* property = callee->AsProperty();
2903 bool is_named_call = property->key()->IsPropertyName();
2904 if (property->IsSuperAccess()) {
2905 if (is_named_call) {
2906 EmitSuperCallWithLoadIC(expr);
2908 EmitKeyedSuperCallWithLoadIC(expr);
2911 VisitForStackValue(property->obj());
2912 if (is_named_call) {
2913 EmitCallWithLoadIC(expr);
2915 EmitKeyedCallWithLoadIC(expr, property->key());
2918 } else if (call_type == Call::SUPER_CALL) {
2919 EmitSuperConstructorCall(expr);
2921 DCHECK(call_type == Call::OTHER_CALL);
2922 // Call to an arbitrary expression not handled specially above.
2923 VisitForStackValue(callee);
2924 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2926 // Emit function call.
2931 // RecordJSReturnSite should have been called.
2932 DCHECK(expr->return_is_recorded_);
2937 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2938 Comment cmnt(masm_, "[ CallNew");
2939 // According to ECMA-262, section 11.2.2, page 44, the function
2940 // expression in new calls must be evaluated before the arguments.
2943 // Push constructor on the stack. If it's not a function it's used as
2944 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
2946 DCHECK(!expr->expression()->IsSuperPropertyReference());
2947 VisitForStackValue(expr->expression());
2949 // Push the arguments ("left-to-right") on the stack.
2950 ZoneList<Expression*>* args = expr->arguments();
2951 int arg_count = args->length();
2952 for (int i = 0; i < arg_count; i++) {
2953 VisitForStackValue(args->at(i));
2956 // Call the construct call builtin that handles allocation and
2957 // constructor invocation.
2958 SetConstructCallPosition(expr);
2960 // Load function and argument count into x1 and x0.
2961 __ Mov(x0, arg_count);
2962 __ Peek(x1, arg_count * kXRegSize);
2964 // Record call targets in unoptimized code.
2965 if (FLAG_pretenuring_call_new) {
2966 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2967 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
2968 expr->CallNewFeedbackSlot().ToInt() + 1);
2971 __ LoadObject(x2, FeedbackVector());
2972 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2974 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2975 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2976 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2977 context()->Plug(x0);
2981 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2982 SuperCallReference* super_call_ref =
2983 expr->expression()->AsSuperCallReference();
2984 DCHECK_NOT_NULL(super_call_ref);
2986 EmitLoadSuperConstructor(super_call_ref);
2987 __ push(result_register());
2989 // Push the arguments ("left-to-right") on the stack.
2990 ZoneList<Expression*>* args = expr->arguments();
2991 int arg_count = args->length();
2992 for (int i = 0; i < arg_count; i++) {
2993 VisitForStackValue(args->at(i));
2996 // Call the construct call builtin that handles allocation and
2997 // constructor invocation.
2998 SetConstructCallPosition(expr);
3000 // Load original constructor into x4.
3001 VisitForAccumulatorValue(super_call_ref->new_target_var());
3002 __ Mov(x4, result_register());
3004 // Load function and argument count into x1 and x0.
3005 __ Mov(x0, arg_count);
3006 __ Peek(x1, arg_count * kXRegSize);
3008 // Record call targets in unoptimized code.
3009 if (FLAG_pretenuring_call_new) {
3011 /* TODO(dslomov): support pretenuring.
3012 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3013 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3014 expr->CallNewFeedbackSlot().ToInt() + 1);
3018 __ LoadObject(x2, FeedbackVector());
3019 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
3021 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3022 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3024 RecordJSReturnSite(expr);
3026 context()->Plug(x0);
3030 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3031 ZoneList<Expression*>* args = expr->arguments();
3032 DCHECK(args->length() == 1);
3034 VisitForAccumulatorValue(args->at(0));
3036 Label materialize_true, materialize_false;
3037 Label* if_true = NULL;
3038 Label* if_false = NULL;
3039 Label* fall_through = NULL;
3040 context()->PrepareTest(&materialize_true, &materialize_false,
3041 &if_true, &if_false, &fall_through);
3043 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3044 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
3046 context()->Plug(if_true, if_false);
3050 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3051 ZoneList<Expression*>* args = expr->arguments();
3052 DCHECK(args->length() == 1);
3054 VisitForAccumulatorValue(args->at(0));
3056 Label materialize_true, materialize_false;
3057 Label* if_true = NULL;
3058 Label* if_false = NULL;
3059 Label* fall_through = NULL;
3060 context()->PrepareTest(&materialize_true, &materialize_false,
3061 &if_true, &if_false, &fall_through);
3063 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
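// A non-negative smi has both the tag bit and the sign bit clear, so testing
// the two bits together is sufficient.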
3065 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3066 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
3068 context()->Plug(if_true, if_false);
3072 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3073 ZoneList<Expression*>* args = expr->arguments();
3074 DCHECK(args->length() == 1);
3076 VisitForAccumulatorValue(args->at(0));
3078 Label materialize_true, materialize_false;
3079 Label* if_true = NULL;
3080 Label* if_false = NULL;
3081 Label* fall_through = NULL;
3082 context()->PrepareTest(&materialize_true, &materialize_false,
3083 &if_true, &if_false, &fall_through);
3085 __ JumpIfSmi(x0, if_false);
3086 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3087 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3088 // Undetectable objects behave like undefined when tested with typeof.
3089 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3090 __ Tbnz(x11, Map::kIsUndetectable, if_false);
3091 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
3092 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3094 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3095 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3096 Split(le, if_true, if_false, fall_through);
3098 context()->Plug(if_true, if_false);
3102 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3103 ZoneList<Expression*>* args = expr->arguments();
3104 DCHECK(args->length() == 1);
3106 VisitForAccumulatorValue(args->at(0));
3108 Label materialize_true, materialize_false;
3109 Label* if_true = NULL;
3110 Label* if_false = NULL;
3111 Label* fall_through = NULL;
3112 context()->PrepareTest(&materialize_true, &materialize_false,
3113 &if_true, &if_false, &fall_through);
3115 __ JumpIfSmi(x0, if_false);
3116 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3117 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3118 Split(ge, if_true, if_false, fall_through);
3120 context()->Plug(if_true, if_false);
3124 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3125 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
3126 ZoneList<Expression*>* args = expr->arguments();
3127 DCHECK(args->length() == 1);
3129 VisitForAccumulatorValue(args->at(0));
3131 Label materialize_true, materialize_false;
3132 Label* if_true = NULL;
3133 Label* if_false = NULL;
3134 Label* fall_through = NULL;
3135 context()->PrepareTest(&materialize_true, &materialize_false,
3136 &if_true, &if_false, &fall_through);
3138 __ JumpIfSmi(x0, if_false);
3139 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3140 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3141 __ Tst(x11, 1 << Map::kIsUndetectable);
3142 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3143 Split(ne, if_true, if_false, fall_through);
3145 context()->Plug(if_true, if_false);
3149 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3150 CallRuntime* expr) {
3151 ZoneList<Expression*>* args = expr->arguments();
3152 DCHECK(args->length() == 1);
3153 VisitForAccumulatorValue(args->at(0));
3155 Label materialize_true, materialize_false, skip_lookup;
3156 Label* if_true = NULL;
3157 Label* if_false = NULL;
3158 Label* fall_through = NULL;
3159 context()->PrepareTest(&materialize_true, &materialize_false,
3160 &if_true, &if_false, &fall_through);
3162 Register object = x0;
3163 __ AssertNotSmi(object);
3166 Register bitfield2 = x11;
3167 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3168 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3169 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3171 // Check for fast case object. Generate false result for slow case object.
3172 Register props = x12;
3173 Register props_map = x12;
3174 Register hash_table_map = x13;
3175 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3176 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
3177 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
3178 __ Cmp(props_map, hash_table_map);
3181 // Look for the valueOf name in the descriptor array and indicate false if it
3182 // is found. Since we omit an enumeration index check, a valueOf added via a
3183 // transition that shares this descriptor array is reported as a false positive.
3186 // Skip loop if no descriptors are valid.
3187 Register descriptors = x12;
3188 Register descriptors_length = x13;
3189 __ NumberOfOwnDescriptors(descriptors_length, map);
3190 __ Cbz(descriptors_length, &done);
3192 __ LoadInstanceDescriptors(map, descriptors);
3194 // Calculate the end of the descriptor array.
3195 Register descriptors_end = x14;
3196 __ Mov(x15, DescriptorArray::kDescriptorSize);
3197 __ Mul(descriptors_length, descriptors_length, x15);
3198 // Calculate location of the first key name.
3199 __ Add(descriptors, descriptors,
3200 DescriptorArray::kFirstOffset - kHeapObjectTag);
3201 // Calculate the end of the descriptor array.
3202 __ Add(descriptors_end, descriptors,
3203 Operand(descriptors_length, LSL, kPointerSizeLog2));
3205 // Loop through all the keys in the descriptor array. If one of these is the
3206 // string "valueOf" the result is false.
3207 Register valueof_string = x1;
3208 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
3209 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3211 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
3212 __ Cmp(x15, valueof_string);
3214 __ Cmp(descriptors, descriptors_end);
3219 // Set the bit in the map to indicate that there is no local valueOf field.
3220 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3221 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3222 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3224 __ Bind(&skip_lookup);
3226 // If a valueOf property is not found on the object, check that its prototype
3227 // is the unmodified String prototype. If it is not, the result is false.
3228 Register prototype = x1;
3229 Register global_idx = x2;
3230 Register native_context = x2;
3231 Register string_proto = x3;
3232 Register proto_map = x4;
3233 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3234 __ JumpIfSmi(prototype, if_false);
3235 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
3236 __ Ldr(global_idx, GlobalObjectMemOperand());
3237 __ Ldr(native_context,
3238 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
3239 __ Ldr(string_proto,
3240 ContextMemOperand(native_context,
3241 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3242 __ Cmp(proto_map, string_proto);
3244 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3245 Split(eq, if_true, if_false, fall_through);
3247 context()->Plug(if_true, if_false);
3251 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments();
3253 DCHECK(args->length() == 1);
3255 VisitForAccumulatorValue(args->at(0));
3257 Label materialize_true, materialize_false;
3258 Label* if_true = NULL;
3259 Label* if_false = NULL;
3260 Label* fall_through = NULL;
3261 context()->PrepareTest(&materialize_true, &materialize_false,
3262 &if_true, &if_false, &fall_through);
3264 __ JumpIfSmi(x0, if_false);
3265 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3266 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3267 Split(eq, if_true, if_false, fall_through);
3269 context()->Plug(if_true, if_false);
3273 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3274 ZoneList<Expression*>* args = expr->arguments();
3275 DCHECK(args->length() == 1);
3277 VisitForAccumulatorValue(args->at(0));
3279 Label materialize_true, materialize_false;
3280 Label* if_true = NULL;
3281 Label* if_false = NULL;
3282 Label* fall_through = NULL;
3283 context()->PrepareTest(&materialize_true, &materialize_false,
3284 &if_true, &if_false, &fall_through);
3286 // Only a HeapNumber can be -0.0, so return false if we have something else.
3287 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3289 // Test the bit pattern.
3290 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3291 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
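// -0.0 has the bit pattern 0x8000000000000000 (only the sign bit set), the
// only value for which subtracting 1 causes signed overflow and sets V.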
3293 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3294 Split(vs, if_true, if_false, fall_through);
3296 context()->Plug(if_true, if_false);
3300 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3301 ZoneList<Expression*>* args = expr->arguments();
3302 DCHECK(args->length() == 1);
3304 VisitForAccumulatorValue(args->at(0));
3306 Label materialize_true, materialize_false;
3307 Label* if_true = NULL;
3308 Label* if_false = NULL;
3309 Label* fall_through = NULL;
3310 context()->PrepareTest(&materialize_true, &materialize_false,
3311 &if_true, &if_false, &fall_through);
3313 __ JumpIfSmi(x0, if_false);
3314 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3315 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3316 Split(eq, if_true, if_false, fall_through);
3318 context()->Plug(if_true, if_false);
3322 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3323 ZoneList<Expression*>* args = expr->arguments();
3324 DCHECK(args->length() == 1);
3326 VisitForAccumulatorValue(args->at(0));
3328 Label materialize_true, materialize_false;
3329 Label* if_true = NULL;
3330 Label* if_false = NULL;
3331 Label* fall_through = NULL;
3332 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3333 &if_false, &fall_through);
3335 __ JumpIfSmi(x0, if_false);
3336 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
3337 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3338 Split(eq, if_true, if_false, fall_through);
3340 context()->Plug(if_true, if_false);
3344 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3345 ZoneList<Expression*>* args = expr->arguments();
3346 DCHECK(args->length() == 1);
3348 VisitForAccumulatorValue(args->at(0));
3350 Label materialize_true, materialize_false;
3351 Label* if_true = NULL;
3352 Label* if_false = NULL;
3353 Label* fall_through = NULL;
3354 context()->PrepareTest(&materialize_true, &materialize_false,
3355 &if_true, &if_false, &fall_through);
3357 __ JumpIfSmi(x0, if_false);
3358 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3359 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3360 Split(eq, if_true, if_false, fall_through);
3362 context()->Plug(if_true, if_false);
3366 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3367 ZoneList<Expression*>* args = expr->arguments();
3368 DCHECK(args->length() == 1);
3370 VisitForAccumulatorValue(args->at(0));
3372 Label materialize_true, materialize_false;
3373 Label* if_true = NULL;
3374 Label* if_false = NULL;
3375 Label* fall_through = NULL;
3376 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3377 &if_false, &fall_through);
3379 __ JumpIfSmi(x0, if_false);
3381 Register type_reg = x11;
3382 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3383 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3384 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3385 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
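// Subtracting the lower bound and comparing unsigned ('ls' below) checks the
// inclusive range FIRST_JS_PROXY_TYPE..LAST_JS_PROXY_TYPE with one branch.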
3386 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3387 Split(ls, if_true, if_false, fall_through);
3389 context()->Plug(if_true, if_false);
3393 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3394 DCHECK(expr->arguments()->length() == 0);
3396 Label materialize_true, materialize_false;
3397 Label* if_true = NULL;
3398 Label* if_false = NULL;
3399 Label* fall_through = NULL;
3400 context()->PrepareTest(&materialize_true, &materialize_false,
3401 &if_true, &if_false, &fall_through);
3403 // Get the frame pointer for the calling frame.
3404 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3406 // Skip the arguments adaptor frame if it exists.
3407 Label check_frame_marker;
3408 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3409 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3410 __ B(ne, &check_frame_marker);
3411 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3413 // Check the marker in the calling frame.
3414 __ Bind(&check_frame_marker);
3415 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3416 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3417 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3418 Split(eq, if_true, if_false, fall_through);
3420 context()->Plug(if_true, if_false);
3424 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3425 ZoneList<Expression*>* args = expr->arguments();
3426 DCHECK(args->length() == 2);
3428 // Load the two objects into registers and perform the comparison.
3429 VisitForStackValue(args->at(0));
3430 VisitForAccumulatorValue(args->at(1));
3432 Label materialize_true, materialize_false;
3433 Label* if_true = NULL;
3434 Label* if_false = NULL;
3435 Label* fall_through = NULL;
3436 context()->PrepareTest(&materialize_true, &materialize_false,
3437 &if_true, &if_false, &fall_through);
3441 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3442 Split(eq, if_true, if_false, fall_through);
3444 context()->Plug(if_true, if_false);
3448 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3449 ZoneList<Expression*>* args = expr->arguments();
3450 DCHECK(args->length() == 1);
3452 // ArgumentsAccessStub expects the key in x1.
3453 VisitForAccumulatorValue(args->at(0));
3455 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3456 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3458 context()->Plug(x0);
3462 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3463 DCHECK(expr->arguments()->length() == 0);
3465 // Get the number of formal parameters.
3466 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3468 // Check if the calling frame is an arguments adaptor frame.
3469 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3470 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3471 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3474 // Arguments adaptor case: Read the arguments length from the adaptor frame.
3476 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3479 context()->Plug(x0);
3483 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3484 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3485 ZoneList<Expression*>* args = expr->arguments();
3486 DCHECK(args->length() == 1);
3487 Label done, null, function, non_function_constructor;
3489 VisitForAccumulatorValue(args->at(0));
3491 // If the object is a smi, we return null.
3492 __ JumpIfSmi(x0, &null);
3494 // Check that the object is a JS object but take special care of JS
3495 // functions to make sure they have 'Function' as their class.
3496 // Assume that there are only two callable types, one at each end of the type
3497 // range for JS object types; this saves extra comparisons.
3498 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3499 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3500 // x10: object's map.
3501 // x11: object's type.
3503 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3504 FIRST_SPEC_OBJECT_TYPE + 1);
3505 __ B(eq, &function);
3507 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3508 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3509 LAST_SPEC_OBJECT_TYPE - 1);
3510 __ B(eq, &function);
3511 // Assume that there is no larger type.
3512 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3514 // Check if the constructor in the map is a JS function.
3515 Register instance_type = x14;
3516 __ GetMapConstructor(x12, x10, x13, instance_type);
3517 __ Cmp(instance_type, JS_FUNCTION_TYPE);
3518 __ B(ne, &non_function_constructor);
3520 // x12 now contains the constructor function. Grab the
3521 // instance class name from there.
3522 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3524 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3527 // Functions have class 'Function'.
3529 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3532 // Objects with a non-function constructor have class 'Object'.
3533 __ Bind(&non_function_constructor);
3534 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3537 // Non-JS objects have class null.
3539 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3544 context()->Plug(x0);
3548 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3549 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3550 ZoneList<Expression*>* args = expr->arguments();
3551 DCHECK(args->length() == 1);
3552 VisitForAccumulatorValue(args->at(0)); // Load the object.
3555 // If the object is a smi return the object.
3556 __ JumpIfSmi(x0, &done);
3557 // If the object is not a value type, return the object.
3558 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3559 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3562 context()->Plug(x0);
3566 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3567 ZoneList<Expression*>* args = expr->arguments();
3568 DCHECK_EQ(1, args->length());
3570 VisitForAccumulatorValue(args->at(0));
3572 Label materialize_true, materialize_false;
3573 Label* if_true = nullptr;
3574 Label* if_false = nullptr;
3575 Label* fall_through = nullptr;
3576 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3577 &if_false, &fall_through);
3579 __ JumpIfSmi(x0, if_false);
3580 __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
3581 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3582 Split(eq, if_true, if_false, fall_through);
3584 context()->Plug(if_true, if_false);
3588 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3589 ZoneList<Expression*>* args = expr->arguments();
3590 DCHECK(args->length() == 2);
3591 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3592 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3594 VisitForAccumulatorValue(args->at(0)); // Load the object.
3596 Register object = x0;
3597 Register result = x0;
3598 Register stamp_addr = x10;
3599 Register stamp_cache = x11;
3601 if (index->value() == 0) {
3602 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3604 Label runtime, done;
3605 if (index->value() < JSDate::kFirstUncachedField) {
3606 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3607 __ Mov(stamp_addr, stamp);
3608 __ Ldr(stamp_addr, MemOperand(stamp_addr));
3609 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3610 __ Cmp(stamp_addr, stamp_cache);
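// If the object's cache stamp matches the isolate's date cache stamp, the
// cached field is still valid and is loaded directly; otherwise the
// get_date_field C helper below is called.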
3612 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3613 kPointerSize * index->value()));
3619 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3623 context()->Plug(result);
3627 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3628 ZoneList<Expression*>* args = expr->arguments();
3629 DCHECK_EQ(3, args->length());
3631 Register string = x0;
3632 Register index = x1;
3633 Register value = x2;
3634 Register scratch = x10;
3636 VisitForStackValue(args->at(0)); // index
3637 VisitForStackValue(args->at(1)); // value
3638 VisitForAccumulatorValue(args->at(2)); // string
3639 __ Pop(value, index);
3641 if (FLAG_debug_code) {
3642 __ AssertSmi(value, kNonSmiValue);
3643 __ AssertSmi(index, kNonSmiIndex);
3644 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3645 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3646 one_byte_seq_type);
3647 }
3649 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3650 __ SmiUntag(value);
3651 __ SmiUntag(index);
3652 __ Strb(value, MemOperand(scratch, index));
3653 context()->Plug(string);
3657 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3658 ZoneList<Expression*>* args = expr->arguments();
3659 DCHECK_EQ(3, args->length());
3661 Register string = x0;
3662 Register index = x1;
3663 Register value = x2;
3664 Register scratch = x10;
3666 VisitForStackValue(args->at(0)); // index
3667 VisitForStackValue(args->at(1)); // value
3668 VisitForAccumulatorValue(args->at(2)); // string
3669 __ Pop(value, index);
3671 if (FLAG_debug_code) {
3672 __ AssertSmi(value, kNonSmiValue);
3673 __ AssertSmi(index, kNonSmiIndex);
3674 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3675 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3676 two_byte_seq_type);
3677 }
3679 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3680 __ SmiUntag(value);
3681 __ SmiUntag(index);
3682 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3683 context()->Plug(string);
3687 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3688 ZoneList<Expression*>* args = expr->arguments();
3689 DCHECK(args->length() == 2);
3690 VisitForStackValue(args->at(0)); // Load the object.
3691 VisitForAccumulatorValue(args->at(1)); // Load the value.
3692 __ Pop(x1);
3693 // x0 = value.
3694 // x1 = object.
3696 Label done;
3697 // If the object is a smi, return the value.
3698 __ JumpIfSmi(x1, &done);
3700 // If the object is not a value type, return the value.
3701 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3704 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3705 // Update the write barrier. Save the value as it will be
3706 // overwritten by the write barrier code and is needed afterward.
3707 __ Mov(x10, x0);
3708 __ RecordWriteField(
3709 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3711 __ Bind(&done);
3712 context()->Plug(x0);
3716 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3717 ZoneList<Expression*>* args = expr->arguments();
3718 DCHECK_EQ(args->length(), 1);
3720 // Load the argument into x0 and call the stub.
3721 VisitForAccumulatorValue(args->at(0));
3723 NumberToStringStub stub(isolate());
3724 __ CallStub(&stub);
3725 context()->Plug(x0);
3729 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3730 ZoneList<Expression*>* args = expr->arguments();
3731 DCHECK_EQ(1, args->length());
3733 // Load the argument into x0 and convert it.
3734 VisitForAccumulatorValue(args->at(0));
3736 ToObjectStub stub(isolate());
3737 __ CallStub(&stub);
3738 context()->Plug(x0);
3742 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3743 ZoneList<Expression*>* args = expr->arguments();
3744 DCHECK(args->length() == 1);
3746 VisitForAccumulatorValue(args->at(0));
3748 Label done;
3749 Register code = x0;
3750 Register result = x1;
3752 StringCharFromCodeGenerator generator(code, result);
3753 generator.GenerateFast(masm_);
3754 __ B(&done);
3756 NopRuntimeCallHelper call_helper;
3757 generator.GenerateSlow(masm_, call_helper);
3759 __ Bind(&done);
3760 context()->Plug(result);
3764 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3765 ZoneList<Expression*>* args = expr->arguments();
3766 DCHECK(args->length() == 2);
3768 VisitForStackValue(args->at(0));
3769 VisitForAccumulatorValue(args->at(1));
3771 Register object = x1;
3772 Register index = x0;
3773 Register result = x3;
3775 __ Pop(object);
3777 Label need_conversion;
3778 Label index_out_of_range;
3779 Label done;
3780 StringCharCodeAtGenerator generator(object,
3781 index,
3782 result,
3783 &need_conversion,
3784 &need_conversion,
3785 &index_out_of_range,
3786 STRING_INDEX_IS_NUMBER);
3787 generator.GenerateFast(masm_);
3788 __ B(&done);
3790 __ Bind(&index_out_of_range);
3791 // When the index is out of range, the spec requires us to return NaN.
3792 __ LoadRoot(result, Heap::kNanValueRootIndex);
3793 __ B(&done);
3795 __ Bind(&need_conversion);
3796 // Load the undefined value into the result register, which will
3797 // trigger conversion.
3798 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3799 __ B(&done);
3801 NopRuntimeCallHelper call_helper;
3802 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3804 __ Bind(&done);
3805 context()->Plug(result);
3809 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3810 ZoneList<Expression*>* args = expr->arguments();
3811 DCHECK(args->length() == 2);
3813 VisitForStackValue(args->at(0));
3814 VisitForAccumulatorValue(args->at(1));
3816 Register object = x1;
3817 Register index = x0;
3818 Register result = x0;
3819 Register scratch = x3;
3821 __ Pop(object);
3822 Label need_conversion;
3823 Label index_out_of_range;
3824 Label done;
3825 StringCharAtGenerator generator(object,
3826 index,
3827 scratch,
3828 result,
3829 &need_conversion,
3830 &need_conversion,
3831 &index_out_of_range,
3832 STRING_INDEX_IS_NUMBER);
3833 generator.GenerateFast(masm_);
3834 __ B(&done);
3836 __ Bind(&index_out_of_range);
3837 // When the index is out of range, the spec requires us to return
3838 // the empty string.
3839 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3840 __ B(&done);
3842 __ Bind(&need_conversion);
3843 // Move smi zero into the result register, which will trigger conversion.
3844 __ Mov(result, Smi::FromInt(0));
3845 __ B(&done);
3847 NopRuntimeCallHelper call_helper;
3848 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3850 __ Bind(&done);
3851 context()->Plug(result);
3855 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3856 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3857 ZoneList<Expression*>* args = expr->arguments();
3858 DCHECK_EQ(2, args->length());
3860 VisitForStackValue(args->at(0));
3861 VisitForAccumulatorValue(args->at(1));
3864 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3865 __ CallStub(&stub);
3867 context()->Plug(x0);
3871 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3872 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3873 ZoneList<Expression*>* args = expr->arguments();
3874 DCHECK(args->length() >= 2);
3876 int arg_count = args->length() - 2; // 2 ~ receiver and function.
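// The call arguments are [receiver, arg1..argN, function]: everything except
// the function is pushed onto the stack, the function stays in x0. A real
// JSFunction is invoked directly; anything else falls back to Runtime::kCall.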
3877 for (int i = 0; i < arg_count + 1; i++) {
3878 VisitForStackValue(args->at(i));
3879 }
3880 VisitForAccumulatorValue(args->last()); // Function.
3882 Label runtime, done;
3883 // Check for non-function argument (including proxy).
3884 __ JumpIfSmi(x0, &runtime);
3885 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3887 // InvokeFunction requires the function in x1. Move it in there.
3888 __ Mov(x1, result_register());
3889 ParameterCount count(arg_count);
3890 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3891 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3892 __ B(&done);
3894 __ Bind(&runtime);
3895 __ Push(x0);
3896 __ CallRuntime(Runtime::kCall, args->length());
3897 __ Bind(&done);
3899 context()->Plug(x0);
3903 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3904 ZoneList<Expression*>* args = expr->arguments();
3905 DCHECK(args->length() == 2);
3908 VisitForStackValue(args->at(0));
3911 VisitForStackValue(args->at(1));
3912 __ CallRuntime(Runtime::kGetPrototype, 1);
3913 __ Push(result_register());
3915 // Load original constructor into x4.
3916 __ Peek(x4, 1 * kPointerSize);
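// Forward the caller's own arguments to the super constructor: if the default
// constructor was entered through an arguments adaptor frame, copy them from
// that frame; otherwise there are no arguments to pass on.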
3918 // Check if the calling frame is an arguments adaptor frame.
3919 Label adaptor_frame, args_set_up, runtime;
3920 __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3921 __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
3922 __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3923 __ B(eq, &adaptor_frame);
3924 // default constructor has no arguments, so no adaptor frame means no args.
3925 __ Mov(x0, Operand(0));
3928 // Copy arguments from adaptor frame.
3930 __ bind(&adaptor_frame);
3931 __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
3932 __ SmiUntag(x1, x1);
3936 // Get arguments pointer in x11.
3937 __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
3938 __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
3941 // Pre-decrement x11 with kPointerSize on each iteration.
3942 // Pre-decrement in order to skip receiver.
3943 __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
3945 __ Sub(x1, x1, Operand(1));
3949 __ bind(&args_set_up);
3950 __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
3951 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
3953 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
3954 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3958 context()->Plug(result_register());
3962 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3963 RegExpConstructResultStub stub(isolate());
3964 ZoneList<Expression*>* args = expr->arguments();
3965 DCHECK(args->length() == 3);
3966 VisitForStackValue(args->at(0));
3967 VisitForStackValue(args->at(1));
3968 VisitForAccumulatorValue(args->at(2));
3971 context()->Plug(x0);
3975 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3976 ZoneList<Expression*>* args = expr->arguments();
3977 DCHECK_EQ(2, args->length());
3978 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
3979 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3981 Handle<FixedArray> jsfunction_result_caches(
3982 isolate()->native_context()->jsfunction_result_caches());
3983 if (jsfunction_result_caches->length() <= cache_id) {
3984 __ Abort(kAttemptToUseUndefinedCache);
3985 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3986 context()->Plug(x0);
3987 return;
3988 }
3990 VisitForAccumulatorValue(args->at(1));
3992 Register key = x0;
3993 Register cache = x1;
3994 __ Ldr(cache, GlobalObjectMemOperand());
3995 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3996 __ Ldr(cache, ContextMemOperand(cache,
3997 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3998 __ Ldr(cache,
3999 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4001 Label done;
4002 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
4003 JSFunctionResultCache::kFingerOffset));
4004 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
4005 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
4007 // Load the key and data from the cache.
4008 __ Ldp(x2, x3, MemOperand(x3));
4010 __ Cmp(key, x2);
4011 __ CmovX(x0, x3, eq);
4012 __ B(eq, &done);
4014 // Call runtime to perform the lookup.
4015 __ Push(cache, key);
4016 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4018 __ Bind(&done);
4019 context()->Plug(x0);
4023 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4024 ZoneList<Expression*>* args = expr->arguments();
4025 VisitForAccumulatorValue(args->at(0));
4027 Label materialize_true, materialize_false;
4028 Label* if_true = NULL;
4029 Label* if_false = NULL;
4030 Label* fall_through = NULL;
4031 context()->PrepareTest(&materialize_true, &materialize_false,
4032 &if_true, &if_false, &fall_through);
4034 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4035 __ Tst(x10, String::kContainsCachedArrayIndexMask);
4036 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4037 Split(eq, if_true, if_false, fall_through);
4039 context()->Plug(if_true, if_false);
4043 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4044 ZoneList<Expression*>* args = expr->arguments();
4045 DCHECK(args->length() == 1);
4046 VisitForAccumulatorValue(args->at(0));
4048 __ AssertString(x0);
4050 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4051 __ IndexFromHash(x10, x0);
4053 context()->Plug(x0);
4057 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4058 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
4060 ZoneList<Expression*>* args = expr->arguments();
4061 DCHECK(args->length() == 2);
4062 VisitForStackValue(args->at(1));
4063 VisitForAccumulatorValue(args->at(0));
4065 Register array = x0;
4066 Register result = x0;
4067 Register elements = x1;
4068 Register element = x2;
4069 Register separator = x3;
4070 Register array_length = x4;
4071 Register result_pos = x5;
4072 Register map = x6;
4073 Register string_length = x10;
4074 Register elements_end = x11;
4075 Register string = x12;
4076 Register scratch1 = x13;
4077 Register scratch2 = x14;
4078 Register scratch3 = x7;
4079 Register separator_length = x15;
4081 Label bailout, done, one_char_separator, long_separator,
4082 non_trivial_array, not_size_one_array, loop,
4083 empty_separator_loop, one_char_separator_loop,
4084 one_char_separator_loop_entry, long_separator_loop;
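// The join is done in two passes: first every element is checked to be a flat
// one-byte string and the total result length is accumulated, then the result
// string is allocated and filled by one of three copy loops, specialized for
// an empty, single-character or longer separator.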
4086 // The separator operand is on the stack.
4087 __ Pop(separator);
4089 // Check that the array is a JSArray.
4090 __ JumpIfSmi(array, &bailout);
4091 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4093 // Check that the array has fast elements.
4094 __ CheckFastElements(map, scratch1, &bailout);
4096 // If the array has length zero, return the empty string.
4097 // Load and untag the length of the array.
4098 // It is an unsigned value, so we can skip sign extension.
4099 // We assume little endianness.
4100 __ Ldrsw(array_length,
4101 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
4102 __ Cbnz(array_length, &non_trivial_array);
4103 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4106 __ Bind(&non_trivial_array);
4107 // Get the FixedArray containing array's elements.
4108 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4110 // Check that all array elements are sequential one-byte strings, and
4111 // accumulate the sum of their lengths.
4112 __ Mov(string_length, 0);
4113 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4114 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4115 // Loop condition: while (element < elements_end).
4116 // Live values in registers:
4117 // elements: Fixed array of strings.
4118 // array_length: Length of the fixed array of strings (not smi)
4119 // separator: Separator string
4120 // string_length: Accumulated sum of string lengths (not smi).
4121 // element: Current array element.
4122 // elements_end: Array end.
4123 if (FLAG_debug_code) {
4124 __ Cmp(array_length, 0);
4125 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4126 }
4127 __ Bind(&loop);
4128 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4129 __ JumpIfSmi(string, &bailout);
4130 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4131 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4132 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4133 __ Ldrsw(scratch1,
4134 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
4135 __ Adds(string_length, string_length, scratch1);
4136 __ B(vs, &bailout);
4137 __ Cmp(element, elements_end);
4138 __ B(lt, &loop);
4140 // If array_length is 1, return elements[0], a string.
4141 __ Cmp(array_length, 1);
4142 __ B(ne, &not_size_one_array);
4143 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
4144 __ B(&done);
4146 __ Bind(&not_size_one_array);
4148 // Live values in registers:
4149 // separator: Separator string
4150 // array_length: Length of the array (not smi).
4151 // string_length: Sum of string lengths (not smi).
4152 // elements: FixedArray of strings.
4154 // Check that the separator is a flat one-byte string.
4155 __ JumpIfSmi(separator, &bailout);
4156 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4157 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4158 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4160 // Add (separator length times array_length) - separator length to the
4161 // string_length to get the length of the result string.
4162 // Load the separator length as untagged.
4163 // We assume little endianness, and that the length is positive.
4164 __ Ldrsw(separator_length,
4165 UntagSmiFieldMemOperand(separator,
4166 SeqOneByteString::kLengthOffset));
4167 __ Sub(string_length, string_length, separator_length);
4168 __ Umaddl(string_length, array_length.W(), separator_length.W(),
4169 string_length);
4171 // Get first element in the array.
4172 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4173 // Live values in registers:
4174 // element: First array element
4175 // separator: Separator string
4176 // string_length: Length of result string (not smi)
4177 // array_length: Length of the array (not smi).
4178 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
4179 &bailout);
4181 // Prepare for looping. Set up elements_end to end of the array. Set
4182 // result_pos to the position of the result where to write the first
4183 // character.
4184 // TODO(all): useless unless AllocateOneByteString trashes the register.
4185 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4186 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4188 // Check the length of the separator.
4189 __ Cmp(separator_length, 1);
4190 __ B(eq, &one_char_separator);
4191 __ B(gt, &long_separator);
4193 // Empty separator case
4194 __ Bind(&empty_separator_loop);
4195 // Live values in registers:
4196 // result_pos: the position to which we are currently copying characters.
4197 // element: Current array element.
4198 // elements_end: Array end.
4200 // Copy next array element to the result.
4201 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4202 __ Ldrsw(string_length,
4203 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4204 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4205 __ CopyBytes(result_pos, string, string_length, scratch1);
4206 __ Cmp(element, elements_end);
4207 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4210 // One-character separator case
4211 __ Bind(&one_char_separator);
4212 // Replace separator with its one-byte character value.
4213 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4214 // Jump into the loop after the code that copies the separator, so the first
4215 // element is not preceded by a separator
4216 __ B(&one_char_separator_loop_entry);
4218 __ Bind(&one_char_separator_loop);
4219 // Live values in registers:
4220 // result_pos: the position to which we are currently copying characters.
4221 // element: Current array element.
4222 // elements_end: Array end.
4223 // separator: Single separator one-byte char (in lower byte).
4225 // Copy the separator character to the result.
4226 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4228 // Copy next array element to the result.
4229 __ Bind(&one_char_separator_loop_entry);
4230 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4231 __ Ldrsw(string_length,
4232 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4233 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4234 __ CopyBytes(result_pos, string, string_length, scratch1);
4235 __ Cmp(element, elements_end);
4236 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4239 // Long separator case (separator is more than one character). Entry is at the
4240 // label long_separator below.
4241 __ Bind(&long_separator_loop);
4242 // Live values in registers:
4243 // result_pos: the position to which we are currently copying characters.
4244 // element: Current array element.
4245 // elements_end: Array end.
4246 // separator: Separator string.
4248 // Copy the separator to the result.
4249 // TODO(all): hoist next two instructions.
4250 __ Ldrsw(string_length,
4251 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4252 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4253 __ CopyBytes(result_pos, string, string_length, scratch1);
4255 __ Bind(&long_separator);
4256 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4257 __ Ldrsw(string_length,
4258 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4259 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4260 __ CopyBytes(result_pos, string, string_length, scratch1);
4261 __ Cmp(element, elements_end);
4262 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4263 __ B(&done);
4265 __ Bind(&bailout);
4266 // Returning undefined will force slower code to handle it.
4267 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4268 __ Bind(&done);
4269 context()->Plug(result);
4273 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4274 DCHECK(expr->arguments()->length() == 0);
4275 ExternalReference debug_is_active =
4276 ExternalReference::debug_is_active_address(isolate());
4277 __ Mov(x10, debug_is_active);
4278 __ Ldrb(x0, MemOperand(x10));
4280 context()->Plug(x0);
4284 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4285 // Push the builtins object as the receiver.
4286 __ Ldr(x10, GlobalObjectMemOperand());
4287 __ Ldr(LoadDescriptor::ReceiverRegister(),
4288 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4289 __ Push(LoadDescriptor::ReceiverRegister());
4291 // Load the function from the receiver.
4292 Handle<String> name = expr->name();
4293 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4294 __ Mov(LoadDescriptor::SlotRegister(),
4295 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4296 CallLoadIC(NOT_INSIDE_TYPEOF);
4300 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4301 ZoneList<Expression*>* args = expr->arguments();
4302 int arg_count = args->length();
4304 SetCallPosition(expr, arg_count);
4305 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4306 __ Peek(x1, (arg_count + 1) * kPointerSize);
4307 __ CallStub(&stub);
4308 }
4311 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4312 ZoneList<Expression*>* args = expr->arguments();
4313 int arg_count = args->length();
4315 if (expr->is_jsruntime()) {
4316 Comment cmnt(masm_, "[ CallRunTime");
4317 EmitLoadJSRuntimeFunction(expr);
4319 // Push the target function under the receiver.
4320 __ Pop(x10);
4321 __ Push(x0, x10);
4323 for (int i = 0; i < arg_count; i++) {
4324 VisitForStackValue(args->at(i));
4327 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4328 EmitCallJSRuntimeFunction(expr);
4330 // Restore context register.
4331 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4333 context()->DropAndPlug(1, x0);
4336 const Runtime::Function* function = expr->function();
4337 switch (function->function_id) {
4338 #define CALL_INTRINSIC_GENERATOR(Name) \
4339 case Runtime::kInline##Name: { \
4340 Comment cmnt(masm_, "[ Inline" #Name); \
4341 return Emit##Name(expr); \
4343 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4344 #undef CALL_INTRINSIC_GENERATOR
4345 default: {
4346 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4347 // Push the arguments ("left-to-right").
4348 for (int i = 0; i < arg_count; i++) {
4349 VisitForStackValue(args->at(i));
4352 // Call the C runtime function.
4353 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4354 __ CallRuntime(expr->function(), arg_count);
4355 context()->Plug(x0);
4362 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4363 switch (expr->op()) {
4364 case Token::DELETE: {
4365 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4366 Property* property = expr->expression()->AsProperty();
4367 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4369 if (property != NULL) {
4370 VisitForStackValue(property->obj());
4371 VisitForStackValue(property->key());
4372 __ Mov(x10, Smi::FromInt(language_mode()));
4373 __ Push(x10);
4374 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4375 context()->Plug(x0);
4376 } else if (proxy != NULL) {
4377 Variable* var = proxy->var();
4378 // Delete of an unqualified identifier is disallowed in strict mode but
4379 // "delete this" is allowed.
4380 bool is_this = var->HasThisName(isolate());
4381 DCHECK(is_sloppy(language_mode()) || is_this);
4382 if (var->IsUnallocatedOrGlobalSlot()) {
4383 __ Ldr(x12, GlobalObjectMemOperand());
4384 __ Mov(x11, Operand(var->name()));
4385 __ Mov(x10, Smi::FromInt(SLOPPY));
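// Deleting a global property is always performed in sloppy mode here; strict
// code cannot reach this point because delete of an unqualified name is a
// SyntaxError in strict mode (only "delete this" is admitted, see the DCHECK).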
4386 __ Push(x12, x11, x10);
4387 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4388 context()->Plug(x0);
4389 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4390 // Result of deleting non-global, non-dynamic variables is false.
4391 // The subexpression does not have side effects.
4392 context()->Plug(is_this);
4394 // Non-global variable. Call the runtime to try to delete from the
4395 // context where the variable was introduced.
4396 __ Mov(x2, Operand(var->name()));
4397 __ Push(context_register(), x2);
4398 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4399 context()->Plug(x0);
4402 // Result of deleting non-property, non-variable reference is true.
4403 // The subexpression may have side effects.
4404 VisitForEffect(expr->expression());
4405 context()->Plug(true);
4406 }
4407 break;
4408 }
4410 case Token::VOID: {
4411 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4412 VisitForEffect(expr->expression());
4413 context()->Plug(Heap::kUndefinedValueRootIndex);
4414 break;
4415 }
4416 case Token::NOT: {
4417 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4418 if (context()->IsEffect()) {
4419 // Unary NOT has no side effects so it's only necessary to visit the
4420 // subexpression. Match the optimizing compiler by not branching.
4421 VisitForEffect(expr->expression());
4422 } else if (context()->IsTest()) {
4423 const TestContext* test = TestContext::cast(context());
4424 // The labels are swapped for the recursive call.
4425 VisitForControl(expr->expression(),
4426 test->false_label(),
4427 test->true_label(),
4428 test->fall_through());
4429 context()->Plug(test->true_label(), test->false_label());
4431 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4432 // TODO(jbramley): This could be much more efficient using (for
4433 // example) the CSEL instruction.
4434 Label materialize_true, materialize_false, done;
4435 VisitForControl(expr->expression(),
4440 __ Bind(&materialize_true);
4441 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4442 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4445 __ Bind(&materialize_false);
4446 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4447 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4451 if (context()->IsStackValue()) {
4452 __ Push(result_register());
4457 case Token::TYPEOF: {
4458 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4460 AccumulatorValueContext context(this);
4461 VisitForTypeofValue(expr->expression());
4464 TypeofStub typeof_stub(isolate());
4465 __ CallStub(&typeof_stub);
4466 context()->Plug(x0);
4475 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4476 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4478 Comment cmnt(masm_, "[ CountOperation");
4480 Property* prop = expr->expression()->AsProperty();
4481 LhsKind assign_type = Property::GetAssignType(prop);
4483 // Evaluate expression and get value.
4484 if (assign_type == VARIABLE) {
4485 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4486 AccumulatorValueContext context(this);
4487 EmitVariableLoad(expr->expression()->AsVariableProxy());
4488 } else {
4489 // Reserve space for result of postfix operation.
4490 if (expr->is_postfix() && !context()->IsEffect()) {
4491 __ Push(xzr);
4492 }
4493 switch (assign_type) {
4494 case NAMED_PROPERTY: {
4495 // Put the object both on the stack and in the register.
4496 VisitForStackValue(prop->obj());
4497 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4498 EmitNamedPropertyLoad(prop);
4502 case NAMED_SUPER_PROPERTY: {
4503 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4504 VisitForAccumulatorValue(
4505 prop->obj()->AsSuperPropertyReference()->home_object());
4506 __ Push(result_register());
4507 const Register scratch = x10;
4508 __ Peek(scratch, kPointerSize);
4509 __ Push(scratch, result_register());
4510 EmitNamedSuperPropertyLoad(prop);
4514 case KEYED_SUPER_PROPERTY: {
4515 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4516 VisitForStackValue(
4517 prop->obj()->AsSuperPropertyReference()->home_object());
4518 VisitForAccumulatorValue(prop->key());
4519 __ Push(result_register());
4520 const Register scratch1 = x10;
4521 const Register scratch2 = x11;
4522 __ Peek(scratch1, 2 * kPointerSize);
4523 __ Peek(scratch2, kPointerSize);
4524 __ Push(scratch1, scratch2, result_register());
4525 EmitKeyedSuperPropertyLoad(prop);
4529 case KEYED_PROPERTY: {
4530 VisitForStackValue(prop->obj());
4531 VisitForStackValue(prop->key());
4532 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4533 __ Peek(LoadDescriptor::NameRegister(), 0);
4534 EmitKeyedPropertyLoad(prop);
4543 // We need a second deoptimization point after loading the value
4544 // in case evaluating the property load may have a side effect.
4545 if (assign_type == VARIABLE) {
4546 PrepareForBailout(expr->expression(), TOS_REG);
4548 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4551 // Inline smi case if we are in a loop.
4552 Label stub_call, done;
4553 JumpPatchSite patch_site(masm_);
4555 int count_value = expr->op() == Token::INC ? 1 : -1;
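// For smi operands the increment/decrement is done inline with a tagged add;
// on overflow (or for non-smis) the operation falls back to the BinaryOpIC
// stub, and the JumpPatchSite lets the IC patch the inlined smi check later.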
4556 if (ShouldInlineSmiCase(expr->op())) {
4557 Label slow;
4558 patch_site.EmitJumpIfNotSmi(x0, &slow);
4560 // Save result for postfix expressions.
4561 if (expr->is_postfix()) {
4562 if (!context()->IsEffect()) {
4563 // Save the result on the stack. If we have a named or keyed property we
4564 // store the result under the receiver that is currently on top of the
4566 switch (assign_type) {
4570 case NAMED_PROPERTY:
4571 __ Poke(x0, kPointerSize);
4573 case NAMED_SUPER_PROPERTY:
4574 __ Poke(x0, kPointerSize * 2);
4576 case KEYED_PROPERTY:
4577 __ Poke(x0, kPointerSize * 2);
4579 case KEYED_SUPER_PROPERTY:
4580 __ Poke(x0, kPointerSize * 3);
4586 __ Adds(x0, x0, Smi::FromInt(count_value));
4587 __ B(vc, &done);
4588 // Call stub. Undo operation first.
4589 __ Sub(x0, x0, Smi::FromInt(count_value));
4590 __ B(&stub_call);
4591 __ Bind(&slow);
4592 }
4593 if (!is_strong(language_mode())) {
4594 ToNumberStub convert_stub(isolate());
4595 __ CallStub(&convert_stub);
4596 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4599 // Save result for postfix expressions.
4600 if (expr->is_postfix()) {
4601 if (!context()->IsEffect()) {
4602 // Save the result on the stack. If we have a named or keyed property
4603 // we store the result under the receiver that is currently on top
4605 switch (assign_type) {
4609 case NAMED_PROPERTY:
4610 __ Poke(x0, kXRegSize);
4612 case NAMED_SUPER_PROPERTY:
4613 __ Poke(x0, 2 * kXRegSize);
4615 case KEYED_PROPERTY:
4616 __ Poke(x0, 2 * kXRegSize);
4618 case KEYED_SUPER_PROPERTY:
4619 __ Poke(x0, 3 * kXRegSize);
4625 __ Bind(&stub_call);
4626 __ Mov(x1, x0);
4627 __ Mov(x0, Smi::FromInt(count_value));
4629 SetExpressionPosition(expr);
4632 Assembler::BlockPoolsScope scope(masm_);
4633 Handle<Code> code =
4634 CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4635 strength(language_mode())).code();
4636 CallIC(code, expr->CountBinOpFeedbackId());
4637 patch_site.EmitPatchInfo();
4638 }
4639 __ Bind(&done);
4641 if (is_strong(language_mode())) {
4642 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4644 // Store the value returned in x0.
4645 switch (assign_type) {
4647 if (expr->is_postfix()) {
4648 { EffectContext context(this);
4649 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4650 Token::ASSIGN, expr->CountSlot());
4651 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4654 // For all contexts except EffectConstant We have the result on
4655 // top of the stack.
4656 if (!context()->IsEffect()) {
4657 context()->PlugTOS();
4660 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4661 Token::ASSIGN, expr->CountSlot());
4662 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4663 context()->Plug(x0);
4666 case NAMED_PROPERTY: {
4667 __ Mov(StoreDescriptor::NameRegister(),
4668 Operand(prop->key()->AsLiteral()->value()));
4669 __ Pop(StoreDescriptor::ReceiverRegister());
4670 if (FLAG_vector_stores) {
4671 EmitLoadStoreICSlot(expr->CountSlot());
4672 CallStoreIC();
4673 } else {
4674 CallStoreIC(expr->CountStoreFeedbackId());
4675 }
4676 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4677 if (expr->is_postfix()) {
4678 if (!context()->IsEffect()) {
4679 context()->PlugTOS();
4682 context()->Plug(x0);
4686 case NAMED_SUPER_PROPERTY: {
4687 EmitNamedSuperPropertyStore(prop);
4688 if (expr->is_postfix()) {
4689 if (!context()->IsEffect()) {
4690 context()->PlugTOS();
4693 context()->Plug(x0);
4697 case KEYED_SUPER_PROPERTY: {
4698 EmitKeyedSuperPropertyStore(prop);
4699 if (expr->is_postfix()) {
4700 if (!context()->IsEffect()) {
4701 context()->PlugTOS();
4704 context()->Plug(x0);
4708 case KEYED_PROPERTY: {
4709 __ Pop(StoreDescriptor::NameRegister());
4710 __ Pop(StoreDescriptor::ReceiverRegister());
4711 Handle<Code> ic =
4712 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4713 if (FLAG_vector_stores) {
4714 EmitLoadStoreICSlot(expr->CountSlot());
4715 CallIC(ic);
4716 } else {
4717 CallIC(ic, expr->CountStoreFeedbackId());
4718 }
4719 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4720 if (expr->is_postfix()) {
4721 if (!context()->IsEffect()) {
4722 context()->PlugTOS();
4725 context()->Plug(x0);
4733 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4734 Expression* sub_expr,
4735 Handle<String> check) {
4736 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4737 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4738 Label materialize_true, materialize_false;
4739 Label* if_true = NULL;
4740 Label* if_false = NULL;
4741 Label* fall_through = NULL;
4742 context()->PrepareTest(&materialize_true, &materialize_false,
4743 &if_true, &if_false, &fall_through);
4745 { AccumulatorValueContext context(this);
4746 VisitForTypeofValue(sub_expr);
4748 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4750 Factory* factory = isolate()->factory();
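// Each "typeof x == <literal>" comparison below is lowered to a direct map or
// instance-type check on the value, so the typeof string itself is never
// materialized.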
4751 if (String::Equals(check, factory->number_string())) {
4752 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4753 __ JumpIfSmi(x0, if_true);
4754 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4755 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4756 Split(eq, if_true, if_false, fall_through);
4757 } else if (String::Equals(check, factory->string_string())) {
4758 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4759 __ JumpIfSmi(x0, if_false);
4760 // Check for undetectable objects => false.
4761 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4762 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4763 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4764 fall_through);
4765 } else if (String::Equals(check, factory->symbol_string())) {
4766 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4767 __ JumpIfSmi(x0, if_false);
4768 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4769 Split(eq, if_true, if_false, fall_through);
4770 } else if (String::Equals(check, factory->float32x4_string())) {
4772 "FullCodeGenerator::EmitLiteralCompareTypeof float32x4_string");
4773 __ JumpIfSmi(x0, if_false);
4774 __ CompareObjectType(x0, x0, x1, FLOAT32X4_TYPE);
4775 Split(eq, if_true, if_false, fall_through);
4776 } else if (String::Equals(check, factory->boolean_string())) {
4777 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4778 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4779 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4780 Split(eq, if_true, if_false, fall_through);
4781 } else if (String::Equals(check, factory->undefined_string())) {
4783 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4784 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4785 __ JumpIfSmi(x0, if_false);
4786 // Check for undetectable objects => true.
4787 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4788 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4789 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4790 fall_through);
4791 } else if (String::Equals(check, factory->function_string())) {
4792 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4793 __ JumpIfSmi(x0, if_false);
4794 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4795 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4796 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4797 fall_through);
4799 } else if (String::Equals(check, factory->object_string())) {
4800 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4801 __ JumpIfSmi(x0, if_false);
4802 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4803 // Check for JS objects => true.
4804 Register map = x10;
4805 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4806 if_false, lt);
4807 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4808 __ B(gt, if_false);
4809 // Check for undetectable objects => false.
4810 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4812 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4813 fall_through);
4815 } else {
4816 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4817 if (if_false != fall_through) __ B(if_false);
4819 context()->Plug(if_true, if_false);
4823 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4824 Comment cmnt(masm_, "[ CompareOperation");
4825 SetExpressionPosition(expr);
4827 // Try to generate an optimized comparison with a literal value.
4828 // TODO(jbramley): This only checks common values like NaN or undefined.
4829 // Should it also handle ARM64 immediate operands?
4830 if (TryLiteralCompare(expr)) {
4831 return;
4832 }
4834 // Assign labels according to context()->PrepareTest.
4835 Label materialize_true;
4836 Label materialize_false;
4837 Label* if_true = NULL;
4838 Label* if_false = NULL;
4839 Label* fall_through = NULL;
4840 context()->PrepareTest(&materialize_true, &materialize_false,
4841 &if_true, &if_false, &fall_through);
4843 Token::Value op = expr->op();
4844 VisitForStackValue(expr->left());
4845 switch (op) {
4846 case Token::IN:
4847 VisitForStackValue(expr->right());
4848 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4849 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4850 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4851 Split(eq, if_true, if_false, fall_through);
4854 case Token::INSTANCEOF: {
4855 VisitForStackValue(expr->right());
4856 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4857 __ CallStub(&stub);
4858 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4859 // The stub returns 0 for true.
4860 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4865 VisitForAccumulatorValue(expr->right());
4866 Condition cond = CompareIC::ComputeCondition(op);
4868 // Pop the stack value.
4871 JumpPatchSite patch_site(masm_);
4872 if (ShouldInlineSmiCase(op)) {
4873 Label slow_case;
4874 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4875 __ Cmp(x1, x0);
4876 Split(cond, if_true, if_false, NULL);
4877 __ Bind(&slow_case);
4880 Handle<Code> ic = CodeFactory::CompareIC(
4881 isolate(), op, strength(language_mode())).code();
4882 CallIC(ic, expr->CompareOperationFeedbackId());
4883 patch_site.EmitPatchInfo();
4884 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4885 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4889 // Convert the result of the comparison into one expected for this
4890 // expression's context.
4891 context()->Plug(if_true, if_false);
4895 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4896 Expression* sub_expr,
4898 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4899 Label materialize_true, materialize_false;
4900 Label* if_true = NULL;
4901 Label* if_false = NULL;
4902 Label* fall_through = NULL;
4903 context()->PrepareTest(&materialize_true, &materialize_false,
4904 &if_true, &if_false, &fall_through);
4906 VisitForAccumulatorValue(sub_expr);
4907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4909 if (expr->op() == Token::EQ_STRICT) {
4910 Heap::RootListIndex nil_value = nil == kNullValue ?
4911 Heap::kNullValueRootIndex :
4912 Heap::kUndefinedValueRootIndex;
4913 __ CompareRoot(x0, nil_value);
4914 Split(eq, if_true, if_false, fall_through);
4916 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4917 CallIC(ic, expr->CompareOperationFeedbackId());
4918 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4921 context()->Plug(if_true, if_false);
4925 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4926 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4927 context()->Plug(x0);
4931 void FullCodeGenerator::VisitYield(Yield* expr) {
4932 Comment cmnt(masm_, "[ Yield");
4933 SetExpressionPosition(expr);
4935 // Evaluate yielded value first; the initial iterator definition depends on
4936 // this. It stays on the stack while we update the iterator.
4937 VisitForStackValue(expr->expression());
4939 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4940 // and suchlike. The implementation changes a little by bleeding_edge so I
4941 // don't want to spend too much time on it now.
4943 switch (expr->yield_kind()) {
4944 case Yield::kSuspend:
4945 // Pop value from top-of-stack slot; box result into result register.
4946 EmitCreateIteratorResult(false);
4947 __ Push(result_register());
4949 case Yield::kInitial: {
4950 Label suspend, continuation, post_runtime, resume;
4952 __ B(&suspend);
4953 // TODO(jbramley): This label is bound here because the following code
4954 // looks at its pos(). Is it possible to do something more efficient here,
4955 // perhaps using Adr?
4956 __ Bind(&continuation);
4957 __ RecordGeneratorContinuation();
4958 __ B(&resume);
4960 __ Bind(&suspend);
4961 VisitForAccumulatorValue(expr->generator_object());
4962 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4963 __ Mov(x1, Smi::FromInt(continuation.pos()));
4964 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4965 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4966 __ Mov(x1, cp);
4967 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4968 kLRHasBeenSaved, kDontSaveFPRegs);
4969 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4970 __ Cmp(__ StackPointer(), x1);
4971 __ B(eq, &post_runtime);
4972 __ Push(x0); // generator object
4973 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4974 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4975 __ Bind(&post_runtime);
4976 __ Pop(result_register());
4977 EmitReturnSequence();
4979 __ Bind(&resume);
4980 context()->Plug(result_register());
4984 case Yield::kFinal: {
4985 VisitForAccumulatorValue(expr->generator_object());
4986 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4987 __ Str(x1, FieldMemOperand(result_register(),
4988 JSGeneratorObject::kContinuationOffset));
4989 // Pop value from top-of-stack slot, box result into result register.
4990 EmitCreateIteratorResult(true);
4991 EmitUnwindBeforeReturn();
4992 EmitReturnSequence();
4996 case Yield::kDelegating: {
4997 VisitForStackValue(expr->generator_object());
4999 // Initial stack layout is as follows:
5000 // [sp + 1 * kPointerSize] iter
5001 // [sp + 0 * kPointerSize] g
5003 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
5004 Label l_next, l_call, l_loop;
5005 Register load_receiver = LoadDescriptor::ReceiverRegister();
5006 Register load_name = LoadDescriptor::NameRegister();
5008 // Initial send value is undefined.
5009 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
5010 __ B(&l_next);
5012 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
5013 __ Bind(&l_catch);
5014 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
5015 __ Peek(x3, 1 * kPointerSize); // iter
5016 __ Push(load_name, x3, x0); // "throw", iter, except
5017 __ B(&l_call);
5019 // try { received = %yield result }
5020 // Shuffle the received result above a try handler and yield it without
5021 // re-boxing.
5022 __ Bind(&l_try);
5023 __ Pop(x0); // result
5024 int handler_index = NewHandlerTableEntry();
5025 EnterTryBlock(handler_index, &l_catch);
5026 const int try_block_size = TryCatch::kElementCount * kPointerSize;
5027 __ Push(x0); // result
5030 // TODO(jbramley): This label is bound here because the following code
5031 // looks at its pos(). Is it possible to do something more efficient here,
5032 // perhaps using Adr?
5033 __ Bind(&l_continuation);
5034 __ RecordGeneratorContinuation();
5035 __ B(&l_resume);
5037 __ Bind(&l_suspend);
5038 const int generator_object_depth = kPointerSize + try_block_size;
5039 __ Peek(x0, generator_object_depth);
5041 __ Push(Smi::FromInt(handler_index)); // handler-index
5042 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
5043 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
5044 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
5045 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
5046 __ Mov(x1, cp);
5047 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
5048 kLRHasBeenSaved, kDontSaveFPRegs);
5049 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
5050 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5051 __ Pop(x0); // result
5052 EmitReturnSequence();
5053 __ Bind(&l_resume); // received in x0
5054 ExitTryBlock(handler_index);
5056 // receiver = iter; f = 'next'; arg = received;
5058 __ Bind(&l_next);
5059 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
5060 __ Peek(x3, 1 * kPointerSize); // iter
5061 __ Push(load_name, x3, x0); // "next", iter, received
5063 // result = receiver[f](arg);
5064 __ Bind(&l_call);
5065 __ Peek(load_receiver, 1 * kPointerSize);
5066 __ Peek(load_name, 2 * kPointerSize);
5067 __ Mov(LoadDescriptor::SlotRegister(),
5068 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
5069 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
5070 CallIC(ic, TypeFeedbackId::None());
5071 __ Mov(x1, x0);
5072 __ Poke(x1, 2 * kPointerSize);
5073 SetCallPosition(expr, 1);
5074 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
5075 __ CallStub(&stub);
5077 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5078 __ Drop(1); // The function is still on the stack; drop it.
5080 // if (!result.done) goto l_try;
5081 __ Bind(&l_loop);
5082 __ Move(load_receiver, x0);
5084 __ Push(load_receiver); // save result
5085 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
5086 __ Mov(LoadDescriptor::SlotRegister(),
5087 SmiFromSlot(expr->DoneFeedbackSlot()));
5088 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.done
5089 // The ToBooleanStub argument (result.done) is in x0.
5090 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
5091 CallIC(bool_ic);
5092 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
5093 __ B(ne, &l_try);
5095 __ Pop(load_receiver); // result
5096 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
5097 __ Mov(LoadDescriptor::SlotRegister(),
5098 SmiFromSlot(expr->ValueFeedbackSlot()));
5099 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.value
5100 context()->DropAndPlug(2, x0); // drop iter and g
5107 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
5108 Expression *value,
5109 JSGeneratorObject::ResumeMode resume_mode) {
5110 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
5111 Register generator_object = x1;
5112 Register the_hole = x2;
5113 Register operand_stack_size = w3;
5114 Register function = x4;
5116 // The value stays in x0, and is ultimately read by the resumed generator, as
5117 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
5118 // is read to throw the value when the resumed generator is already closed. x1
5119 // will hold the generator object until the activation has been resumed.
5120 VisitForStackValue(generator);
5121 VisitForAccumulatorValue(value);
5122 __ Pop(generator_object);
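// Resuming rebuilds the generator's frame by hand: the receiver and hole
// values for the formal parameters are pushed, a new JS frame is entered, and
// then control either jumps straight back to the recorded continuation offset
// (fast path, empty operand stack) or goes through
// Runtime::kResumeJSGeneratorObject.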
5124 // Load suspended function and context.
5125 __ Ldr(cp, FieldMemOperand(generator_object,
5126 JSGeneratorObject::kContextOffset));
5127 __ Ldr(function, FieldMemOperand(generator_object,
5128 JSGeneratorObject::kFunctionOffset));
5130 // Load receiver and store as the first argument.
5131 __ Ldr(x10, FieldMemOperand(generator_object,
5132 JSGeneratorObject::kReceiverOffset));
5135 // Push holes for the rest of the arguments to the generator function.
5136 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
5138 // The number of arguments is stored as an int32_t, and -1 is a marker
5139 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
5140 // extension to correctly handle it. However, in this case, we operate on
5141 // 32-bit W registers, so extension isn't required.
5142 __ Ldr(w10, FieldMemOperand(x10,
5143 SharedFunctionInfo::kFormalParameterCountOffset));
5144 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
5145 __ PushMultipleTimes(the_hole, w10);
5147 // Enter a new JavaScript frame, and initialize its slots as they were when
5148 // the generator was suspended.
5149 Label resume_frame, done;
5150 __ Bl(&resume_frame);
5151 __ B(&done);
5153 __ Bind(&resume_frame);
5154 __ Push(lr, // Return address.
5155 fp, // Caller's frame pointer.
5156 cp, // Callee's context.
5157 function); // Callee's JS Function.
5158 __ Add(fp, __ StackPointer(), kPointerSize * 2);
5160 // Load and untag the operand stack size.
5161 __ Ldr(x10, FieldMemOperand(generator_object,
5162 JSGeneratorObject::kOperandStackOffset));
5163 __ Ldr(operand_stack_size,
5164 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
5166 // If we are sending a value and there is no operand stack, we can jump back
5168 if (resume_mode == JSGeneratorObject::NEXT) {
5170 __ Cbnz(operand_stack_size, &slow_resume);
5171 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
5172 __ Ldrsw(x11,
5173 UntagSmiFieldMemOperand(generator_object,
5174 JSGeneratorObject::kContinuationOffset));
5175 __ Add(x10, x10, x11);
5176 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
5177 __ Str(x12, FieldMemOperand(generator_object,
5178 JSGeneratorObject::kContinuationOffset));
5179 __ Br(x10);
5181 __ Bind(&slow_resume);
5184 // Otherwise, we push holes for the operand stack and call the runtime to fix
5185 // up the stack and the handlers.
5186 __ PushMultipleTimes(the_hole, operand_stack_size);
5188 __ Mov(x10, Smi::FromInt(resume_mode));
5189 __ Push(generator_object, result_register(), x10);
5190 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
5191 // Not reached: the runtime call returns elsewhere.
5192 __ Unreachable();
5194 __ Bind(&done);
5195 context()->Plug(result_register());
5199 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5200 Label gc_required;
5201 Label allocated;
5203 const int instance_size = 5 * kPointerSize;
5204 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
5205 instance_size);
5207 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5209 Register result = x0;
5210 __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
5211 __ B(&allocated);
5213 __ Bind(&gc_required);
5214 __ Push(Smi::FromInt(instance_size));
5215 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5216 __ Ldr(context_register(),
5217 MemOperand(fp, StandardFrameConstants::kContextOffset));
5219 __ Bind(&allocated);
5220 Register map_reg = x1;
5221 Register result_value = x2;
5222 Register boolean_done = x3;
5223 Register empty_fixed_array = x4;
5224 Register untagged_result = x5;
5225 __ Ldr(map_reg, GlobalObjectMemOperand());
5226 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5227 __ Ldr(map_reg,
5228 ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5229 __ Pop(result_value);
5230 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5231 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5232 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5233 JSObject::kElementsOffset);
5234 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5235 JSGeneratorObject::kResultDonePropertyOffset);
5236 __ ObjectUntag(untagged_result, result);
5237 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5238 __ Stp(empty_fixed_array, empty_fixed_array,
5239 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5240 __ Stp(result_value, boolean_done,
5241 MemOperand(untagged_result,
5242 JSGeneratorObject::kResultValuePropertyOffset));
5244 // Only the value field needs a write barrier, as the other values are in the
5246 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
5247 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
5251 // TODO(all): I don't like this method.
5252 // It seems to me that in too many places x0 is used in place of this.
5253 // Also, this function is not suitable for all places where x0 should be
5254 // abstracted (eg. when used as an argument). But some places assume that the
5255 // first argument register is x0, and use this function instead.
5256 // Considering that most of the register allocation is hard-coded in the
5257 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5258 // that abstracting the allocation through functions would not yield any
5259 // performance benefit, I think the existence of this function is debatable.
5260 Register FullCodeGenerator::result_register() {
5261 return x0;
5262 }
5265 Register FullCodeGenerator::context_register() {
5266 return cp;
5267 }
5270 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5271 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5272 __ Str(value, MemOperand(fp, frame_offset));
5276 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5277 __ Ldr(dst, ContextMemOperand(cp, context_index));
5281 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5282 Scope* closure_scope = scope()->ClosureScope();
5283 if (closure_scope->is_script_scope() ||
5284 closure_scope->is_module_scope()) {
5285 // Contexts nested in the native context have a canonical empty function
5286 // as their closure, not the anonymous closure containing the global
5287 // code. Pass a smi sentinel and let the runtime look up the empty
5289 DCHECK(kSmiTag == 0);
5291 } else if (closure_scope->is_eval_scope()) {
5292 // Contexts created by a call to eval have the same closure as the
5293 // context calling eval, not the anonymous closure containing the eval
5294 // code. Fetch it from the context.
5295 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
5298 DCHECK(closure_scope->is_function_scope());
5299 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5305 void FullCodeGenerator::EnterFinallyBlock() {
5306 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5307 DCHECK(!result_register().is(x10));
5308 // Preserve the result register while executing finally block.
5309 // Also cook the return address in lr to the stack (smi encoded Code* delta).
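// "Cooking" stores the return address as a smi-encoded offset from the start
// of the code object rather than as a raw pointer, so the stack slot stays
// valid if the GC moves the code.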
5310 __ Sub(x10, lr, Operand(masm_->CodeObject()));
5311 __ SmiTag(x10);
5312 __ Push(result_register(), x10);
5314 // Store pending message while executing finally block.
5315 ExternalReference pending_message_obj =
5316 ExternalReference::address_of_pending_message_obj(isolate());
5317 __ Mov(x10, pending_message_obj);
5318 __ Ldr(x10, MemOperand(x10));
5321 ClearPendingMessage();
5325 void FullCodeGenerator::ExitFinallyBlock() {
5326 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5327 DCHECK(!result_register().is(x10));
5329 // Restore pending message from stack.
5330 __ Pop(x10);
5331 ExternalReference pending_message_obj =
5332 ExternalReference::address_of_pending_message_obj(isolate());
5333 __ Mov(x13, pending_message_obj);
5334 __ Str(x10, MemOperand(x13));
5336 // Restore result register and cooked return address from the stack.
5337 __ Pop(x10, result_register());
5339 // Uncook the return address (see EnterFinallyBlock).
5340 __ SmiUntag(x10);
5341 __ Add(x11, x10, Operand(masm_->CodeObject()));
5342 __ Br(x11);
5346 void FullCodeGenerator::ClearPendingMessage() {
5347 DCHECK(!result_register().is(x10));
5348 ExternalReference pending_message_obj =
5349 ExternalReference::address_of_pending_message_obj(isolate());
5350 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
5351 __ Mov(x13, pending_message_obj);
5352 __ Str(x10, MemOperand(x13));
5356 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5357 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5358 __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
5365 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5366 Address pc,
5367 BackEdgeState target_state,
5368 Code* replacement_code) {
5369 // Turn the jump into a nop.
5370 Address branch_address = pc - 3 * kInstructionSize;
5371 PatchingAssembler patcher(branch_address, 1);
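// The back edge sequence is three instructions long: a conditional branch (or
// a nop once patched), a pc-relative literal load of the target builtin's
// address, and a blr. Patching toggles the first instruction and rewrites the
// literal, as the cases below show.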
5373 DCHECK(Instruction::Cast(branch_address)
5374 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5375 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5376 Instruction::Cast(branch_address)->ImmPCOffset() ==
5377 6 * kInstructionSize));
5379 switch (target_state) {
5381 // <decrement profiling counter>
5382 // .. .. .. .. b.pl ok
5383 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5384 // .. .. .. .. blr x16
5385 // ... more instructions.
5387 // Jump offset is 6 instructions.
5390 case ON_STACK_REPLACEMENT:
5391 case OSR_AFTER_STACK_CHECK:
5392 // <decrement profiling counter>
5393 // .. .. .. .. mov x0, x0 (NOP)
5394 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5395 // .. .. .. .. blr x16
5396 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
5400 // Replace the call address.
5401 Instruction* load = Instruction::Cast(pc)->preceding(2);
5402 Address interrupt_address_pointer =
5403 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5404 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5405 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5406 ->builtins()
5407 ->OnStackReplacement()
5408 ->entry())) ||
5409 (Memory::uint64_at(interrupt_address_pointer) ==
5410 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5411 ->builtins()
5412 ->InterruptCheck()
5413 ->entry())) ||
5414 (Memory::uint64_at(interrupt_address_pointer) ==
5415 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5416 ->builtins()
5417 ->OsrAfterStackCheck()
5418 ->entry())) ||
5419 (Memory::uint64_at(interrupt_address_pointer) ==
5420 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5421 ->builtins()
5422 ->OnStackReplacement()
5423 ->entry())));
5424 Memory::uint64_at(interrupt_address_pointer) =
5425 reinterpret_cast<uint64_t>(replacement_code->entry());
5427 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5428 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5432 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5433 Isolate* isolate,
5434 Code* unoptimized_code,
5435 Address pc) {
5436 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5437 // back-end), but this function is gone in bleeding_edge so it might not
5439 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5441 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5442 Instruction* load = Instruction::Cast(pc)->preceding(2);
5443 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5444 load->ImmPCOffset());
5445 if (entry == reinterpret_cast<uint64_t>(
5446 isolate->builtins()->OnStackReplacement()->entry())) {
5447 return ON_STACK_REPLACEMENT;
5448 } else if (entry == reinterpret_cast<uint64_t>(
5449 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5450 return OSR_AFTER_STACK_CHECK;
5460 } // namespace internal
5461 } // namespace v8
5463 #endif // V8_TARGET_ARCH_ARM64