// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

#define __ ACCESS_MASM(masm_)
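
// The '__' macro expands to "masm_->", the usual V8 convention that makes
// code-generation sequences read like assembly listings.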

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
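    // Before patching, "tbz xzr, #0" tests bit 0 of the zero register, which
    // is always clear, so the branch below is always taken. The patch rewrites
    // it to test the smi tag bit of the checked register instead.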
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
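    // Before patching, "tbnz xzr, #0" never branches (bit 0 of the zero
    // register is always clear); the patch turns it into a real smi check.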
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
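    // Record the patch site and the register holding the smi-checked value,
    // so that PatchInlinedSmiCode can locate and rewrite the check later.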
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
    info_emitted_ = true;
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
// - x1: the JS function object being called (i.e. ourselves).
// - cp: our context.
// - fp: our caller's frame pointer.
// - jssp: stack pointer.
// - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
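  // The cell holds the remaining interrupt budget as a smi. Back-edge and
  // return sequences decrement it and call the InterruptCheck builtin when
  // the budget runs out.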
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }
  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());
  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
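        // Pushing in batches of kMaxPushes bounds the size of the generated
        // code for large frames; small frames (and the remainder below) are
        // handled with straight-line pushes.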
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
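      // (Objects in new space are scavenged as a whole, so stores into them
      // need no remembered-set updates and the write barrier can be skipped.)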
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ Bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    // Get the frame pointer for the calling frame.
    __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    Label check_frame_marker;
    __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
    __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
    __ B(ne, &check_frame_marker);
    __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

    // Check the marker in the calling frame.
    __ Bind(&check_frame_marker);
    __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
    __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));

    Label non_construct_frame, done;
    __ B(ne, &non_construct_frame);
    __ Ldr(x0,
           MemOperand(x2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ B(&done);

    __ Bind(&non_construct_frame);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    __ Bind(&done);

    SetVar(new_target_var, x0, x2, x3);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x2, Smi::FromInt(num_parameters));
    __ Mov(x1, Smi::FromInt(rest_index));
    __ Mov(x0, Smi::FromInt(language_mode()));
    __ Push(x3, x2, x1, x0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
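  // Subs sets the condition flags, so callers can branch directly on whether
  // the budget went negative (e.g. with B(pl, ...)).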
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);

    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }

    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    SetReturnPosition(function());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16-byte alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    int no_frame_start = masm_->pc_offset();
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // problem.
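    // The pc-relative load below fetches the byte count to drop from a 64-bit
    // literal emitted inline (the dc64 a few instructions down), so argument
    // counts beyond immediate-encoding limits are still supported.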
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
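  // The ToBoolean IC signals 'true' with a nonzero result, so the 'ne'
  // comparison against zero sends truthy values to if_true.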
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // proper context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();
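    // The recorded patch info lets the CompareIC rewrite the inlined smi
    // check above once it has gathered type feedback for this comparison.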

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
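  // The index is a smi; Operand::UntagSmiAndScale converts it into a byte
  // offset so that x10 plus the offset addresses the current array element.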
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    __ Mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load IC.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    // Each var occupies two slots in the context: for reads and writes.
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
      __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
    }
  } else {
    __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ Mov(LoadDescriptor::SlotRegister(),
           SmiFromSlot(proxy->VariableFeedbackSlot()));
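    // With the receiver, name and feedback slot in place, the load IC below
    // performs the actual lookup and records type feedback in the vector.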
    CallLoadIC(typeof_mode);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(x0, var);
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Mov(StoreDescriptor::ReceiverRegister(), x0);
              __ Mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ Peek(StoreDescriptor::ValueRegister(), 0);
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }

    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Mov(x0, Smi::FromInt(NONE));
            __ Push(x0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}
1811 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1812 Comment cmnt(masm_, "[ ArrayLiteral");
1814 expr->BuildConstantElements(isolate());
1815 Handle<FixedArray> constant_elements = expr->constant_elements();
1816 bool has_fast_elements =
1817 IsFastObjectElementsKind(expr->constant_elements_kind());
1819 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1820 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1821 // If the only customer of allocation sites is transitioning, then
1822 // we can turn it off if we don't have anywhere else to transition to.
1823 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1826 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1827 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1828 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1829 __ Mov(x1, Operand(constant_elements));
1830 if (MustCreateArrayLiteralWithRuntime(expr)) {
1831 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1832 __ Push(x3, x2, x1, x0);
1833 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1835 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1836 __ CallStub(&stub);
1838 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1840 bool result_saved = false; // Is the result saved to the stack?
1841 ZoneList<Expression*>* subexprs = expr->values();
1842 int length = subexprs->length();
1844 // Emit code to evaluate all the non-constant subexpressions and to store
1845 // them into the newly cloned array.
1846 int array_index = 0;
1847 for (; array_index < length; array_index++) {
1848 Expression* subexpr = subexprs->at(array_index);
1849 if (subexpr->IsSpread()) break;
1851 // If the subexpression is a literal or a simple materialized literal it
1852 // is already set in the cloned array.
1853 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1855 if (!result_saved) {
1856 __ Mov(x1, Smi::FromInt(expr->literal_index()));
1857 __ Push(x0, x1);
1858 result_saved = true;
1860 VisitForAccumulatorValue(subexpr);
1862 if (has_fast_elements) {
1863 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1864 __ Peek(x6, kPointerSize); // Copy of array literal.
1865 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1866 __ Str(result_register(), FieldMemOperand(x1, offset));
1867 // Update the write barrier for the array store.
1868 __ RecordWriteField(x1, offset, result_register(), x10,
1869 kLRHasBeenSaved, kDontSaveFPRegs,
1870 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1872 __ Mov(x3, Smi::FromInt(array_index));
1873 StoreArrayLiteralElementStub stub(isolate());
1874 __ CallStub(&stub);
1877 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1880 // In case the array literal contains spread expressions it has two parts. The
1881 // first part is the "static" array, which has a literal index and is handled
1882 // above. The second part is the part after the first spread expression
1883 // (inclusive), and these elements get appended to the array. Note that the
1884 // number of elements an iterable produces is unknown ahead of time.
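// As a purely illustrative example: for [a, b, ...iter, c], the elements 'a'
// and 'b' were stored directly into the cloned backing store above, while
// '...iter' and 'c' are appended one at a time below, since the number of
// values 'iter' yields is only known at runtime.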
1885 if (array_index < length && result_saved) {
1886 __ Drop(1); // literal index
1888 result_saved = false;
1890 for (; array_index < length; array_index++) {
1891 Expression* subexpr = subexprs->at(array_index);
1894 if (subexpr->IsSpread()) {
1895 VisitForStackValue(subexpr->AsSpread()->expression());
1896 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1898 VisitForStackValue(subexpr);
1899 __ CallRuntime(Runtime::kAppendElement, 2);
1902 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1906 __ Drop(1); // literal index
1907 context()->PlugTOS();
1909 context()->Plug(x0);
1914 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1915 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1917 Comment cmnt(masm_, "[ Assignment");
1918 SetExpressionPosition(expr, INSERT_BREAK);
1920 Property* property = expr->target()->AsProperty();
1921 LhsKind assign_type = Property::GetAssignType(property);
1923 // Evaluate LHS expression.
1924 switch (assign_type) {
1926 // Nothing to do here.
1928 case NAMED_PROPERTY:
1929 if (expr->is_compound()) {
1930 // We need the receiver both on the stack and in the register.
1931 VisitForStackValue(property->obj());
1932 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1934 VisitForStackValue(property->obj());
1937 case NAMED_SUPER_PROPERTY:
1938 VisitForStackValue(
1939 property->obj()->AsSuperPropertyReference()->this_var());
1940 VisitForAccumulatorValue(
1941 property->obj()->AsSuperPropertyReference()->home_object());
1942 __ Push(result_register());
1943 if (expr->is_compound()) {
1944 const Register scratch = x10;
1945 __ Peek(scratch, kPointerSize);
1946 __ Push(scratch, result_register());
1949 case KEYED_SUPER_PROPERTY:
1950 VisitForStackValue(
1951 property->obj()->AsSuperPropertyReference()->this_var());
1952 VisitForStackValue(
1953 property->obj()->AsSuperPropertyReference()->home_object());
1954 VisitForAccumulatorValue(property->key());
1955 __ Push(result_register());
1956 if (expr->is_compound()) {
1957 const Register scratch1 = x10;
1958 const Register scratch2 = x11;
1959 __ Peek(scratch1, 2 * kPointerSize);
1960 __ Peek(scratch2, kPointerSize);
1961 __ Push(scratch1, scratch2, result_register());
1964 case KEYED_PROPERTY:
1965 if (expr->is_compound()) {
1966 VisitForStackValue(property->obj());
1967 VisitForStackValue(property->key());
1968 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1969 __ Peek(LoadDescriptor::NameRegister(), 0);
1971 VisitForStackValue(property->obj());
1972 VisitForStackValue(property->key());
1977 // For compound assignments we need another deoptimization point after the
1978 // variable/property load.
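// For example, a compound assignment such as o.x += 1 first loads o.x (the
// deoptimization point prepared here), then evaluates the right-hand side,
// applies the binary operation, and only afterwards performs the store
// handled further below.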
1979 if (expr->is_compound()) {
1980 { AccumulatorValueContext context(this);
1981 switch (assign_type) {
1983 EmitVariableLoad(expr->target()->AsVariableProxy());
1984 PrepareForBailout(expr->target(), TOS_REG);
1986 case NAMED_PROPERTY:
1987 EmitNamedPropertyLoad(property);
1988 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1990 case NAMED_SUPER_PROPERTY:
1991 EmitNamedSuperPropertyLoad(property);
1992 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1994 case KEYED_SUPER_PROPERTY:
1995 EmitKeyedSuperPropertyLoad(property);
1996 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1998 case KEYED_PROPERTY:
1999 EmitKeyedPropertyLoad(property);
2000 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2005 Token::Value op = expr->binary_op();
2006 __ Push(x0); // Left operand goes on the stack.
2007 VisitForAccumulatorValue(expr->value());
2009 AccumulatorValueContext context(this);
2010 if (ShouldInlineSmiCase(op)) {
2011 EmitInlineSmiBinaryOp(expr->binary_operation(),
2012 op,
2013 expr->target(),
2014 expr->value());
2015 } else {
2016 EmitBinaryOp(expr->binary_operation(), op);
2019 // Deoptimization point in case the binary operation may have side effects.
2020 PrepareForBailout(expr->binary_operation(), TOS_REG);
2022 VisitForAccumulatorValue(expr->value());
2025 SetExpressionPosition(expr);
2028 switch (assign_type) {
2030 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2031 expr->op(), expr->AssignmentSlot());
2032 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2033 context()->Plug(x0);
2035 case NAMED_PROPERTY:
2036 EmitNamedPropertyAssignment(expr);
2038 case NAMED_SUPER_PROPERTY:
2039 EmitNamedSuperPropertyStore(property);
2040 context()->Plug(x0);
2042 case KEYED_SUPER_PROPERTY:
2043 EmitKeyedSuperPropertyStore(property);
2044 context()->Plug(x0);
2046 case KEYED_PROPERTY:
2047 EmitKeyedPropertyAssignment(expr);
2053 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2054 SetExpressionPosition(prop);
2055 Literal* key = prop->key()->AsLiteral();
2056 DCHECK(!prop->IsSuperAccess());
2058 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2059 __ Mov(LoadDescriptor::SlotRegister(),
2060 SmiFromSlot(prop->PropertyFeedbackSlot()));
2061 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2065 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2066 // Stack: receiver, home_object.
2067 SetExpressionPosition(prop);
2068 Literal* key = prop->key()->AsLiteral();
2069 DCHECK(!key->value()->IsSmi());
2070 DCHECK(prop->IsSuperAccess());
2072 __ Push(key->value());
2073 __ Push(Smi::FromInt(language_mode()));
2074 __ CallRuntime(Runtime::kLoadFromSuper, 4);
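// Together with the receiver and home_object already on the stack, the key
// and the language mode pushed above make up the four arguments expected by
// Runtime::kLoadFromSuper.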
2078 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2079 SetExpressionPosition(prop);
2080 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2081 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2082 __ Mov(LoadDescriptor::SlotRegister(),
2083 SmiFromSlot(prop->PropertyFeedbackSlot()));
2088 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2089 // Stack: receiver, home_object, key.
2090 SetExpressionPosition(prop);
2091 __ Push(Smi::FromInt(language_mode()));
2092 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2096 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2097 Token::Value op,
2098 Expression* left_expr,
2099 Expression* right_expr) {
2100 Label done, both_smis, stub_call;
2102 // Get the arguments.
2103 Register left = x1;
2104 Register right = x0;
2105 Register result = x0;
2106 __ Pop(left);
2108 // Perform combined smi check on both operands.
2109 __ Orr(x10, left, right);
2110 JumpPatchSite patch_site(masm_);
2111 patch_site.EmitJumpIfSmi(x10, &both_smis);
2113 __ Bind(&stub_call);
2115 Handle<Code> code =
2116 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2118 Assembler::BlockPoolsScope scope(masm_);
2119 CallIC(code, expr->BinaryOperationFeedbackId());
2120 patch_site.EmitPatchInfo();
2124 __ Bind(&both_smis);
2125 // Smi case. This code works in the same way as the smi-smi case in the type
2126 // recording binary operation stub, see
2127 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2128 // TODO(all): That doesn't exist any more. Where are the comments?
2130 // The set of operations that needs to be supported here is controlled by
2131 // FullCodeGenerator::ShouldInlineSmiCase().
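// Note: on arm64 a smi keeps its 32-bit payload in the upper word of the
// register (value << kSmiShift, with kSmiShift == 32). The shift cases below
// therefore extract the at-most-5-bit untagged shift amount with Ubfx,
// operate on the tagged value directly, and, for the right shifts, clear the
// bits that spill into the tag field with Bic.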
2134 __ Ubfx(right, right, kSmiShift, 5);
2135 __ Asr(result, left, right);
2136 __ Bic(result, result, kSmiShiftMask);
2139 __ Ubfx(right, right, kSmiShift, 5);
2140 __ Lsl(result, left, right);
2143 // If `left >>> right` >= 0x80000000, the result is not representable in a
2144 // signed 32-bit smi.
2145 __ Ubfx(right, right, kSmiShift, 5);
2146 __ Lsr(x10, left, right);
2147 __ Tbnz(x10, kXSignBit, &stub_call);
2148 __ Bic(result, x10, kSmiShiftMask);
2151 __ Adds(x10, left, right);
2152 __ B(vs, &stub_call);
2153 __ Mov(result, x10);
2156 __ Subs(x10, left, right);
2157 __ B(vs, &stub_call);
2158 __ Mov(result, x10);
2161 Label not_minus_zero, done;
2162 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2163 STATIC_ASSERT(kSmiTag == 0);
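// With both operands tagged as value << 32, the 128-bit product is
// (left32 * right32) << 64, so Smulh (the high 64 bits) yields the untagged
// product directly. A zero product still needs the sign check below, since a
// negative operand times zero would be -0, which is not representable as a
// smi.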
2164 __ Smulh(x10, left, right);
2165 __ Cbnz(x10, &not_minus_zero);
2166 __ Eor(x11, left, right);
2167 __ Tbnz(x11, kXSignBit, &stub_call);
2168 __ Mov(result, x10);
2170 __ Bind(&not_minus_zero);
2171 __ Cls(x11, x10);
2172 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2173 __ B(lt, &stub_call);
2174 __ SmiTag(result, x10);
2179 __ Orr(result, left, right);
2181 case Token::BIT_AND:
2182 __ And(result, left, right);
2184 case Token::BIT_XOR:
2185 __ Eor(result, left, right);
2192 context()->Plug(x0);
2196 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2198 Handle<Code> code =
2199 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2200 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2202 Assembler::BlockPoolsScope scope(masm_);
2203 CallIC(code, expr->BinaryOperationFeedbackId());
2204 patch_site.EmitPatchInfo();
2206 context()->Plug(x0);
2210 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2211 int* used_store_slots) {
2212 // Constructor is in x0.
2213 DCHECK(lit != NULL);
2216 // No access check is needed here since the constructor is created by the
2217 // class literal.
2218 Register scratch = x1;
2219 __ Ldr(scratch,
2220 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2223 for (int i = 0; i < lit->properties()->length(); i++) {
2224 ObjectLiteral::Property* property = lit->properties()->at(i);
2225 Expression* value = property->value();
2227 if (property->is_static()) {
2228 __ Peek(scratch, kPointerSize); // constructor
2230 __ Peek(scratch, 0); // prototype
2233 EmitPropertyKey(property, lit->GetIdForProperty(i));
2235 // The static prototype property is read only. We handle the non-computed
2236 // property name case in the parser. Since this is the only case where we
2237 // need to check for an own read-only property, we special-case it here so
2238 // that the check is not performed for every property.
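// As a purely illustrative example: class C { static ["proto" + "type"]() {} }
// can only be caught here at runtime, once the computed name is known to be
// "prototype"; the non-computed form, static prototype() {}, is already
// rejected by the parser.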
2239 if (property->is_static() && property->is_computed_name()) {
2240 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2244 VisitForStackValue(value);
2245 EmitSetHomeObjectIfNeeded(value, 2,
2246 lit->SlotForHomeObject(value, used_store_slots));
2248 switch (property->kind()) {
2249 case ObjectLiteral::Property::CONSTANT:
2250 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2251 case ObjectLiteral::Property::PROTOTYPE:
2253 case ObjectLiteral::Property::COMPUTED:
2254 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2257 case ObjectLiteral::Property::GETTER:
2258 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2259 __ Push(x0);
2260 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2263 case ObjectLiteral::Property::SETTER:
2264 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2265 __ Push(x0);
2266 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2275 __ CallRuntime(Runtime::kToFastProperties, 1);
2278 __ CallRuntime(Runtime::kToFastProperties, 1);
2280 if (is_strong(language_mode())) {
2281 __ Ldr(scratch,
2282 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2285 // TODO(conradw): It would be more efficient to define the properties with
2286 // the right attributes the first time round.
2287 // Freeze the prototype.
2288 __ CallRuntime(Runtime::kObjectFreeze, 1);
2289 // Freeze the constructor.
2290 __ CallRuntime(Runtime::kObjectFreeze, 1);
2295 void FullCodeGenerator::EmitAssignment(Expression* expr,
2296 FeedbackVectorICSlot slot) {
2297 DCHECK(expr->IsValidReferenceExpressionOrThis());
2299 Property* prop = expr->AsProperty();
2300 LhsKind assign_type = Property::GetAssignType(prop);
2302 switch (assign_type) {
2304 Variable* var = expr->AsVariableProxy()->var();
2305 EffectContext context(this);
2306 EmitVariableAssignment(var, Token::ASSIGN, slot);
2309 case NAMED_PROPERTY: {
2310 __ Push(x0); // Preserve value.
2311 VisitForAccumulatorValue(prop->obj());
2312 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2313 // this copy.
2314 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2315 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2316 __ Mov(StoreDescriptor::NameRegister(),
2317 Operand(prop->key()->AsLiteral()->value()));
2318 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2319 CallStoreIC();
2322 case NAMED_SUPER_PROPERTY: {
2324 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2325 VisitForAccumulatorValue(
2326 prop->obj()->AsSuperPropertyReference()->home_object());
2327 // stack: value, this; x0: home_object
2328 Register scratch = x10;
2329 Register scratch2 = x11;
2330 __ mov(scratch, result_register()); // home_object
2331 __ Peek(x0, kPointerSize); // value
2332 __ Peek(scratch2, 0); // this
2333 __ Poke(scratch2, kPointerSize); // this
2334 __ Poke(scratch, 0); // home_object
2335 // stack: this, home_object; x0: value
2336 EmitNamedSuperPropertyStore(prop);
2339 case KEYED_SUPER_PROPERTY: {
2341 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2342 VisitForStackValue(
2343 prop->obj()->AsSuperPropertyReference()->home_object());
2344 VisitForAccumulatorValue(prop->key());
2345 Register scratch = x10;
2346 Register scratch2 = x11;
2347 __ Peek(scratch2, 2 * kPointerSize); // value
2348 // stack: value, this, home_object; x0: key, x11: value
2349 __ Peek(scratch, kPointerSize); // this
2350 __ Poke(scratch, 2 * kPointerSize);
2351 __ Peek(scratch, 0); // home_object
2352 __ Poke(scratch, kPointerSize);
2353 __ Poke(x0, 0); // key
2354 __ Move(x0, scratch2);
2355 // stack: this, home_object, key; x0: value.
2356 EmitKeyedSuperPropertyStore(prop);
2359 case KEYED_PROPERTY: {
2360 __ Push(x0); // Preserve value.
2361 VisitForStackValue(prop->obj());
2362 VisitForAccumulatorValue(prop->key());
2363 __ Mov(StoreDescriptor::NameRegister(), x0);
2364 __ Pop(StoreDescriptor::ReceiverRegister(),
2365 StoreDescriptor::ValueRegister());
2366 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2367 Handle<Code> ic =
2368 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2369 CallIC(ic);
2373 context()->Plug(x0);
2377 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2378 Variable* var, MemOperand location) {
2379 __ Str(result_register(), location);
2380 if (var->IsContextSlot()) {
2381 // RecordWrite may destroy all its register arguments.
2382 __ Mov(x10, result_register());
2383 int offset = Context::SlotOffset(var->index());
2384 __ RecordWriteContextSlot(
2385 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2390 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2391 FeedbackVectorICSlot slot) {
2392 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2393 if (var->IsUnallocated()) {
2394 // Global var, const, or let.
2395 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2396 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2397 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2398 CallStoreIC();
2400 } else if (var->IsGlobalSlot()) {
2401 // Global var, const, or let.
2402 DCHECK(var->index() > 0);
2403 DCHECK(var->IsStaticGlobalObjectProperty());
2404 // Each var occupies two slots in the context: for reads and writes.
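// var->index() designates the read slot of that pair; the +1 below selects
// the write slot targeted by this store.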
2405 int const slot = var->index() + 1;
2406 int const depth = scope()->ContextChainLength(var->scope());
2407 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2408 __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
2409 __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
2410 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
2411 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2412 __ CallStub(&stub);
2414 __ Push(Smi::FromInt(slot));
2415 __ Push(var->name());
2416 __ Push(x0);
2417 __ CallRuntime(is_strict(language_mode())
2418 ? Runtime::kStoreGlobalViaContext_Strict
2419 : Runtime::kStoreGlobalViaContext_Sloppy,
2420 3);
2422 } else if (var->mode() == LET && op != Token::INIT_LET) {
2423 // Non-initializing assignment to let variable needs a write barrier.
2424 DCHECK(!var->IsLookupSlot());
2425 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2426 Label assign;
2427 MemOperand location = VarOperand(var, x1);
2428 __ Ldr(x10, location);
2429 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2430 __ Mov(x10, Operand(var->name()));
2431 __ Push(x10);
2432 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2433 // Perform the assignment.
2434 __ Bind(&assign);
2435 EmitStoreToStackLocalOrContextSlot(var, location);
2437 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2438 // Assignment to const variable needs a write barrier.
2439 DCHECK(!var->IsLookupSlot());
2440 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2441 Label const_error;
2442 MemOperand location = VarOperand(var, x1);
2443 __ Ldr(x10, location);
2444 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2445 __ Mov(x10, Operand(var->name()));
2446 __ Push(x10);
2447 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2448 __ Bind(&const_error);
2449 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2451 } else if (var->is_this() && op == Token::INIT_CONST) {
2452 // Initializing assignment to const {this} needs a write barrier.
2453 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2454 Label uninitialized_this;
2455 MemOperand location = VarOperand(var, x1);
2456 __ Ldr(x10, location);
2457 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2458 __ Mov(x0, Operand(var->name()));
2459 __ Push(x0);
2460 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2461 __ bind(&uninitialized_this);
2462 EmitStoreToStackLocalOrContextSlot(var, location);
2464 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2465 if (var->IsLookupSlot()) {
2466 // Assignment to var.
2467 __ Mov(x11, Operand(var->name()));
2468 __ Mov(x10, Smi::FromInt(language_mode()));
2469 // jssp[0]  : language mode.
2470 // jssp[8]  : name.
2471 // jssp[16] : context.
2472 // jssp[24] : value.
2473 __ Push(x0, cp, x11, x10);
2474 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2476 // Assignment to var or initializing assignment to let/const in harmony
2477 // mode.
2478 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2479 MemOperand location = VarOperand(var, x1);
2480 if (FLAG_debug_code && op == Token::INIT_LET) {
2481 __ Ldr(x10, location);
2482 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2483 __ Check(eq, kLetBindingReInitialization);
2485 EmitStoreToStackLocalOrContextSlot(var, location);
2488 } else if (op == Token::INIT_CONST_LEGACY) {
2489 // Const initializers need a write barrier.
2490 DCHECK(var->mode() == CONST_LEGACY);
2491 DCHECK(!var->IsParameter()); // No const parameters.
2492 if (var->IsLookupSlot()) {
2493 __ Mov(x1, Operand(var->name()));
2494 __ Push(x0, cp, x1);
2495 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2497 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2498 Label skip;
2499 MemOperand location = VarOperand(var, x1);
2500 __ Ldr(x10, location);
2501 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2502 EmitStoreToStackLocalOrContextSlot(var, location);
2503 __ Bind(&skip);
2507 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2508 if (is_strict(language_mode())) {
2509 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2511 // Silently ignore store in sloppy mode.
2516 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2517 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2518 // Assignment to a property, using a named store IC.
2519 Property* prop = expr->target()->AsProperty();
2520 DCHECK(prop != NULL);
2521 DCHECK(prop->key()->IsLiteral());
2523 __ Mov(StoreDescriptor::NameRegister(),
2524 Operand(prop->key()->AsLiteral()->value()));
2525 __ Pop(StoreDescriptor::ReceiverRegister());
2526 if (FLAG_vector_stores) {
2527 EmitLoadStoreICSlot(expr->AssignmentSlot());
2530 CallStoreIC(expr->AssignmentFeedbackId());
2533 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2534 context()->Plug(x0);
2538 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2539 // Assignment to named property of super.
2541 // stack : receiver ('this'), home_object
2542 DCHECK(prop != NULL);
2543 Literal* key = prop->key()->AsLiteral();
2544 DCHECK(key != NULL);
2546 __ Push(key->value());
2547 __ Push(x0);
2548 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2549 : Runtime::kStoreToSuper_Sloppy),
2550 4);
2554 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2555 // Assignment to keyed property of super.
2557 // stack : receiver ('this'), home_object, key
2558 DCHECK(prop != NULL);
2560 __ Push(x0);
2561 __ CallRuntime(
2562 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2563 : Runtime::kStoreKeyedToSuper_Sloppy),
2564 4);
2568 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2569 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2570 // Assignment to a property, using a keyed store IC.
2572 // TODO(all): Could we pass this in registers rather than on the stack?
2573 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2574 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2576 Handle<Code> ic =
2577 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2578 if (FLAG_vector_stores) {
2579 EmitLoadStoreICSlot(expr->AssignmentSlot());
2582 CallIC(ic, expr->AssignmentFeedbackId());
2585 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2586 context()->Plug(x0);
2590 void FullCodeGenerator::VisitProperty(Property* expr) {
2591 Comment cmnt(masm_, "[ Property");
2592 SetExpressionPosition(expr);
2593 Expression* key = expr->key();
2595 if (key->IsPropertyName()) {
2596 if (!expr->IsSuperAccess()) {
2597 VisitForAccumulatorValue(expr->obj());
2598 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2599 EmitNamedPropertyLoad(expr);
2601 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2602 VisitForStackValue(
2603 expr->obj()->AsSuperPropertyReference()->home_object());
2604 EmitNamedSuperPropertyLoad(expr);
2607 if (!expr->IsSuperAccess()) {
2608 VisitForStackValue(expr->obj());
2609 VisitForAccumulatorValue(expr->key());
2610 __ Move(LoadDescriptor::NameRegister(), x0);
2611 __ Pop(LoadDescriptor::ReceiverRegister());
2612 EmitKeyedPropertyLoad(expr);
2614 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2615 VisitForStackValue(
2616 expr->obj()->AsSuperPropertyReference()->home_object());
2617 VisitForStackValue(expr->key());
2618 EmitKeyedSuperPropertyLoad(expr);
2621 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2622 context()->Plug(x0);
2626 void FullCodeGenerator::CallIC(Handle<Code> code,
2627 TypeFeedbackId ast_id) {
2629 // All calls must have a predictable size in full-codegen code to ensure that
2630 // the debugger can patch them correctly.
2631 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2635 // Code common for calls using the IC.
2636 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2637 Expression* callee = expr->expression();
2639 CallICState::CallType call_type =
2640 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2642 // Get the target function.
2643 if (call_type == CallICState::FUNCTION) {
2644 { StackValueContext context(this);
2645 EmitVariableLoad(callee->AsVariableProxy());
2646 PrepareForBailout(callee, NO_REGISTERS);
2648 // Push undefined as receiver. This is patched in the method prologue if it
2649 // is a sloppy mode method.
2651 UseScratchRegisterScope temps(masm_);
2652 Register temp = temps.AcquireX();
2653 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
2654 __ Push(temp);
2657 // Load the function from the receiver.
2658 DCHECK(callee->IsProperty());
2659 DCHECK(!callee->AsProperty()->IsSuperAccess());
2660 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2661 EmitNamedPropertyLoad(callee->AsProperty());
2662 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2663 // Push the target function under the receiver.
2664 __ Pop(x10);
2665 __ Push(x0, x10);
2668 EmitCall(expr, call_type);
2672 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2673 Expression* callee = expr->expression();
2674 DCHECK(callee->IsProperty());
2675 Property* prop = callee->AsProperty();
2676 DCHECK(prop->IsSuperAccess());
2677 SetExpressionPosition(prop);
2679 Literal* key = prop->key()->AsLiteral();
2680 DCHECK(!key->value()->IsSmi());
2682 // Load the function from the receiver.
2683 const Register scratch = x10;
2684 SuperPropertyReference* super_ref =
2685 callee->AsProperty()->obj()->AsSuperPropertyReference();
2686 VisitForStackValue(super_ref->home_object());
2687 VisitForAccumulatorValue(super_ref->this_var());
2689 __ Peek(scratch, kPointerSize);
2690 __ Push(x0, scratch);
2691 __ Push(key->value());
2692 __ Push(Smi::FromInt(language_mode()));
2696 // - this (receiver)
2697 // - this (receiver) <-- LoadFromSuper will pop here and below.
2700 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2702 // Replace home_object with target function.
2703 __ Poke(x0, kPointerSize);
2706 // - target function
2707 // - this (receiver)
2708 EmitCall(expr, CallICState::METHOD);
2712 // Code common for calls using the IC.
2713 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2714 Expression* key) {
2716 VisitForAccumulatorValue(key);
2718 Expression* callee = expr->expression();
2720 // Load the function from the receiver.
2721 DCHECK(callee->IsProperty());
2722 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2723 __ Move(LoadDescriptor::NameRegister(), x0);
2724 EmitKeyedPropertyLoad(callee->AsProperty());
2725 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2727 // Push the target function under the receiver.
2728 __ Pop(x10);
2729 __ Push(x0, x10);
2731 EmitCall(expr, CallICState::METHOD);
2735 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2736 Expression* callee = expr->expression();
2737 DCHECK(callee->IsProperty());
2738 Property* prop = callee->AsProperty();
2739 DCHECK(prop->IsSuperAccess());
2740 SetExpressionPosition(prop);
2742 // Load the function from the receiver.
2743 const Register scratch = x10;
2744 SuperPropertyReference* super_ref =
2745 callee->AsProperty()->obj()->AsSuperPropertyReference();
2746 VisitForStackValue(super_ref->home_object());
2747 VisitForAccumulatorValue(super_ref->this_var());
2749 __ Peek(scratch, kPointerSize);
2750 __ Push(x0, scratch);
2751 VisitForStackValue(prop->key());
2752 __ Push(Smi::FromInt(language_mode()));
2756 // - this (receiver)
2757 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2761 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2763 // Replace home_object with target function.
2764 __ Poke(x0, kPointerSize);
2767 // - target function
2768 // - this (receiver)
2769 EmitCall(expr, CallICState::METHOD);
2773 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2774 // Load the arguments.
2775 ZoneList<Expression*>* args = expr->arguments();
2776 int arg_count = args->length();
2777 for (int i = 0; i < arg_count; i++) {
2778 VisitForStackValue(args->at(i));
2781 SetCallPosition(expr, arg_count);
2783 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2784 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2785 __ Peek(x1, (arg_count + 1) * kXRegSize);
2786 // Don't assign a type feedback id to the IC, since type feedback is provided
2787 // by the vector above.
2788 CallIC(ic);
2790 RecordJSReturnSite(expr);
2791 // Restore context register.
2792 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2793 context()->DropAndPlug(1, x0);
2797 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2798 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2799 // Prepare to push a copy of the first argument or undefined if it doesn't
2800 // exist.
2801 if (arg_count > 0) {
2802 __ Peek(x9, arg_count * kXRegSize);
2804 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2807 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2809 // Prepare to push the language mode.
2810 __ Mov(x11, Smi::FromInt(language_mode()));
2811 // Prepare to push the start position of the scope the call resides in.
2812 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2815 __ Push(x9, x10, x11, x12);
2817 // Do the runtime call.
2818 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2822 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2823 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2824 VariableProxy* callee = expr->expression()->AsVariableProxy();
2825 if (callee->var()->IsLookupSlot()) {
2826 Label slow, done;
2827 SetExpressionPosition(callee);
2828 // Generate code for loading from variables potentially shadowed
2829 // by eval-introduced variables.
2830 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2831 __ Bind(&slow);
2833 // Call the runtime to find the function to call (returned in x0)
2834 // and the object holding it (returned in x1).
2835 __ Mov(x10, Operand(callee->name()));
2836 __ Push(context_register(), x10);
2837 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2838 __ Push(x0, x1); // Receiver, function.
2839 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2841 // If fast case code has been generated, emit code to push the
2842 // function and receiver and have the slow path jump around this
2843 // code.
2844 if (done.is_linked()) {
2849 // The receiver is implicitly the global receiver. Indicate this
2850 // by passing undefined to the call function stub.
2851 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2856 VisitForStackValue(callee);
2857 // refEnv.WithBaseObject()
2858 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2859 __ Push(x10); // Reserved receiver slot.
2864 void FullCodeGenerator::VisitCall(Call* expr) {
2866 // We want to verify that RecordJSReturnSite gets called on all paths
2867 // through this function. Avoid early returns.
2868 expr->return_is_recorded_ = false;
2871 Comment cmnt(masm_, "[ Call");
2872 Expression* callee = expr->expression();
2873 Call::CallType call_type = expr->GetCallType(isolate());
2875 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2876 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2877 // to resolve the function we need to call. Then we call the resolved
2878 // function using the given arguments.
2879 ZoneList<Expression*>* args = expr->arguments();
2880 int arg_count = args->length();
2882 PushCalleeAndWithBaseObject(expr);
2884 // Push the arguments.
2885 for (int i = 0; i < arg_count; i++) {
2886 VisitForStackValue(args->at(i));
2889 // Push a copy of the function (found below the arguments) and
2890 // resolve eval.
2891 __ Peek(x10, (arg_count + 1) * kPointerSize);
2892 __ Push(x10);
2893 EmitResolvePossiblyDirectEval(arg_count);
2895 // Touch up the stack with the resolved function.
2896 __ Poke(x0, (arg_count + 1) * kPointerSize);
2898 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2900 // Record source position for debugger.
2901 SetCallPosition(expr, arg_count);
2903 // Call the evaluated function.
2904 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2905 __ Peek(x1, (arg_count + 1) * kXRegSize);
2906 __ CallStub(&stub);
2907 RecordJSReturnSite(expr);
2908 // Restore context register.
2909 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2910 context()->DropAndPlug(1, x0);
2912 } else if (call_type == Call::GLOBAL_CALL) {
2913 EmitCallWithLoadIC(expr);
2915 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2916 // Call to a lookup slot (dynamically introduced variable).
2917 PushCalleeAndWithBaseObject(expr);
2918 EmitCall(expr);
2919 } else if (call_type == Call::PROPERTY_CALL) {
2920 Property* property = callee->AsProperty();
2921 bool is_named_call = property->key()->IsPropertyName();
2922 if (property->IsSuperAccess()) {
2923 if (is_named_call) {
2924 EmitSuperCallWithLoadIC(expr);
2926 EmitKeyedSuperCallWithLoadIC(expr);
2929 VisitForStackValue(property->obj());
2930 if (is_named_call) {
2931 EmitCallWithLoadIC(expr);
2933 EmitKeyedCallWithLoadIC(expr, property->key());
2936 } else if (call_type == Call::SUPER_CALL) {
2937 EmitSuperConstructorCall(expr);
2939 DCHECK(call_type == Call::OTHER_CALL);
2940 // Call to an arbitrary expression not handled specially above.
2941 VisitForStackValue(callee);
2942 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2943 __ Push(x1);
2944 // Emit function call.
2945 EmitCall(expr);
2949 // RecordJSReturnSite should have been called.
2950 DCHECK(expr->return_is_recorded_);
2955 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2956 Comment cmnt(masm_, "[ CallNew");
2957 // According to ECMA-262, section 11.2.2, page 44, the function
2958 // expression in new calls must be evaluated before the
2959 // arguments.
2961 // Push constructor on the stack. If it's not a function it's used as
2962 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2963 // discarded.
2964 DCHECK(!expr->expression()->IsSuperPropertyReference());
2965 VisitForStackValue(expr->expression());
2967 // Push the arguments ("left-to-right") on the stack.
2968 ZoneList<Expression*>* args = expr->arguments();
2969 int arg_count = args->length();
2970 for (int i = 0; i < arg_count; i++) {
2971 VisitForStackValue(args->at(i));
2974 // Call the construct call builtin that handles allocation and
2975 // constructor invocation.
2976 SetConstructCallPosition(expr);
2978 // Load function and argument count into x1 and x0.
2979 __ Mov(x0, arg_count);
2980 __ Peek(x1, arg_count * kXRegSize);
2982 // Record call targets in unoptimized code.
2983 if (FLAG_pretenuring_call_new) {
2984 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2985 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
2986 expr->CallNewFeedbackSlot().ToInt() + 1);
2989 __ LoadObject(x2, FeedbackVector());
2990 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2992 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2993 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2994 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2995 context()->Plug(x0);
2999 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3000 SuperCallReference* super_call_ref =
3001 expr->expression()->AsSuperCallReference();
3002 DCHECK_NOT_NULL(super_call_ref);
3004 EmitLoadSuperConstructor(super_call_ref);
3005 __ Push(result_register());
3007 // Push the arguments ("left-to-right") on the stack.
3008 ZoneList<Expression*>* args = expr->arguments();
3009 int arg_count = args->length();
3010 for (int i = 0; i < arg_count; i++) {
3011 VisitForStackValue(args->at(i));
3014 // Call the construct call builtin that handles allocation and
3015 // constructor invocation.
3016 SetConstructCallPosition(expr);
3018 // Load original constructor into x4.
3019 VisitForAccumulatorValue(super_call_ref->new_target_var());
3020 __ Mov(x4, result_register());
3022 // Load function and argument count into x1 and x0.
3023 __ Mov(x0, arg_count);
3024 __ Peek(x1, arg_count * kXRegSize);
3026 // Record call targets in unoptimized code.
3027 if (FLAG_pretenuring_call_new) {
3029 /* TODO(dslomov): support pretenuring.
3030 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3031 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3032 expr->CallNewFeedbackSlot().ToInt() + 1);
3036 __ LoadObject(x2, FeedbackVector());
3037 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
3039 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3040 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3042 RecordJSReturnSite(expr);
3044 context()->Plug(x0);
3048 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3049 ZoneList<Expression*>* args = expr->arguments();
3050 DCHECK(args->length() == 1);
3052 VisitForAccumulatorValue(args->at(0));
3054 Label materialize_true, materialize_false;
3055 Label* if_true = NULL;
3056 Label* if_false = NULL;
3057 Label* fall_through = NULL;
3058 context()->PrepareTest(&materialize_true, &materialize_false,
3059 &if_true, &if_false, &fall_through);
3061 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3062 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
3064 context()->Plug(if_true, if_false);
3068 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3069 ZoneList<Expression*>* args = expr->arguments();
3070 DCHECK(args->length() == 1);
3072 VisitForAccumulatorValue(args->at(0));
3074 Label materialize_true, materialize_false;
3075 Label* if_true = NULL;
3076 Label* if_false = NULL;
3077 Label* fall_through = NULL;
3078 context()->PrepareTest(&materialize_true, &materialize_false,
3079 &if_true, &if_false, &fall_through);
3081 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
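// A value is a non-negative smi exactly when both its tag bits and its sign
// bit (bit 63) are clear, so a single test against the combined mask covers
// the smi check and the sign check at once.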
3083 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3084 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
3086 context()->Plug(if_true, if_false);
3090 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3091 ZoneList<Expression*>* args = expr->arguments();
3092 DCHECK(args->length() == 1);
3094 VisitForAccumulatorValue(args->at(0));
3096 Label materialize_true, materialize_false;
3097 Label* if_true = NULL;
3098 Label* if_false = NULL;
3099 Label* fall_through = NULL;
3100 context()->PrepareTest(&materialize_true, &materialize_false,
3101 &if_true, &if_false, &fall_through);
3103 __ JumpIfSmi(x0, if_false);
3104 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3105 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3106 // Undetectable objects behave like undefined when tested with typeof.
3107 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3108 __ Tbnz(x11, Map::kIsUndetectable, if_false);
3109 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
3110 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3111 __ B(lt, if_false);
3112 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3113 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3114 Split(le, if_true, if_false, fall_through);
3116 context()->Plug(if_true, if_false);
3120 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3121 ZoneList<Expression*>* args = expr->arguments();
3122 DCHECK(args->length() == 1);
3124 VisitForAccumulatorValue(args->at(0));
3126 Label materialize_true, materialize_false;
3127 Label* if_true = NULL;
3128 Label* if_false = NULL;
3129 Label* fall_through = NULL;
3130 context()->PrepareTest(&materialize_true, &materialize_false,
3131 &if_true, &if_false, &fall_through);
3133 __ JumpIfSmi(x0, if_false);
3134 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3135 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3136 Split(ge, if_true, if_false, fall_through);
3138 context()->Plug(if_true, if_false);
3142 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3143 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
3144 ZoneList<Expression*>* args = expr->arguments();
3145 DCHECK(args->length() == 1);
3147 VisitForAccumulatorValue(args->at(0));
3149 Label materialize_true, materialize_false;
3150 Label* if_true = NULL;
3151 Label* if_false = NULL;
3152 Label* fall_through = NULL;
3153 context()->PrepareTest(&materialize_true, &materialize_false,
3154 &if_true, &if_false, &fall_through);
3156 __ JumpIfSmi(x0, if_false);
3157 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3158 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3159 __ Tst(x11, 1 << Map::kIsUndetectable);
3160 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3161 Split(ne, if_true, if_false, fall_through);
3163 context()->Plug(if_true, if_false);
3167 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3168 CallRuntime* expr) {
3169 ZoneList<Expression*>* args = expr->arguments();
3170 DCHECK(args->length() == 1);
3171 VisitForAccumulatorValue(args->at(0));
3173 Label materialize_true, materialize_false, skip_lookup;
3174 Label* if_true = NULL;
3175 Label* if_false = NULL;
3176 Label* fall_through = NULL;
3177 context()->PrepareTest(&materialize_true, &materialize_false,
3178 &if_true, &if_false, &fall_through);
3180 Register object = x0;
3181 __ AssertNotSmi(object);
3183 Register map = x10;
3184 Register bitfield2 = x11;
3185 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3186 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3187 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3189 // Check for a fast-case object. Generate a false result for slow-case objects.
3190 Register props = x12;
3191 Register props_map = x12;
3192 Register hash_table_map = x13;
3193 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3194 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
3195 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
3196 __ Cmp(props_map, hash_table_map);
3199 // Look for valueOf name in the descriptor array, and indicate false if found.
3200 // Since we omit an enumeration index check, if the name is added via a
3201 // transition that shares its descriptor array, this is a false positive.
3203 Label loop, done;
3204 // Skip loop if no descriptors are valid.
3205 Register descriptors = x12;
3206 Register descriptors_length = x13;
3207 __ NumberOfOwnDescriptors(descriptors_length, map);
3208 __ Cbz(descriptors_length, &done);
3210 __ LoadInstanceDescriptors(map, descriptors);
3212 // Calculate the end of the descriptor array.
3213 Register descriptors_end = x14;
3214 __ Mov(x15, DescriptorArray::kDescriptorSize);
3215 __ Mul(descriptors_length, descriptors_length, x15);
3216 // Calculate location of the first key name.
3217 __ Add(descriptors, descriptors,
3218 DescriptorArray::kFirstOffset - kHeapObjectTag);
3219 // Calculate the end of the descriptor array.
3220 __ Add(descriptors_end, descriptors,
3221 Operand(descriptors_length, LSL, kPointerSizeLog2));
3223 // Loop through all the keys in the descriptor array. If one of these is the
3224 // string "valueOf" the result is false.
3225 Register valueof_string = x1;
3226 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
3227 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3228 __ Bind(&loop);
3229 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
3230 __ Cmp(x15, valueof_string);
3231 __ B(eq, if_false);
3232 __ Cmp(descriptors, descriptors_end);
3233 __ B(ne, &loop);
3235 __ Bind(&done);
3237 // Set the bit in the map to indicate that there is no local valueOf field.
3238 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3239 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3240 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3242 __ Bind(&skip_lookup);
3244 // If a valueOf property is not found on the object check that its prototype
3245 // is the unmodified String prototype. If not, the result is false.
3246 Register prototype = x1;
3247 Register global_idx = x2;
3248 Register native_context = x2;
3249 Register string_proto = x3;
3250 Register proto_map = x4;
3251 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3252 __ JumpIfSmi(prototype, if_false);
3253 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
3254 __ Ldr(global_idx, GlobalObjectMemOperand());
3255 __ Ldr(native_context,
3256 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
3257 __ Ldr(string_proto,
3258 ContextMemOperand(native_context,
3259 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3260 __ Cmp(proto_map, string_proto);
3262 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3263 Split(eq, if_true, if_false, fall_through);
3265 context()->Plug(if_true, if_false);
3269 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3270 ZoneList<Expression*>* args = expr->arguments();
3271 DCHECK(args->length() == 1);
3273 VisitForAccumulatorValue(args->at(0));
3275 Label materialize_true, materialize_false;
3276 Label* if_true = NULL;
3277 Label* if_false = NULL;
3278 Label* fall_through = NULL;
3279 context()->PrepareTest(&materialize_true, &materialize_false,
3280 &if_true, &if_false, &fall_through);
3282 __ JumpIfSmi(x0, if_false);
3283 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3285 Split(eq, if_true, if_false, fall_through);
3287 context()->Plug(if_true, if_false);
3291 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3292 ZoneList<Expression*>* args = expr->arguments();
3293 DCHECK(args->length() == 1);
3295 VisitForAccumulatorValue(args->at(0));
3297 Label materialize_true, materialize_false;
3298 Label* if_true = NULL;
3299 Label* if_false = NULL;
3300 Label* fall_through = NULL;
3301 context()->PrepareTest(&materialize_true, &materialize_false,
3302 &if_true, &if_false, &fall_through);
3304 // Only a HeapNumber can be -0.0, so return false if we have something else.
3305 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3307 // Test the bit pattern.
3308 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3309 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
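// -0.0 is the only HeapNumber payload with bit pattern 0x8000000000000000
// (INT64_MIN), and subtracting 1 overflows for exactly that value, so
// splitting on the overflow flag below isolates -0.0.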
3311 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3312 Split(vs, if_true, if_false, fall_through);
3314 context()->Plug(if_true, if_false);
3318 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3319 ZoneList<Expression*>* args = expr->arguments();
3320 DCHECK(args->length() == 1);
3322 VisitForAccumulatorValue(args->at(0));
3324 Label materialize_true, materialize_false;
3325 Label* if_true = NULL;
3326 Label* if_false = NULL;
3327 Label* fall_through = NULL;
3328 context()->PrepareTest(&materialize_true, &materialize_false,
3329 &if_true, &if_false, &fall_through);
3331 __ JumpIfSmi(x0, if_false);
3332 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3333 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3334 Split(eq, if_true, if_false, fall_through);
3336 context()->Plug(if_true, if_false);
3340 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3341 ZoneList<Expression*>* args = expr->arguments();
3342 DCHECK(args->length() == 1);
3344 VisitForAccumulatorValue(args->at(0));
3346 Label materialize_true, materialize_false;
3347 Label* if_true = NULL;
3348 Label* if_false = NULL;
3349 Label* fall_through = NULL;
3350 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3351 &if_false, &fall_through);
3353 __ JumpIfSmi(x0, if_false);
3354 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
3355 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3356 Split(eq, if_true, if_false, fall_through);
3358 context()->Plug(if_true, if_false);
3362 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3363 ZoneList<Expression*>* args = expr->arguments();
3364 DCHECK(args->length() == 1);
3366 VisitForAccumulatorValue(args->at(0));
3368 Label materialize_true, materialize_false;
3369 Label* if_true = NULL;
3370 Label* if_false = NULL;
3371 Label* fall_through = NULL;
3372 context()->PrepareTest(&materialize_true, &materialize_false,
3373 &if_true, &if_false, &fall_through);
3375 __ JumpIfSmi(x0, if_false);
3376 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3377 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3378 Split(eq, if_true, if_false, fall_through);
3380 context()->Plug(if_true, if_false);
3384 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3385 ZoneList<Expression*>* args = expr->arguments();
3386 DCHECK(args->length() == 1);
3388 VisitForAccumulatorValue(args->at(0));
3390 Label materialize_true, materialize_false;
3391 Label* if_true = NULL;
3392 Label* if_false = NULL;
3393 Label* fall_through = NULL;
3394 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3395 &if_false, &fall_through);
3397 __ JumpIfSmi(x0, if_false);
3398 Register map = x10;
3399 Register type_reg = x11;
3400 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3401 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3402 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3403 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3404 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3405 Split(ls, if_true, if_false, fall_through);
3407 context()->Plug(if_true, if_false);
3411 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3412 DCHECK(expr->arguments()->length() == 0);
3414 Label materialize_true, materialize_false;
3415 Label* if_true = NULL;
3416 Label* if_false = NULL;
3417 Label* fall_through = NULL;
3418 context()->PrepareTest(&materialize_true, &materialize_false,
3419 &if_true, &if_false, &fall_through);
3421 // Get the frame pointer for the calling frame.
3422 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3424 // Skip the arguments adaptor frame if it exists.
3425 Label check_frame_marker;
3426 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3427 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3428 __ B(ne, &check_frame_marker);
3429 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3431 // Check the marker in the calling frame.
3432 __ Bind(&check_frame_marker);
3433 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3434 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3435 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3436 Split(eq, if_true, if_false, fall_through);
3438 context()->Plug(if_true, if_false);
3442 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3443 ZoneList<Expression*>* args = expr->arguments();
3444 DCHECK(args->length() == 2);
3446 // Load the two objects into registers and perform the comparison.
3447 VisitForStackValue(args->at(0));
3448 VisitForAccumulatorValue(args->at(1));
3450 Label materialize_true, materialize_false;
3451 Label* if_true = NULL;
3452 Label* if_false = NULL;
3453 Label* fall_through = NULL;
3454 context()->PrepareTest(&materialize_true, &materialize_false,
3455 &if_true, &if_false, &fall_through);
3457 __ Pop(x1);
3458 __ Cmp(x0, x1);
3459 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3460 Split(eq, if_true, if_false, fall_through);
3462 context()->Plug(if_true, if_false);
3466 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3467 ZoneList<Expression*>* args = expr->arguments();
3468 DCHECK(args->length() == 1);
3470 // ArgumentsAccessStub expects the key in x1.
3471 VisitForAccumulatorValue(args->at(0));
3472 __ Mov(x1, x0);
3473 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3474 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3475 __ CallStub(&stub);
3476 context()->Plug(x0);
3480 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3481 DCHECK(expr->arguments()->length() == 0);
3483 // Get the number of formal parameters.
3484 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3486 // Check if the calling frame is an arguments adaptor frame.
3487 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3488 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3489 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3492 // Arguments adaptor case: Read the arguments length from the
3493 // adaptor frame.
3494 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3497 context()->Plug(x0);
3501 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3502 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3503 ZoneList<Expression*>* args = expr->arguments();
3504 DCHECK(args->length() == 1);
3505 Label done, null, function, non_function_constructor;
3507 VisitForAccumulatorValue(args->at(0));
3509 // If the object is a smi, we return null.
3510 __ JumpIfSmi(x0, &null);
3512 // Check that the object is a JS object but take special care of JS
3513 // functions to make sure they have 'Function' as their class.
3514 // Assume that there are only two callable types, and one of them is at
3515 // either end of the type range for JS object types. Saves extra comparisons.
3516 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3517 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3518 // x10: object's map.
3519 // x11: object's type.
3520 __ B(lt, &null);
3521 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3522 FIRST_SPEC_OBJECT_TYPE + 1);
3523 __ B(eq, &function);
3525 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3526 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3527 LAST_SPEC_OBJECT_TYPE - 1);
3528 __ B(eq, &function);
3529 // Assume that there is no larger type.
3530 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3532 // Check if the constructor in the map is a JS function.
3533 Register instance_type = x14;
3534 __ GetMapConstructor(x12, x10, x13, instance_type);
3535 __ Cmp(instance_type, JS_FUNCTION_TYPE);
3536 __ B(ne, &non_function_constructor);
3538 // x12 now contains the constructor function. Grab the
3539 // instance class name from there.
3540 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3541 __ Ldr(x0,
3542 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3545 // Functions have class 'Function'.
3547 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3550 // Objects with a non-function constructor have class 'Object'.
3551 __ Bind(&non_function_constructor);
3552 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3555 // Non-JS objects have class null.
3557 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3562 context()->Plug(x0);
3566 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3567 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3568 ZoneList<Expression*>* args = expr->arguments();
3569 DCHECK(args->length() == 1);
3570 VisitForAccumulatorValue(args->at(0)); // Load the object.
3572 Label done;
3573 // If the object is a smi, return the object.
3574 __ JumpIfSmi(x0, &done);
3575 // If the object is not a value type, return the object.
3576 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3577 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3580 context()->Plug(x0);
3584 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3585 ZoneList<Expression*>* args = expr->arguments();
3586 DCHECK_EQ(1, args->length());
3588 VisitForAccumulatorValue(args->at(0));
3590 Label materialize_true, materialize_false;
3591 Label* if_true = nullptr;
3592 Label* if_false = nullptr;
3593 Label* fall_through = nullptr;
3594 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3595 &if_false, &fall_through);
3597 __ JumpIfSmi(x0, if_false);
3598 __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
3599 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3600 Split(eq, if_true, if_false, fall_through);
3602 context()->Plug(if_true, if_false);
3606 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3607 ZoneList<Expression*>* args = expr->arguments();
3608 DCHECK(args->length() == 2);
3609 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3610 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3612 VisitForAccumulatorValue(args->at(0)); // Load the object.
3614 Register object = x0;
3615 Register result = x0;
3616 Register stamp_addr = x10;
3617 Register stamp_cache = x11;
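// Field 0 is the date's time value itself; fields below
// JSDate::kFirstUncachedField can be served from the per-object cache as
// long as the object's cache stamp still matches the isolate's date cache
// stamp, and everything else goes to the C++ runtime.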
3619 if (index->value() == 0) {
3620 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3622 Label runtime, done;
3623 if (index->value() < JSDate::kFirstUncachedField) {
3624 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3625 __ Mov(stamp_addr, stamp);
3626 __ Ldr(stamp_addr, MemOperand(stamp_addr));
3627 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3628 __ Cmp(stamp_addr, stamp_cache);
3629 __ B(ne, &runtime);
3630 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3631 kPointerSize * index->value()));
3632 __ B(&done);
3635 __ Bind(&runtime);
3636 __ Mov(x1, index);
3637 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3639 __ Bind(&done);
3641 context()->Plug(result);
3645 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 DCHECK_EQ(3, args->length());
3649 Register string = x0;
3650 Register index = x1;
3651 Register value = x2;
3652 Register scratch = x10;
3654 VisitForStackValue(args->at(0)); // index
3655 VisitForStackValue(args->at(1)); // value
3656 VisitForAccumulatorValue(args->at(2)); // string
3657 __ Pop(value, index);
3659 if (FLAG_debug_code) {
3660 __ AssertSmi(value, kNonSmiValue);
3661 __ AssertSmi(index, kNonSmiIndex);
3662 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3663 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3664 one_byte_seq_type);
3665 }
3667 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3669 __ SmiUntag(index);
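// 'scratch' now points at the first character of the string payload, so the
// untagged index addresses string[index] directly in the store below.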
3670 __ Strb(value, MemOperand(scratch, index));
3671 context()->Plug(string);
3672 }
3675 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments();
3677 DCHECK_EQ(3, args->length());
3679 Register string = x0;
3680 Register index = x1;
3681 Register value = x2;
3682 Register scratch = x10;
3684 VisitForStackValue(args->at(0)); // index
3685 VisitForStackValue(args->at(1)); // value
3686 VisitForAccumulatorValue(args->at(2)); // string
3687 __ Pop(value, index);
3689 if (FLAG_debug_code) {
3690 __ AssertSmi(value, kNonSmiValue);
3691 __ AssertSmi(index, kNonSmiIndex);
3692 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3693 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3694 two_byte_seq_type);
3695 }
3697 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3699 __ SmiUntag(index);
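// Two-byte characters occupy a halfword each, hence the LSL #1 scaling of
// the index in the store below.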
3700 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3701 context()->Plug(string);
3702 }
3705 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3706 ZoneList<Expression*>* args = expr->arguments();
3707 DCHECK(args->length() == 2);
3708 VisitForStackValue(args->at(0)); // Load the object.
3709 VisitForAccumulatorValue(args->at(1)); // Load the value.
3710 __ Pop(x1);
3711 // x0 = value.
3712 // x1 = object.
3714 Label done;
3715 // If the object is a smi, return the value.
3716 __ JumpIfSmi(x1, &done);
3718 // If the object is not a value type, return the value.
3719 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3721 // Store the value.
3722 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3723 // Update the write barrier. Save the value as it will be
3724 // overwritten by the write barrier code and is needed afterward.
3725 __ Mov(x10, x0);
3726 __ RecordWriteField(
3727 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3729 __ Bind(&done);
3730 context()->Plug(x0);
3731 }
3734 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3735 ZoneList<Expression*>* args = expr->arguments();
3736 DCHECK_EQ(args->length(), 1);
3738 // Load the argument into x0 and call the stub.
3739 VisitForAccumulatorValue(args->at(0));
3741 NumberToStringStub stub(isolate());
3742 __ CallStub(&stub);
3743 context()->Plug(x0);
3744 }
3747 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3748 ZoneList<Expression*>* args = expr->arguments();
3749 DCHECK(args->length() == 1);
3751 VisitForAccumulatorValue(args->at(0));
3753 Label done;
3754 Register code = x0;
3755 Register result = x1;
3757 StringCharFromCodeGenerator generator(code, result);
3758 generator.GenerateFast(masm_);
3759 __ B(&done);
3761 NopRuntimeCallHelper call_helper;
3762 generator.GenerateSlow(masm_, call_helper);
3764 __ Bind(&done);
3765 context()->Plug(result);
3766 }
3769 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3770 ZoneList<Expression*>* args = expr->arguments();
3771 DCHECK(args->length() == 2);
3773 VisitForStackValue(args->at(0));
3774 VisitForAccumulatorValue(args->at(1));
3776 Register object = x1;
3777 Register index = x0;
3778 Register result = x3;
3780 __ Pop(object);
3782 Label need_conversion;
3783 Label index_out_of_range;
3784 Label done;
3785 StringCharCodeAtGenerator generator(object,
3786 index,
3787 result,
3788 &need_conversion,
3789 &need_conversion,
3790 &index_out_of_range,
3791 STRING_INDEX_IS_NUMBER);
3792 generator.GenerateFast(masm_);
3793 __ B(&done);
3795 __ Bind(&index_out_of_range);
3796 // When the index is out of range, the spec requires us to return NaN.
3797 __ LoadRoot(result, Heap::kNanValueRootIndex);
3798 __ B(&done);
3800 __ Bind(&need_conversion);
3801 // Load the undefined value into the result register, which will
3802 // trigger conversion.
3803 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3804 __ B(&done);
3806 NopRuntimeCallHelper call_helper;
3807 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3809 __ Bind(&done);
3810 context()->Plug(result);
3811 }
3814 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3815 ZoneList<Expression*>* args = expr->arguments();
3816 DCHECK(args->length() == 2);
3818 VisitForStackValue(args->at(0));
3819 VisitForAccumulatorValue(args->at(1));
3821 Register object = x1;
3822 Register index = x0;
3823 Register result = x0;
3825 __ Pop(object);
3827 Label need_conversion;
3828 Label index_out_of_range;
3829 Label done;
3830 StringCharAtGenerator generator(object,
3831 index,
3832 x3,
3833 result,
3834 &need_conversion,
3835 &need_conversion,
3836 &index_out_of_range,
3837 STRING_INDEX_IS_NUMBER);
3838 generator.GenerateFast(masm_);
3839 __ B(&done);
3841 __ Bind(&index_out_of_range);
3842 // When the index is out of range, the spec requires us to return
3843 // the empty string.
3844 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3845 __ B(&done);
3847 __ Bind(&need_conversion);
3848 // Move smi zero into the result register, which will trigger conversion.
3849 __ Mov(result, Smi::FromInt(0));
3850 __ B(&done);
3852 NopRuntimeCallHelper call_helper;
3853 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3855 __ Bind(&done);
3856 context()->Plug(result);
3857 }
3860 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3861 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3862 ZoneList<Expression*>* args = expr->arguments();
3863 DCHECK_EQ(2, args->length());
3865 VisitForStackValue(args->at(0));
3866 VisitForAccumulatorValue(args->at(1));
3868 __ Pop(x1);
3869 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3870 __ CallStub(&stub);
3872 context()->Plug(x0);
3873 }
3876 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3877 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3878 ZoneList<Expression*>* args = expr->arguments();
3879 DCHECK(args->length() >= 2);
3881 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3882 for (int i = 0; i < arg_count + 1; i++) {
3883 VisitForStackValue(args->at(i));
3884 }
3885 VisitForAccumulatorValue(args->last()); // Function.
3887 Label runtime, done;
3888 // Check for non-function argument (including proxy).
3889 __ JumpIfSmi(x0, &runtime);
3890 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3892 // InvokeFunction requires the function in x1. Move it in there.
3893 __ Mov(x1, x0);
3894 ParameterCount count(arg_count);
3895 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3896 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3897 __ B(&done);
3899 __ Bind(&runtime);
3900 __ Push(x0);
3901 __ CallRuntime(Runtime::kCall, args->length());
3902 __ Bind(&done);
3904 context()->Plug(x0);
3905 }
3908 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3909 ZoneList<Expression*>* args = expr->arguments();
3910 DCHECK(args->length() == 2);
3912 // new.target
3913 VisitForStackValue(args->at(0));
3915 // .this_function
3916 VisitForStackValue(args->at(1));
3917 __ CallRuntime(Runtime::kGetPrototype, 1);
3918 __ Push(result_register());
3920 // Load original constructor into x4.
3921 __ Peek(x4, 1 * kPointerSize);
3923 // Check if the calling frame is an arguments adaptor frame.
3924 Label adaptor_frame, args_set_up, runtime;
3925 __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3926 __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
3927 __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3928 __ B(eq, &adaptor_frame);
3929 // default constructor has no arguments, so no adaptor frame means no args.
3930 __ Mov(x0, Operand(0));
3931 __ B(&args_set_up);
3933 // Copy arguments from adaptor frame.
3934 {
3935 __ bind(&adaptor_frame);
3936 __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
3937 __ SmiUntag(x1, x1);
3939 __ Mov(x0, x1);
3941 // Get arguments pointer in x11.
3942 __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
3943 __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
3944 Label loop;
3945 __ bind(&loop);
3946 // Pre-decrement x11 with kPointerSize on each iteration.
3947 // Pre-decrement in order to skip receiver.
3948 __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
3949 __ Push(x10);
3950 __ Sub(x1, x1, Operand(1));
3951 __ Cbnz(x1, &loop);
3952 }
3954 __ bind(&args_set_up);
3955 __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
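// x0 holds the argument count, so this Peek skips past the pushed arguments
// and loads the parent constructor obtained from Runtime::kGetPrototype
// above.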
3956 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
3958 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
3959 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3961 __ Drop(1);
3963 context()->Plug(result_register());
3964 }
3967 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3968 RegExpConstructResultStub stub(isolate());
3969 ZoneList<Expression*>* args = expr->arguments();
3970 DCHECK(args->length() == 3);
3971 VisitForStackValue(args->at(0));
3972 VisitForStackValue(args->at(1));
3973 VisitForAccumulatorValue(args->at(2));
3974 __ Pop(x1, x2);
3975 __ CallStub(&stub);
3976 context()->Plug(x0);
3977 }
3980 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3981 ZoneList<Expression*>* args = expr->arguments();
3982 DCHECK_EQ(2, args->length());
3983 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
3984 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3986 Handle<FixedArray> jsfunction_result_caches(
3987 isolate()->native_context()->jsfunction_result_caches());
3988 if (jsfunction_result_caches->length() <= cache_id) {
3989 __ Abort(kAttemptToUseUndefinedCache);
3990 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3991 context()->Plug(x0);
3992 return;
3993 }
3995 VisitForAccumulatorValue(args->at(1));
3997 Register key = x0;
3998 Register cache = x1;
3999 __ Ldr(cache, GlobalObjectMemOperand());
4000 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4001 __ Ldr(cache, ContextMemOperand(cache,
4002 Context::JSFUNCTION_RESULT_CACHES_INDEX));
4003 __ Ldr(cache,
4004 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4006 Label done;
4007 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
4008 JSFunctionResultCache::kFingerOffset));
4009 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
4010 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
4012 // Load the key and data from the cache.
4013 __ Ldp(x2, x3, MemOperand(x3));
4015 __ Cmp(key, x2);
4016 __ CmovX(x0, x3, eq);
4017 __ B(eq, &done);
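// On a cache hit (key == x2) the CmovX above has already placed the cached
// value in x0 and we branch to done; otherwise fall through to the runtime
// lookup.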
4019 // Call runtime to perform the lookup.
4020 __ Push(cache, key);
4021 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4023 __ Bind(&done);
4024 context()->Plug(x0);
4025 }
4028 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4029 ZoneList<Expression*>* args = expr->arguments();
4030 VisitForAccumulatorValue(args->at(0));
4032 Label materialize_true, materialize_false;
4033 Label* if_true = NULL;
4034 Label* if_false = NULL;
4035 Label* fall_through = NULL;
4036 context()->PrepareTest(&materialize_true, &materialize_false,
4037 &if_true, &if_false, &fall_through);
4039 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4040 __ Tst(x10, String::kContainsCachedArrayIndexMask);
4041 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4042 Split(eq, if_true, if_false, fall_through);
4044 context()->Plug(if_true, if_false);
4045 }
4048 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4049 ZoneList<Expression*>* args = expr->arguments();
4050 DCHECK(args->length() == 1);
4051 VisitForAccumulatorValue(args->at(0));
4053 __ AssertString(x0);
4055 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4056 __ IndexFromHash(x10, x0);
4058 context()->Plug(x0);
4059 }
4062 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4063 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
4065 ZoneList<Expression*>* args = expr->arguments();
4066 DCHECK(args->length() == 2);
4067 VisitForStackValue(args->at(1));
4068 VisitForAccumulatorValue(args->at(0));
4070 Register array = x0;
4071 Register result = x0;
4072 Register elements = x1;
4073 Register element = x2;
4074 Register separator = x3;
4075 Register array_length = x4;
4076 Register result_pos = x5;
4077 Register map = x6;
4078 Register string_length = x10;
4079 Register elements_end = x11;
4080 Register string = x12;
4081 Register scratch1 = x13;
4082 Register scratch2 = x14;
4083 Register scratch3 = x7;
4084 Register separator_length = x15;
4086 Label bailout, done, one_char_separator, long_separator,
4087 non_trivial_array, not_size_one_array, loop,
4088 empty_separator_loop, one_char_separator_loop,
4089 one_char_separator_loop_entry, long_separator_loop;
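// Fast-path outline: validate the array and each element, sum the string
// lengths, allocate one sequential one-byte result string, then copy the
// elements (and separators) into it, bailing out to slower generic code on
// any unexpected input.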
4091 // The separator operand is on the stack.
4092 __ Pop(separator);
4094 // Check that the array is a JSArray.
4095 __ JumpIfSmi(array, &bailout);
4096 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4098 // Check that the array has fast elements.
4099 __ CheckFastElements(map, scratch1, &bailout);
4101 // If the array has length zero, return the empty string.
4102 // Load and untag the length of the array.
4103 // It is an unsigned value, so we can skip sign extension.
4104 // We assume little endianness.
4105 __ Ldrsw(array_length,
4106 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
4107 __ Cbnz(array_length, &non_trivial_array);
4108 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4109 __ B(&done);
4111 __ Bind(&non_trivial_array);
4112 // Get the FixedArray containing array's elements.
4113 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4115 // Check that all array elements are sequential one-byte strings, and
4116 // accumulate the sum of their lengths.
4117 __ Mov(string_length, 0);
4118 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4119 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4120 // Loop condition: while (element < elements_end).
4121 // Live values in registers:
4122 // elements: Fixed array of strings.
4123 // array_length: Length of the fixed array of strings (not smi)
4124 // separator: Separator string
4125 // string_length: Accumulated sum of string lengths (not smi).
4126 // element: Current array element.
4127 // elements_end: Array end.
4128 if (FLAG_debug_code) {
4129 __ Cmp(array_length, 0);
4130 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4131 }
4132 __ Bind(&loop);
4133 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4134 __ JumpIfSmi(string, &bailout);
4135 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4136 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4137 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4138 __ Ldrsw(scratch1,
4139 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
4140 __ Adds(string_length, string_length, scratch1);
4141 __ B(vs, &bailout);
4142 __ Cmp(element, elements_end);
4143 __ B(lt, &loop);
4145 // If array_length is 1, return elements[0], a string.
4146 __ Cmp(array_length, 1);
4147 __ B(ne, &not_size_one_array);
4148 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
4149 __ B(&done);
4151 __ Bind(&not_size_one_array);
4153 // Live values in registers:
4154 // separator: Separator string
4155 // array_length: Length of the array (not smi).
4156 // string_length: Sum of string lengths (not smi).
4157 // elements: FixedArray of strings.
4159 // Check that the separator is a flat one-byte string.
4160 __ JumpIfSmi(separator, &bailout);
4161 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4162 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4163 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4165 // Add (separator length times array_length) - separator length to the
4166 // string_length to get the length of the result string.
4167 // Load the separator length as untagged.
4168 // We assume little endianness, and that the length is positive.
4169 __ Ldrsw(separator_length,
4170 UntagSmiFieldMemOperand(separator,
4171 SeqOneByteString::kLengthOffset));
4172 __ Sub(string_length, string_length, separator_length);
4173 __ Umaddl(string_length, array_length.W(), separator_length.W(),
4174 string_length);
4176 // Get first element in the array.
4177 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4178 // Live values in registers:
4179 // element: First array element
4180 // separator: Separator string
4181 // string_length: Length of result string (not smi)
4182 // array_length: Length of the array (not smi).
4183 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
4184 &bailout);
4186 // Prepare for looping. Set up elements_end to end of the array. Set
4187 // result_pos to the position of the result where to write the first
4188 // character.
4189 // TODO(all): useless unless AllocateOneByteString trashes the register.
4190 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4191 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4193 // Check the length of the separator.
4194 __ Cmp(separator_length, 1);
4195 __ B(eq, &one_char_separator);
4196 __ B(gt, &long_separator);
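// Neither branch taken: separator_length is 0, so fall through into the
// empty separator case below.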
4198 // Empty separator case
4199 __ Bind(&empty_separator_loop);
4200 // Live values in registers:
4201 // result_pos: the position to which we are currently copying characters.
4202 // element: Current array element.
4203 // elements_end: Array end.
4205 // Copy next array element to the result.
4206 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4207 __ Ldrsw(string_length,
4208 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4209 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4210 __ CopyBytes(result_pos, string, string_length, scratch1);
4211 __ Cmp(element, elements_end);
4212 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4213 __ B(&done);
4215 // One-character separator case
4216 __ Bind(&one_char_separator);
4217 // Replace separator with its one-byte character value.
4218 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4219 // Jump into the loop after the code that copies the separator, so the first
4220 // element is not preceded by a separator.
4221 __ B(&one_char_separator_loop_entry);
4223 __ Bind(&one_char_separator_loop);
4224 // Live values in registers:
4225 // result_pos: the position to which we are currently copying characters.
4226 // element: Current array element.
4227 // elements_end: Array end.
4228 // separator: Single separator one-byte char (in lower byte).
4230 // Copy the separator character to the result.
4231 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4233 // Copy next array element to the result.
4234 __ Bind(&one_char_separator_loop_entry);
4235 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4236 __ Ldrsw(string_length,
4237 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4238 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4239 __ CopyBytes(result_pos, string, string_length, scratch1);
4240 __ Cmp(element, elements_end);
4241 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4242 __ B(&done);
4244 // Long separator case (separator is more than one character). Entry is at the
4245 // label long_separator below.
4246 __ Bind(&long_separator_loop);
4247 // Live values in registers:
4248 // result_pos: the position to which we are currently copying characters.
4249 // element: Current array element.
4250 // elements_end: Array end.
4251 // separator: Separator string.
4253 // Copy the separator to the result.
4254 // TODO(all): hoist next two instructions.
4255 __ Ldrsw(string_length,
4256 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4257 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4258 __ CopyBytes(result_pos, string, string_length, scratch1);
4260 __ Bind(&long_separator);
4261 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4262 __ Ldrsw(string_length,
4263 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4264 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4265 __ CopyBytes(result_pos, string, string_length, scratch1);
4266 __ Cmp(element, elements_end);
4267 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4268 __ B(&done);
4270 __ Bind(&bailout);
4271 // Returning undefined will force slower code to handle it.
4272 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4273 __ Bind(&done);
4274 context()->Plug(result);
4275 }
4278 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4279 DCHECK(expr->arguments()->length() == 0);
4280 ExternalReference debug_is_active =
4281 ExternalReference::debug_is_active_address(isolate());
4282 __ Mov(x10, debug_is_active);
4283 __ Ldrb(x0, MemOperand(x10));
4284 __ SmiTag(x0);
4285 context()->Plug(x0);
4286 }
4289 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4290 // Push the builtins object as the receiver.
4291 __ Ldr(x10, GlobalObjectMemOperand());
4292 __ Ldr(LoadDescriptor::ReceiverRegister(),
4293 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4294 __ Push(LoadDescriptor::ReceiverRegister());
4296 // Load the function from the receiver.
4297 Handle<String> name = expr->name();
4298 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4299 __ Mov(LoadDescriptor::SlotRegister(),
4300 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4301 CallLoadIC(NOT_INSIDE_TYPEOF);
4302 }
4305 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4306 ZoneList<Expression*>* args = expr->arguments();
4307 int arg_count = args->length();
4309 SetCallPosition(expr, arg_count);
4310 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4311 __ Peek(x1, (arg_count + 1) * kPointerSize);
4312 __ CallStub(&stub);
4313 }
4316 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4317 ZoneList<Expression*>* args = expr->arguments();
4318 int arg_count = args->length();
4320 if (expr->is_jsruntime()) {
4321 Comment cmnt(masm_, "[ CallRuntime");
4322 EmitLoadJSRuntimeFunction(expr);
4324 // Push the target function under the receiver.
4325 __ Pop(x10);
4326 __ Push(x0, x10);
4328 for (int i = 0; i < arg_count; i++) {
4329 VisitForStackValue(args->at(i));
4330 }
4332 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4333 EmitCallJSRuntimeFunction(expr);
4335 // Restore context register.
4336 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4338 context()->DropAndPlug(1, x0);
4340 } else {
4341 const Runtime::Function* function = expr->function();
4342 switch (function->function_id) {
4343 #define CALL_INTRINSIC_GENERATOR(Name) \
4344 case Runtime::kInline##Name: { \
4345 Comment cmnt(masm_, "[ Inline" #Name); \
4346 return Emit##Name(expr); \
4347 }
4348 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4349 #undef CALL_INTRINSIC_GENERATOR
4350 default: {
4351 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4352 // Push the arguments ("left-to-right").
4353 for (int i = 0; i < arg_count; i++) {
4354 VisitForStackValue(args->at(i));
4355 }
4357 // Call the C runtime function.
4358 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4359 __ CallRuntime(expr->function(), arg_count);
4360 context()->Plug(x0);
4361 }
4362 }
4363 }
4364 }
4367 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4368 switch (expr->op()) {
4369 case Token::DELETE: {
4370 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4371 Property* property = expr->expression()->AsProperty();
4372 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4374 if (property != NULL) {
4375 VisitForStackValue(property->obj());
4376 VisitForStackValue(property->key());
4377 __ Mov(x10, Smi::FromInt(language_mode()));
4378 __ Push(x10);
4379 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4380 context()->Plug(x0);
4381 } else if (proxy != NULL) {
4382 Variable* var = proxy->var();
4383 // Delete of an unqualified identifier is disallowed in strict mode but
4384 // "delete this" is allowed.
4385 bool is_this = var->HasThisName(isolate());
4386 DCHECK(is_sloppy(language_mode()) || is_this);
4387 if (var->IsUnallocatedOrGlobalSlot()) {
4388 __ Ldr(x12, GlobalObjectMemOperand());
4389 __ Mov(x11, Operand(var->name()));
4390 __ Mov(x10, Smi::FromInt(SLOPPY));
4391 __ Push(x12, x11, x10);
4392 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4393 context()->Plug(x0);
4394 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4395 // Result of deleting non-global, non-dynamic variables is false.
4396 // The subexpression does not have side effects.
4397 context()->Plug(is_this);
4398 } else {
4399 // Non-global variable. Call the runtime to try to delete from the
4400 // context where the variable was introduced.
4401 __ Mov(x2, Operand(var->name()));
4402 __ Push(context_register(), x2);
4403 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4404 context()->Plug(x0);
4405 }
4406 } else {
4407 // Result of deleting non-property, non-variable reference is true.
4408 // The subexpression may have side effects.
4409 VisitForEffect(expr->expression());
4410 context()->Plug(true);
4411 }
4412 break;
4413 }
4415 case Token::VOID: {
4416 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4417 VisitForEffect(expr->expression());
4418 context()->Plug(Heap::kUndefinedValueRootIndex);
4419 break;
4420 }
4421 case Token::NOT: {
4422 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4423 if (context()->IsEffect()) {
4424 // Unary NOT has no side effects so it's only necessary to visit the
4425 // subexpression. Match the optimizing compiler by not branching.
4426 VisitForEffect(expr->expression());
4427 } else if (context()->IsTest()) {
4428 const TestContext* test = TestContext::cast(context());
4429 // The labels are swapped for the recursive call.
4430 VisitForControl(expr->expression(),
4431 test->false_label(),
4432 test->true_label(),
4433 test->fall_through());
4434 context()->Plug(test->true_label(), test->false_label());
4436 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4437 // TODO(jbramley): This could be much more efficient using (for
4438 // example) the CSEL instruction.
4439 Label materialize_true, materialize_false, done;
4440 VisitForControl(expr->expression(),
4441 &materialize_false,
4442 &materialize_true,
4443 &materialize_true);
4445 __ Bind(&materialize_true);
4446 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4447 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4448 __ B(&done);
4450 __ Bind(&materialize_false);
4451 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4452 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4453 __ Bind(&done);
4456 if (context()->IsStackValue()) {
4457 __ Push(result_register());
4458 }
4459 }
4460 break;
4461 }
4462 case Token::TYPEOF: {
4463 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4464 {
4465 AccumulatorValueContext context(this);
4466 VisitForTypeofValue(expr->expression());
4467 }
4468 __ Mov(x3, x0);
4469 TypeofStub typeof_stub(isolate());
4470 __ CallStub(&typeof_stub);
4471 context()->Plug(x0);
4472 break;
4473 }
4475 default:
4476 UNREACHABLE();
4477 }
4478 }
4480 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4481 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4483 Comment cmnt(masm_, "[ CountOperation");
4485 Property* prop = expr->expression()->AsProperty();
4486 LhsKind assign_type = Property::GetAssignType(prop);
4488 // Evaluate expression and get value.
4489 if (assign_type == VARIABLE) {
4490 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4491 AccumulatorValueContext context(this);
4492 EmitVariableLoad(expr->expression()->AsVariableProxy());
4493 } else {
4494 // Reserve space for result of postfix operation.
4495 if (expr->is_postfix() && !context()->IsEffect()) {
4496 __ Push(xzr);
4497 }
4498 switch (assign_type) {
4499 case NAMED_PROPERTY: {
4500 // Put the object both on the stack and in the register.
4501 VisitForStackValue(prop->obj());
4502 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4503 EmitNamedPropertyLoad(prop);
4504 break;
4505 }
4507 case NAMED_SUPER_PROPERTY: {
4508 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4509 VisitForAccumulatorValue(
4510 prop->obj()->AsSuperPropertyReference()->home_object());
4511 __ Push(result_register());
4512 const Register scratch = x10;
4513 __ Peek(scratch, kPointerSize);
4514 __ Push(scratch, result_register());
4515 EmitNamedSuperPropertyLoad(prop);
4516 break;
4517 }
4519 case KEYED_SUPER_PROPERTY: {
4520 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4521 VisitForStackValue(
4522 prop->obj()->AsSuperPropertyReference()->home_object());
4523 VisitForAccumulatorValue(prop->key());
4524 __ Push(result_register());
4525 const Register scratch1 = x10;
4526 const Register scratch2 = x11;
4527 __ Peek(scratch1, 2 * kPointerSize);
4528 __ Peek(scratch2, kPointerSize);
4529 __ Push(scratch1, scratch2, result_register());
4530 EmitKeyedSuperPropertyLoad(prop);
4531 break;
4532 }
4534 case KEYED_PROPERTY: {
4535 VisitForStackValue(prop->obj());
4536 VisitForStackValue(prop->key());
4537 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4538 __ Peek(LoadDescriptor::NameRegister(), 0);
4539 EmitKeyedPropertyLoad(prop);
4540 break;
4541 }
4543 case VARIABLE:
4544 UNREACHABLE();
4545 }
4546 }
4548 // We need a second deoptimization point after loading the value
4549 // in case evaluating the property load may have a side effect.
4550 if (assign_type == VARIABLE) {
4551 PrepareForBailout(expr->expression(), TOS_REG);
4552 } else {
4553 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4554 }
4556 // Inline smi case if we are in a loop.
4557 Label stub_call, done;
4558 JumpPatchSite patch_site(masm_);
4560 int count_value = expr->op() == Token::INC ? 1 : -1;
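// count_value is +1 for ++ and -1 for --. The inline smi path below adds it
// directly to the tagged value and falls back to the stub on overflow.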
4561 if (ShouldInlineSmiCase(expr->op())) {
4562 Label slow;
4563 patch_site.EmitJumpIfNotSmi(x0, &slow);
4565 // Save result for postfix expressions.
4566 if (expr->is_postfix()) {
4567 if (!context()->IsEffect()) {
4568 // Save the result on the stack. If we have a named or keyed property we
4569 // store the result under the receiver that is currently on top of the
4570 // stack.
4571 switch (assign_type) {
4572 case VARIABLE:
4573 __ Push(x0);
4574 break;
4575 case NAMED_PROPERTY:
4576 __ Poke(x0, kPointerSize);
4577 break;
4578 case NAMED_SUPER_PROPERTY:
4579 __ Poke(x0, kPointerSize * 2);
4580 break;
4581 case KEYED_PROPERTY:
4582 __ Poke(x0, kPointerSize * 2);
4583 break;
4584 case KEYED_SUPER_PROPERTY:
4585 __ Poke(x0, kPointerSize * 3);
4586 break;
4587 }
4588 }
4589 }
4591 __ Adds(x0, x0, Smi::FromInt(count_value));
4592 __ B(vc, &done);
4593 // Call stub. Undo operation first.
4594 __ Sub(x0, x0, Smi::FromInt(count_value));
4595 __ B(&stub_call);
4596 __ Bind(&slow);
4597 }
4598 if (!is_strong(language_mode())) {
4599 ToNumberStub convert_stub(isolate());
4600 __ CallStub(&convert_stub);
4601 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4602 }
4604 // Save result for postfix expressions.
4605 if (expr->is_postfix()) {
4606 if (!context()->IsEffect()) {
4607 // Save the result on the stack. If we have a named or keyed property
4608 // we store the result under the receiver that is currently on top
4609 // of the stack.
4610 switch (assign_type) {
4611 case VARIABLE:
4612 __ Push(x0);
4613 break;
4614 case NAMED_PROPERTY:
4615 __ Poke(x0, kXRegSize);
4616 break;
4617 case NAMED_SUPER_PROPERTY:
4618 __ Poke(x0, 2 * kXRegSize);
4619 break;
4620 case KEYED_PROPERTY:
4621 __ Poke(x0, 2 * kXRegSize);
4622 break;
4623 case KEYED_SUPER_PROPERTY:
4624 __ Poke(x0, 3 * kXRegSize);
4625 break;
4626 }
4627 }
4628 }
4630 __ Bind(&stub_call);
4631 __ Mov(x1, x0);
4632 __ Mov(x0, Smi::FromInt(count_value));
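// BinaryOpIC convention: the original value is the left operand (x1, set
// above) and the count is the right operand (x0).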
4634 SetExpressionPosition(expr);
4636 {
4637 Assembler::BlockPoolsScope scope(masm_);
4638 Handle<Code> code =
4639 CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4640 strength(language_mode())).code();
4641 CallIC(code, expr->CountBinOpFeedbackId());
4642 patch_site.EmitPatchInfo();
4643 }
4644 __ Bind(&done);
4646 if (is_strong(language_mode())) {
4647 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4648 }
4649 // Store the value returned in x0.
4650 switch (assign_type) {
4651 case VARIABLE:
4652 if (expr->is_postfix()) {
4653 { EffectContext context(this);
4654 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4655 Token::ASSIGN, expr->CountSlot());
4656 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4657 context.Plug(x0);
4658 }
4659 // For all contexts except EffectContext we have the result on
4660 // top of the stack.
4661 if (!context()->IsEffect()) {
4662 context()->PlugTOS();
4663 }
4664 } else {
4665 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4666 Token::ASSIGN, expr->CountSlot());
4667 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4668 context()->Plug(x0);
4669 }
4670 break;
4671 case NAMED_PROPERTY: {
4672 __ Mov(StoreDescriptor::NameRegister(),
4673 Operand(prop->key()->AsLiteral()->value()));
4674 __ Pop(StoreDescriptor::ReceiverRegister());
4675 if (FLAG_vector_stores) {
4676 EmitLoadStoreICSlot(expr->CountSlot());
4677 CallStoreIC();
4678 } else {
4679 CallStoreIC(expr->CountStoreFeedbackId());
4680 }
4681 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4682 if (expr->is_postfix()) {
4683 if (!context()->IsEffect()) {
4684 context()->PlugTOS();
4685 }
4686 } else {
4687 context()->Plug(x0);
4688 }
4689 break;
4690 }
4691 case NAMED_SUPER_PROPERTY: {
4692 EmitNamedSuperPropertyStore(prop);
4693 if (expr->is_postfix()) {
4694 if (!context()->IsEffect()) {
4695 context()->PlugTOS();
4696 }
4697 } else {
4698 context()->Plug(x0);
4699 }
4700 break;
4701 }
4702 case KEYED_SUPER_PROPERTY: {
4703 EmitKeyedSuperPropertyStore(prop);
4704 if (expr->is_postfix()) {
4705 if (!context()->IsEffect()) {
4706 context()->PlugTOS();
4707 }
4708 } else {
4709 context()->Plug(x0);
4710 }
4711 break;
4712 }
4713 case KEYED_PROPERTY: {
4714 __ Pop(StoreDescriptor::NameRegister());
4715 __ Pop(StoreDescriptor::ReceiverRegister());
4716 Handle<Code> ic =
4717 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4718 if (FLAG_vector_stores) {
4719 EmitLoadStoreICSlot(expr->CountSlot());
4720 CallIC(ic);
4721 } else {
4722 CallIC(ic, expr->CountStoreFeedbackId());
4723 }
4724 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4725 if (expr->is_postfix()) {
4726 if (!context()->IsEffect()) {
4727 context()->PlugTOS();
4728 }
4729 } else {
4730 context()->Plug(x0);
4731 }
4732 break;
4733 }
4734 }
4735 }
4738 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4739 Expression* sub_expr,
4740 Handle<String> check) {
4741 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4742 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4743 Label materialize_true, materialize_false;
4744 Label* if_true = NULL;
4745 Label* if_false = NULL;
4746 Label* fall_through = NULL;
4747 context()->PrepareTest(&materialize_true, &materialize_false,
4748 &if_true, &if_false, &fall_through);
4750 { AccumulatorValueContext context(this);
4751 VisitForTypeofValue(sub_expr);
4752 }
4753 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4755 Factory* factory = isolate()->factory();
4756 if (String::Equals(check, factory->number_string())) {
4757 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4758 __ JumpIfSmi(x0, if_true);
4759 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4760 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4761 Split(eq, if_true, if_false, fall_through);
4762 } else if (String::Equals(check, factory->string_string())) {
4763 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4764 __ JumpIfSmi(x0, if_false);
4765 // Check for undetectable objects => false.
4766 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4767 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4768 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4769 fall_through);
4770 } else if (String::Equals(check, factory->symbol_string())) {
4771 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4772 __ JumpIfSmi(x0, if_false);
4773 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4774 Split(eq, if_true, if_false, fall_through);
4775 } else if (String::Equals(check, factory->float32x4_string())) {
4776 ASM_LOCATION(
4777 "FullCodeGenerator::EmitLiteralCompareTypeof float32x4_string");
4778 __ JumpIfSmi(x0, if_false);
4779 __ CompareObjectType(x0, x0, x1, FLOAT32X4_TYPE);
4780 Split(eq, if_true, if_false, fall_through);
4781 } else if (String::Equals(check, factory->boolean_string())) {
4782 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4783 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4784 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4785 Split(eq, if_true, if_false, fall_through);
4786 } else if (String::Equals(check, factory->undefined_string())) {
4787 ASM_LOCATION(
4788 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4789 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4790 __ JumpIfSmi(x0, if_false);
4791 // Check for undetectable objects => true.
4792 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4793 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4794 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4795 fall_through);
4796 } else if (String::Equals(check, factory->function_string())) {
4797 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4798 __ JumpIfSmi(x0, if_false);
4799 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4800 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4801 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4802 fall_through);
4804 } else if (String::Equals(check, factory->object_string())) {
4805 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4806 __ JumpIfSmi(x0, if_false);
4807 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4808 // Check for JS objects => true.
4809 Register map = x10;
4810 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4811 if_false, lt);
4812 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4813 __ B(gt, if_false);
4814 // Check for undetectable objects => false.
4815 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4817 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4818 fall_through);
4820 } else {
4821 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4822 if (if_false != fall_through) __ B(if_false);
4823 }
4824 context()->Plug(if_true, if_false);
4825 }
4828 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4829 Comment cmnt(masm_, "[ CompareOperation");
4830 SetExpressionPosition(expr);
4832 // Try to generate an optimized comparison with a literal value.
4833 // TODO(jbramley): This only checks common values like NaN or undefined.
4834 // Should it also handle ARM64 immediate operands?
4835 if (TryLiteralCompare(expr)) {
4836 return;
4837 }
4839 // Assign labels according to context()->PrepareTest.
4840 Label materialize_true;
4841 Label materialize_false;
4842 Label* if_true = NULL;
4843 Label* if_false = NULL;
4844 Label* fall_through = NULL;
4845 context()->PrepareTest(&materialize_true, &materialize_false,
4846 &if_true, &if_false, &fall_through);
4848 Token::Value op = expr->op();
4849 VisitForStackValue(expr->left());
4850 switch (op) {
4851 case Token::IN:
4852 VisitForStackValue(expr->right());
4853 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4854 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4855 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4856 Split(eq, if_true, if_false, fall_through);
4857 break;
4859 case Token::INSTANCEOF: {
4860 VisitForStackValue(expr->right());
4861 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4862 __ CallStub(&stub);
4863 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4864 // The stub returns 0 for true.
4865 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4866 break;
4867 }
4869 default: {
4870 VisitForAccumulatorValue(expr->right());
4871 Condition cond = CompareIC::ComputeCondition(op);
4873 // Pop the stack value.
4874 __ Pop(x1);
4876 JumpPatchSite patch_site(masm_);
4877 if (ShouldInlineSmiCase(op)) {
4878 Label slow_case;
4879 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4880 __ Cmp(x1, x0);
4881 Split(cond, if_true, if_false, NULL);
4882 __ Bind(&slow_case);
4883 }
4885 Handle<Code> ic = CodeFactory::CompareIC(
4886 isolate(), op, strength(language_mode())).code();
4887 CallIC(ic, expr->CompareOperationFeedbackId());
4888 patch_site.EmitPatchInfo();
4889 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4890 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4891 }
4892 }
4894 // Convert the result of the comparison into one expected for this
4895 // expression's context.
4896 context()->Plug(if_true, if_false);
4897 }
4900 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4901 Expression* sub_expr,
4902 NilValue nil) {
4903 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4904 Label materialize_true, materialize_false;
4905 Label* if_true = NULL;
4906 Label* if_false = NULL;
4907 Label* fall_through = NULL;
4908 context()->PrepareTest(&materialize_true, &materialize_false,
4909 &if_true, &if_false, &fall_through);
4911 VisitForAccumulatorValue(sub_expr);
4912 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4914 if (expr->op() == Token::EQ_STRICT) {
4915 Heap::RootListIndex nil_value = nil == kNullValue ?
4916 Heap::kNullValueRootIndex :
4917 Heap::kUndefinedValueRootIndex;
4918 __ CompareRoot(x0, nil_value);
4919 Split(eq, if_true, if_false, fall_through);
4920 } else {
4921 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4922 CallIC(ic, expr->CompareOperationFeedbackId());
4923 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4924 }
4926 context()->Plug(if_true, if_false);
4927 }
4930 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4931 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4932 context()->Plug(x0);
4933 }
4936 void FullCodeGenerator::VisitYield(Yield* expr) {
4937 Comment cmnt(masm_, "[ Yield");
4938 SetExpressionPosition(expr);
4940 // Evaluate yielded value first; the initial iterator definition depends on
4941 // this. It stays on the stack while we update the iterator.
4942 VisitForStackValue(expr->expression());
4944 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4945 // and suchlike. The implementation changes a little by bleeding_edge so I
4946 // don't want to spend too much time on it now.
4948 switch (expr->yield_kind()) {
4949 case Yield::kSuspend:
4950 // Pop value from top-of-stack slot; box result into result register.
4951 EmitCreateIteratorResult(false);
4952 __ Push(result_register());
4953 // Fall through.
4954 case Yield::kInitial: {
4955 Label suspend, continuation, post_runtime, resume;
4957 __ B(&suspend);
4958 // TODO(jbramley): This label is bound here because the following code
4959 // looks at its pos(). Is it possible to do something more efficient here,
4960 // perhaps using Adr?
4961 __ Bind(&continuation);
4962 __ RecordGeneratorContinuation();
4963 __ B(&resume);
4965 __ Bind(&suspend);
4966 VisitForAccumulatorValue(expr->generator_object());
4967 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
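// The continuation is recorded as a smi-encoded offset into this code
// object; EmitGeneratorResume later adds it to the code entry to compute
// the resume address.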
4968 __ Mov(x1, Smi::FromInt(continuation.pos()));
4969 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4970 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4971 __ Mov(x1, cp);
4972 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4973 kLRHasBeenSaved, kDontSaveFPRegs);
4974 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4975 __ Cmp(__ StackPointer(), x1);
4976 __ B(eq, &post_runtime);
4977 __ Push(x0); // generator object
4978 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4979 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4980 __ Bind(&post_runtime);
4981 __ Pop(result_register());
4982 EmitReturnSequence();
4984 __ Bind(&resume);
4985 context()->Plug(result_register());
4986 break;
4987 }
4989 case Yield::kFinal: {
4990 VisitForAccumulatorValue(expr->generator_object());
4991 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4992 __ Str(x1, FieldMemOperand(result_register(),
4993 JSGeneratorObject::kContinuationOffset));
4994 // Pop value from top-of-stack slot, box result into result register.
4995 EmitCreateIteratorResult(true);
4996 EmitUnwindBeforeReturn();
4997 EmitReturnSequence();
4998 break;
4999 }
5001 case Yield::kDelegating: {
5002 VisitForStackValue(expr->generator_object());
5004 // Initial stack layout is as follows:
5005 // [sp + 1 * kPointerSize] iter
5006 // [sp + 0 * kPointerSize] g
5008 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
5009 Label l_next, l_call, l_loop;
5010 Register load_receiver = LoadDescriptor::ReceiverRegister();
5011 Register load_name = LoadDescriptor::NameRegister();
5013 // Initial send value is undefined.
5014 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
5015 __ B(&l_next);
5017 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
5018 __ Bind(&l_catch);
5019 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
5020 __ Peek(x3, 1 * kPointerSize); // iter
5021 __ Push(load_name, x3, x0); // "throw", iter, except
5022 __ B(&l_call);
5024 // try { received = %yield result }
5025 // Shuffle the received result above a try handler and yield it without
5026 // re-boxing.
5027 __ Bind(&l_try);
5028 __ Pop(x0); // result
5029 int handler_index = NewHandlerTableEntry();
5030 EnterTryBlock(handler_index, &l_catch);
5031 const int try_block_size = TryCatch::kElementCount * kPointerSize;
5032 __ Push(x0); // result
5034 __ B(&l_suspend);
5035 // TODO(jbramley): This label is bound here because the following code
5036 // looks at its pos(). Is it possible to do something more efficient here,
5037 // perhaps using Adr?
5038 __ Bind(&l_continuation);
5039 __ RecordGeneratorContinuation();
5040 __ B(&l_resume);
5042 __ Bind(&l_suspend);
5043 const int generator_object_depth = kPointerSize + try_block_size;
5044 __ Peek(x0, generator_object_depth);
5045 __ Push(x0); // g
5046 __ Push(Smi::FromInt(handler_index)); // handler-index
5047 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
5048 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
5049 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
5050 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
5051 __ Mov(x1, cp);
5052 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
5053 kLRHasBeenSaved, kDontSaveFPRegs);
5054 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
5055 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5056 __ Pop(x0); // result
5057 EmitReturnSequence();
5058 __ Bind(&l_resume); // received in x0
5059 ExitTryBlock(handler_index);
5061 // receiver = iter; f = 'next'; arg = received;
5062 __ Bind(&l_next);
5064 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
5065 __ Peek(x3, 1 * kPointerSize); // iter
5066 __ Push(load_name, x3, x0); // "next", iter, received
5068 // result = receiver[f](arg);
5069 __ Bind(&l_call);
5070 __ Peek(load_receiver, 1 * kPointerSize);
5071 __ Peek(load_name, 2 * kPointerSize);
5072 __ Mov(LoadDescriptor::SlotRegister(),
5073 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
5074 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
5075 CallIC(ic, TypeFeedbackId::None());
5076 __ Mov(x1, x0);
5077 __ Poke(x1, 2 * kPointerSize);
5078 SetCallPosition(expr, 1);
5079 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
5080 __ CallStub(&stub);
5082 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5083 __ Drop(1); // The function is still on the stack; drop it.
5085 // if (!result.done) goto l_try;
5086 __ Bind(&l_loop);
5087 __ Move(load_receiver, x0);
5089 __ Push(load_receiver); // save result
5090 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
5091 __ Mov(LoadDescriptor::SlotRegister(),
5092 SmiFromSlot(expr->DoneFeedbackSlot()));
5093 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.done
5094 // The ToBooleanStub argument (result.done) is in x0.
5095 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
5096 CallIC(bool_ic);
5097 __ Cbz(x0, &l_try);
5099 // result.value
5100 __ Pop(load_receiver); // result
5101 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
5102 __ Mov(LoadDescriptor::SlotRegister(),
5103 SmiFromSlot(expr->ValueFeedbackSlot()));
5104 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.value
5105 context()->DropAndPlug(2, x0); // drop iter and g
5106 break;
5107 }
5108 }
5109 }
5112 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
5113 Expression *value,
5114 JSGeneratorObject::ResumeMode resume_mode) {
5115 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
5116 Register generator_object = x1;
5117 Register the_hole = x2;
5118 Register operand_stack_size = w3;
5119 Register function = x4;
5121 // The value stays in x0, and is ultimately read by the resumed generator, as
5122 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
5123 // is read to throw the value when the resumed generator is already closed. x1
5124 // will hold the generator object until the activation has been resumed.
5125 VisitForStackValue(generator);
5126 VisitForAccumulatorValue(value);
5127 __ Pop(generator_object);
5129 // Load suspended function and context.
5130 __ Ldr(cp, FieldMemOperand(generator_object,
5131 JSGeneratorObject::kContextOffset));
5132 __ Ldr(function, FieldMemOperand(generator_object,
5133 JSGeneratorObject::kFunctionOffset));
5135 // Load receiver and store as the first argument.
5136 __ Ldr(x10, FieldMemOperand(generator_object,
5137 JSGeneratorObject::kReceiverOffset));
5138 __ Push(x10);
5140 // Push holes for the rest of the arguments to the generator function.
5141 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
5143 // The number of arguments is stored as an int32_t, and -1 is a marker
5144 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
5145 // extension to correctly handle it. However, in this case, we operate on
5146 // 32-bit W registers, so extension isn't required.
5147 __ Ldr(w10, FieldMemOperand(x10,
5148 SharedFunctionInfo::kFormalParameterCountOffset));
5149 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
5150 __ PushMultipleTimes(the_hole, w10);
5152 // Enter a new JavaScript frame, and initialize its slots as they were when
5153 // the generator was suspended.
5154 Label resume_frame, done;
5155 __ Bl(&resume_frame);
5156 __ B(&done);
5158 __ Bind(&resume_frame);
5159 __ Push(lr, // Return address.
5160 fp, // Caller's frame pointer.
5161 cp, // Callee's context.
5162 function); // Callee's JS Function.
5163 __ Add(fp, __ StackPointer(), kPointerSize * 2);
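// After pushing lr, fp, cp and the function, fp points at the saved fp
// slot, giving the resumed activation a standard JavaScript frame layout.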
5165 // Load and untag the operand stack size.
5166 __ Ldr(x10, FieldMemOperand(generator_object,
5167 JSGeneratorObject::kOperandStackOffset));
5168 __ Ldr(operand_stack_size,
5169 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
5171 // If we are sending a value and there is no operand stack, we can jump back
5172 // in directly.
5173 if (resume_mode == JSGeneratorObject::NEXT) {
5174 Label slow_resume;
5175 __ Cbnz(operand_stack_size, &slow_resume);
5176 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
5177 __ Ldrsw(x11,
5178 UntagSmiFieldMemOperand(generator_object,
5179 JSGeneratorObject::kContinuationOffset));
5180 __ Add(x10, x10, x11);
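// x10 = code entry + stored continuation offset: the exact point at which
// this generator suspended.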
5181 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
5182 __ Str(x12, FieldMemOperand(generator_object,
5183 JSGeneratorObject::kContinuationOffset));
5184 __ Br(x10);
5186 __ Bind(&slow_resume);
5187 }
5189 // Otherwise, we push holes for the operand stack and call the runtime to fix
5190 // up the stack and the handlers.
5191 __ PushMultipleTimes(the_hole, operand_stack_size);
5193 __ Mov(x10, Smi::FromInt(resume_mode));
5194 __ Push(generator_object, result_register(), x10);
5195 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
5196 // Not reached: the runtime call returns elsewhere.
5197 __ Unreachable();
5199 __ Bind(&done);
5200 context()->Plug(result_register());
5201 }
5204 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5205 Label gc_required;
5206 Label allocated;
5208 const int instance_size = 5 * kPointerSize;
5209 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
5210 instance_size);
5212 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5214 Register result = x0;
5215 __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
5216 __ B(&allocated);
5218 __ Bind(&gc_required);
5219 __ Push(Smi::FromInt(instance_size));
5220 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5221 __ Ldr(context_register(),
5222 MemOperand(fp, StandardFrameConstants::kContextOffset));
5224 __ Bind(&allocated);
5225 Register map_reg = x1;
5226 Register result_value = x2;
5227 Register boolean_done = x3;
5228 Register empty_fixed_array = x4;
5229 Register untagged_result = x5;
5230 __ Ldr(map_reg, GlobalObjectMemOperand());
5231 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5232 __ Ldr(map_reg,
5233 ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5234 __ Pop(result_value);
5235 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5236 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5237 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5238 JSObject::kElementsOffset);
5239 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5240 JSGeneratorObject::kResultDonePropertyOffset);
5241 __ ObjectUntag(untagged_result, result);
5242 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5243 __ Stp(empty_fixed_array, empty_fixed_array,
5244 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5245 __ Stp(result_value, boolean_done,
5246 MemOperand(untagged_result,
5247 JSGeneratorObject::kResultValuePropertyOffset));
5249 // Only the value field needs a write barrier, as the other values are in the
5250 // root set.
5251 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
5252 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
5253 context()->Plug(x0);
5254 }
5256 // TODO(all): I don't like this method.
5257 // It seems to me that in too many places x0 is used in place of this.
5258 // Also, this function is not suitable for all places where x0 should be
5259 // abstracted (eg. when used as an argument). But some places assume that the
5260 // first argument register is x0, and use this function instead.
5261 // Considering that most of the register allocation is hard-coded in the
5262 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5263 // that abstracting the allocation through functions would not yield any
5264 // performance benefit, I think the existence of this function is debatable.
5265 Register FullCodeGenerator::result_register() {
5266 return x0;
5267 }
5270 Register FullCodeGenerator::context_register() {
5271 return cp;
5272 }
5275 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5276 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5277 __ Str(value, MemOperand(fp, frame_offset));
5278 }
5281 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5282 __ Ldr(dst, ContextMemOperand(cp, context_index));
5283 }
5286 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5287 Scope* declaration_scope = scope()->DeclarationScope();
5288 if (declaration_scope->is_script_scope() ||
5289 declaration_scope->is_module_scope()) {
5290 // Contexts nested in the native context have a canonical empty function
5291 // as their closure, not the anonymous closure containing the global
5292 // code. Pass a smi sentinel and let the runtime look up the empty
5293 // function.
5294 DCHECK(kSmiTag == 0);
5295 __ Push(xzr);
5296 } else if (declaration_scope->is_eval_scope()) {
5297 // Contexts created by a call to eval have the same closure as the
5298 // context calling eval, not the anonymous closure containing the eval
5299 // code. Fetch it from the context.
5300 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
5301 __ Push(x10);
5302 } else {
5303 DCHECK(declaration_scope->is_function_scope());
5304 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5305 __ Push(x10);
5306 }
5307 }
5310 void FullCodeGenerator::EnterFinallyBlock() {
5311 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5312 DCHECK(!result_register().is(x10));
5313 // Preserve the result register while executing finally block.
5314 // Also cook the return address in lr to the stack (smi encoded Code* delta).
5315 __ Sub(x10, lr, Operand(masm_->CodeObject()));
5316 __ SmiTag(x10);
5317 __ Push(result_register(), x10);
5319 // Store pending message while executing finally block.
5320 ExternalReference pending_message_obj =
5321 ExternalReference::address_of_pending_message_obj(isolate());
5322 __ Mov(x10, pending_message_obj);
5323 __ Ldr(x10, MemOperand(x10));
5324 __ Push(x10);
5326 ClearPendingMessage();
5327 }
5330 void FullCodeGenerator::ExitFinallyBlock() {
5331 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5332 DCHECK(!result_register().is(x10));
5334 // Restore pending message from stack.
5335 __ Pop(x10);
5336 ExternalReference pending_message_obj =
5337 ExternalReference::address_of_pending_message_obj(isolate());
5338 __ Mov(x13, pending_message_obj);
5339 __ Str(x10, MemOperand(x13));
5341 // Restore result register and cooked return address from the stack.
5342 __ Pop(x10, result_register());
5344 // Uncook the return address (see EnterFinallyBlock).
5345 __ SmiUntag(x10);
5346 __ Add(x11, x10, Operand(masm_->CodeObject()));
5347 __ Br(x11);
5348 }
5351 void FullCodeGenerator::ClearPendingMessage() {
5352 DCHECK(!result_register().is(x10));
5353 ExternalReference pending_message_obj =
5354 ExternalReference::address_of_pending_message_obj(isolate());
5355 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
5356 __ Mov(x13, pending_message_obj);
5357 __ Str(x10, MemOperand(x13));
5358 }
5361 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5362 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5363 __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
5364 }
5367 #undef __
5370 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5371 Address pc,
5372 BackEdgeState target_state,
5373 Code* replacement_code) {
5374 // Turn the jump into a nop.
5375 Address branch_address = pc - 3 * kInstructionSize;
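// The back edge sequence is <b.pl ok / nop>, <ldr x16, ...>, <blr x16>;
// pc points just past the blr, so the patchable instruction is three
// instructions back.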
5376 PatchingAssembler patcher(branch_address, 1);
5378 DCHECK(Instruction::Cast(branch_address)
5379 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5380 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5381 Instruction::Cast(branch_address)->ImmPCOffset() ==
5382 6 * kInstructionSize));
5384 switch (target_state) {
5385 case INTERRUPT:
5386 // <decrement profiling counter>
5387 // .. .. .. .. b.pl ok
5388 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5389 // .. .. .. .. blr x16
5390 // ... more instructions.
5391 // ok-label
5392 // Jump offset is 6 instructions.
5393 patcher.b(6, pl);
5394 break;
5395 case ON_STACK_REPLACEMENT:
5396 case OSR_AFTER_STACK_CHECK:
5397 // <decrement profiling counter>
5398 // .. .. .. .. mov x0, x0 (NOP)
5399 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5400 // .. .. .. .. blr x16
5401 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
5402 break;
5403 }
5405 // Replace the call address.
5406 Instruction* load = Instruction::Cast(pc)->preceding(2);
5407 Address interrupt_address_pointer =
5408 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5409 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5410 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5411 ->builtins()
5412 ->OnStackReplacement()
5413 ->entry())) ||
5414 (Memory::uint64_at(interrupt_address_pointer) ==
5415 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5416 ->builtins()
5417 ->InterruptCheck()
5418 ->entry())) ||
5419 (Memory::uint64_at(interrupt_address_pointer) ==
5420 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5421 ->builtins()
5422 ->OsrAfterStackCheck()
5423 ->entry())) ||
5424 (Memory::uint64_at(interrupt_address_pointer) ==
5425 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5426 ->builtins()
5427 ->OnStackReplacement()
5428 ->entry())));
5429 Memory::uint64_at(interrupt_address_pointer) =
5430 reinterpret_cast<uint64_t>(replacement_code->entry());
5432 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5433 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5434 }
5437 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5438 Isolate* isolate,
5439 Code* unoptimized_code,
5440 Address pc) {
5441 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5442 // back-end), but this function is gone in bleeding_edge so it might not
5443 // matter anyway.
5444 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5446 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5447 Instruction* load = Instruction::Cast(pc)->preceding(2);
5448 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5449 load->ImmPCOffset());
5450 if (entry == reinterpret_cast<uint64_t>(
5451 isolate->builtins()->OnStackReplacement()->entry())) {
5452 return ON_STACK_REPLACEMENT;
5453 } else if (entry == reinterpret_cast<uint64_t>(
5454 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5455 return OSR_AFTER_STACK_CHECK;
5456 } else {
5457 UNREACHABLE();
5458 }
5459 }
5461 return INTERRUPT;
5462 }
5465 } // namespace internal
5466 } // namespace v8
5468 #endif // V8_TARGET_ARCH_ARM64