// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "v8.h"

#if V8_TARGET_ARCH_ARM64

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm64/code-stubs-arm64.h"
#include "arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      ASSERT(info_emitted_);
    } else {
      ASSERT(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    ASSERT(!info_emitted_);
    ASSERT(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    ASSERT(!info_emitted_);
    ASSERT(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

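// Usage sketch (illustrative; it mirrors the inline smi checks emitted
// further down in this file): the generated code contains a single
// patchable tbz/tbnz on xzr, and EmitPatchInfo() records the tested
// register and the patch site so that PatchInlinedSmiCode (ic-arm64.cc)
// can later rewrite the instruction to test the smi tag bit of reg_:
//
//   JumpPatchSite patch_site(masm_);
//   patch_site.EmitJumpIfSmi(x10, &both_smis);  // tbnz xzr, #0, ... for now.
//   ...                                         // Slow-path IC call.
//   patch_site.EmitPatchInfo();                 // Record reg_ and site.
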
static void EmitStackCheck(MacroAssembler* masm_,
                           int pointers = 0,
                           Register scratch = jssp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  ASSERT(jssp.Is(__ StackPointer()));
  ASSERT(scratch.Is(jssp) == (pointers == 0));
  Heap::RootListIndex index;
  if (pointers != 0) {
    __ Sub(scratch, jssp, pointers * kPointerSize);
    index = Heap::kRealStackLimitRootIndex;
  } else {
    index = Heap::kStackLimitRootIndex;
  }
  __ CompareRoot(scratch, index);
  __ B(hs, &ok);
  PredictableCodeSizeScope predictable(masm_,
                                       Assembler::kCallSizeWithRelocation);
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ Bind(&ok);
}

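// Illustrative call shapes (both appear later in this file): with the
// default arguments the check compares jssp against the stack limit; with
// an explicit pointer count and scratch register it checks that the real
// stack limit still leaves room for that many stack slots:
//
//   EmitStackCheck(masm_);                     // Plain stack/interrupt check.
//   EmitStackCheck(masm_, locals_count, x10);  // Room for locals_count slots.
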
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
// - x1: the JS function object being called (i.e. ourselves).
// - cp: our context.
// - fp: our caller's frame pointer.
// - jssp: stack pointer.
// - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, locals_count, x10);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

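  // Illustrative arithmetic for the push loop above (assumed values): with
  // locals_count == 70 and kMaxPushes == 32, the loop runs 70 / 32 == 2
  // iterations (64 slots) and the tail emits the remaining 70 % 32 == 6
  // pushes.
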
  bool function_in_register_x1 = true;

  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }
  }

  { Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    EmitStackCheck(masm_);
  }

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  ASSERT(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  ASSERT(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

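// Worked example for the weight computed above (symbolic, since
// kCodeSizeMultiplier is platform-defined): for a back edge spanning N
// bytes of code,
//   weight = Min(kMaxBackEdgeWeight,
//                Max(1, (N + kCodeSizeMultiplier / 2) / kCodeSizeMultiplier));
// so larger loop bodies decrement the profiling counter faster and reach
// the InterruptCheck (and hence possible OSR) after fewer iterations.
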
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      ASSERT(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSRetSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the size
      // of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16 bytes alignment here.
      ASSERT(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
      __ LoadLiteral(ip0, 3 * kInstructionSize);
      __ add(current_sp, current_sp, ip0);
      __ ret();
      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}

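// Shape of the patchable return sequence emitted above (sketch; the exact
// instruction count must match Assembler::kJSRetSequenceInstructions so the
// debugger can patch it):
//   mov   jssp, fp
//   ldp   fp, lr, [jssp], #16
//   ldr   ip0, <literal>   ; argument space, taken from the dc64 below
//   add   jssp, jssp, ip0
//   ret
//   dc64  kXRegSize * (num_parameters + 1)
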
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}

// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    ASSERT(if_false != fall_through);
    __ B(InvertCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

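// Sketch of the fp-relative layout assumed above: for parameter i the
// computed offset is (num_parameters + 1 - i) * kPointerSize above fp
// (parameters were pushed left to right, so parameter 0 is furthest from
// fp), while local i resolves to kLocal0Offset - i * kXRegSize below fp.
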
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));

  // Assign it.
  __ Str(x1, ContextMemOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            x1,
                            x3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

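// Shape of the dispatch emitted above (sketch): each non-default clause
// compiles to "compare the switch value with the clause label, branch to
// the clause body on equality, otherwise fall through to the next test",
// with an inline smi fast path in front of the generic CompareIC call.
// The clause bodies are emitted afterwards in source order, so matching
// clauses fall through to the following body naturally.
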
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(x0);  // Map.
  __ Mov(x0, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ SmiTag(x1);
  __ Push(x2, x1, x0);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  ASSERT(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Push(x1, x0);  // Smi and array.
  __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, xzr);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Mov(x3, x0);
  __ Cbz(x0, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}

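// Stack layout maintained across one for-in iteration, top to bottom, as
// set up above: [ index (smi), length (smi), array, map or smi(1/0),
// enumerable object ]. The Drop(5) on the break path removes exactly these
// five slots.
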
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  Register iterator = x0;
  __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
                loop_statement.break_label());
  __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
                loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(iterator, &convert);
  __ CompareObjectType(iterator, x1, x1, FIRST_SPEC_OBJECT_TYPE);
  __ B(ge, &done_convert);
  __ Bind(&convert);
  __ Push(iterator);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(iterator);

  // Loop entry.
  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ Bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ B(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(),
                            info->strict_mode(),
                            info->is_generator());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(x0);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(x0, GlobalObjectMemOperand());
  __ Mov(x2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in x2 and the global
      // object (receiver) in x0.
      __ Ldr(x0, GlobalObjectMemOperand());
      __ Mov(x2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(x0, var);
          Label done;
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Mov(x0, Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Mov(x2, Operand(key->value()));
            __ Peek(x1, 0);
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(key);
          VisitForStackValue(value);
          __ Mov(x0, Smi::FromInt(NONE));  // PropertyAttributes
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          VisitForEffect(key);
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(value);
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        isolate(),
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
  } else if ((expr->depth() > 1) || Serializer::enabled(isolate()) ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ Mov(x0, Smi::FromInt(flags));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(isolate(),
                                   mode,
                                   allocation_site_mode,
                                   length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Push(x0);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}

1874 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1875 ASSERT(expr->target()->IsValidReferenceExpression());
1877 Comment cmnt(masm_, "[ Assignment");
1879 // Left-hand side can only be a property, a global or a (parameter or local)
1881 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1882 LhsKind assign_type = VARIABLE;
1883 Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }
1890 // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
1895 case NAMED_PROPERTY:
1896 if (expr->is_compound()) {
1897 // We need the receiver both on the stack and in the accumulator.
1898 VisitForAccumulatorValue(property->obj());
        __ Push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
1904 case KEYED_PROPERTY:
1905 if (expr->is_compound()) {
1906 VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ Peek(x1, 0);
        __ Push(x0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }
1917 // For compound assignments we need another deoptimization point after the
1918 // variable/property load.
1919 if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }
1937 Token::Value op = expr->binary_op();
1938 __ Push(x0); // Left operand goes on the stack.
1939 VisitForAccumulatorValue(expr->value());
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
1944 SetSourcePosition(expr->position() + 1);
1945 AccumulatorValueContext context(this);
1946 if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
1956 // Deoptimization point in case the binary operation may have side effects.
1957 PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
1962 // Record source position before possible IC call.
1963 SetSourcePosition(expr->position());
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1983 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1984 SetSourcePosition(prop->position());
1985 Literal* key = prop->key()->AsLiteral();
1986 __ Mov(x2, Operand(key->value()));
  // Call load IC. It has arguments receiver and property name in x0 and x2.
1988 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
1992 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1993 SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in x0 and x1.
1995 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1996 CallIC(ic, prop->PropertyFeedbackId());
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
2005 Label done, both_smis, stub_call;
2007 // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);
2013 // Perform combined smi check on both operands.
2014 __ Orr(x10, left, right);
2015 JumpPatchSite patch_site(masm_);
2016 patch_site.EmitJumpIfSmi(x10, &both_smis);
2018 __ Bind(&stub_call);
2019 BinaryOpICStub stub(isolate(), op, mode);
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);
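  // NOTE (editor): EmitPatchInfo() records the offset between the patch site
  // (the tbz/tbnz emitted by JumpPatchSite) and this IC call, so that
  // PatchInlinedSmiCode in ic-arm64.cc can later rewrite the check once the
  // IC has collected type feedback.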
2027 __ Bind(&both_smis);
2028 // Smi case. This code works in the same way as the smi-smi case in the type
2029 // recording binary operation stub, see
2030 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2031 // TODO(all): That doesn't exist any more. Where are the comments?
2033 // The set of operations that needs to be supported here is controlled by
2034 // FullCodeGenerator::ShouldInlineSmiCase().
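  // NOTE (editor): on arm64 a smi keeps its 32-bit payload in the upper half
  // of the 64-bit tagged value (kSmiShift == 32) with the low bits clear, so
  // ADD/SUB and the bitwise cases below can operate directly on tagged
  // values, while the shift cases first extract the untagged shift amount
  // with Ubfx(right, right, kSmiShift, 5). Illustration only (not part of
  // the build):
  //   int64_t SmiTag(int32_t v)   { return static_cast<int64_t>(v) << 32; }
  //   int32_t SmiUntag(int64_t s) { return static_cast<int32_t>(s >> 32); }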
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR: {
      Label right_not_zero;
      __ Cbnz(right, &right_not_zero);
      __ Tbnz(left, kXSignBit, &stub_call);
      __ Bind(&right_not_zero);
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    }
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      STATIC_ASSERT(kSmiTag == 0);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
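  // NOTE (editor): in the MUL case above, Smulh returns the high 64 bits of
  // the 128-bit product; since both operands carry their payload in the
  // upper 32 bits, that is exactly the untagged 64-bit product. A zero
  // product from operands of opposite sign would be -0 (not representable as
  // a smi), and Cls verifies the product fits in 32 bits before re-tagging.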
2096 context()->Plug(x0);
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ Pop(x1);
  BinaryOpICStub stub(isolate(), op, mode);
2105 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
2111 context()->Plug(x0);
2115 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2116 ASSERT(expr->IsValidReferenceExpression());
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
2120 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2121 LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }
2129 switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
2136 case NAMED_PROPERTY: {
2137 __ Push(x0); // Preserve value.
2138 VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this swapping.
      __ Mov(x1, x0);
      __ Pop(x0);  // Restore value.
      __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
2147 case KEYED_PROPERTY: {
2148 __ Push(x0); // Preserve value.
2149 VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(x1, x0);
      __ Pop(x2, x0);
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
2160 context()->Plug(x0);
2164 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2165 Variable* var, MemOperand location) {
2166 __ Str(result_register(), location);
2167 if (var->IsContextSlot()) {
2168 // RecordWrite may destroy all its register arguments.
2169 __ Mov(x10, result_register());
2170 int offset = Context::SlotOffset(var->index());
2171 __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}
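// NOTE (editor): stores into context slots need the write barrier above
// because contexts are heap objects; stack slots are walked exactly by the
// GC and need no barrier, which is why the barrier is guarded by
// var->IsContextSlot().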
2177 void FullCodeGenerator::EmitCallStoreContextSlot(
2178 Handle<String> name, StrictMode strict_mode) {
2179 __ Mov(x11, Operand(name));
2180 __ Mov(x10, Smi::FromInt(strict_mode));
  // jssp[0]  : mode.
  // jssp[8]  : name.
  // jssp[16] : context.
  // jssp[24] : value.
2185 __ Push(x0, cp, x11, x10);
2186 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
2192 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2193 if (var->IsUnallocated()) {
2194 // Global var, const, or let.
2195 __ Mov(x2, Operand(var->name()));
    __ Ldr(x1, GlobalObjectMemOperand());
    CallStoreIC();
2199 } else if (op == Token::INIT_CONST_LEGACY) {
2200 // Const initializers need a write barrier.
2201 ASSERT(!var->IsParameter()); // No const parameters.
2202 if (var->IsLookupSlot()) {
      __ Push(x0);
      __ Mov(x0, Operand(var->name()));
2205 __ Push(cp, x0); // Context and name.
2206 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }
2217 } else if (var->mode() == LET && op != Token::INIT_LET) {
2218 // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
      __ Mov(x10, Operand(var->name()));
      __ Push(x10);
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      // Perform the assignment.
      __ Bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
2235 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
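// NOTE (editor): summarizing the branches above: unallocated (global)
// variables go through the store IC; legacy const initialization only writes
// while the slot still holds the hole; a non-initializing store to a let
// binding throws a ReferenceError if the binding is still uninitialized; and
// ordinary var/initializing stores write unconditionally, with const
// re-assignments silently ignored.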
2255 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2256 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2257 // Assignment to a property, using a named store IC.
2258 Property* prop = expr->target()->AsProperty();
2259 ASSERT(prop != NULL);
2260 ASSERT(prop->key()->AsLiteral() != NULL);
2262 // Record source code position before IC call.
2263 SetSourcePosition(expr->position());
2264 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
  __ Pop(x1);

  CallStoreIC(expr->AssignmentFeedbackId());
2269 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2270 context()->Plug(x0);
2274 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2275 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2276 // Assignment to a property, using a keyed store IC.
2278 // Record source code position before IC call.
2279 SetSourcePosition(expr->position());
2280 // TODO(all): Could we pass this in registers rather than on the stack?
2281 __ Pop(x1, x2); // Key and object holding the property.
2283 Handle<Code> ic = strict_mode() == SLOPPY
2284 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2285 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2286 CallIC(ic, expr->AssignmentFeedbackId());
2288 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}
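// NOTE (editor): the keyed store IC consumes the value in x0, the key in x1
// and the receiver in x2; the value stays in the accumulator while key and
// receiver are popped, matching the comment on the Pop above.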
2293 void FullCodeGenerator::VisitProperty(Property* expr) {
2294 Comment cmnt(masm_, "[ Property");
2295 Expression* key = expr->key();
2297 if (key->IsPropertyName()) {
2298 VisitForAccumulatorValue(expr->obj());
2299 EmitNamedPropertyLoad(expr);
2300 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2301 context()->Plug(x0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ Pop(x1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(x0);
  }
}
2312 void FullCodeGenerator::CallIC(Handle<Code> code,
2313 TypeFeedbackId ast_id) {
2315 // All calls must have a predictable size in full-codegen code to ensure that
2316 // the debugger can patch them correctly.
2317 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2321 // Code common for calls using the IC.
2322 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2323 Expression* callee = expr->expression();
  CallIC::CallType call_type = callee->IsVariableProxy()
      ? CallIC::FUNCTION
      : CallIC::METHOD;
2329 // Get the target function.
2330 if (call_type == CallIC::FUNCTION) {
2331 { StackValueContext context(this);
2332 EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ Peek(x0, 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
  }
2349 EmitCall(expr, call_type);
2353 // Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
2357 VisitForAccumulatorValue(key);
2359 Expression* callee = expr->expression();
2361 // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ Peek(x1, 0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);
2371 EmitCall(expr, CallIC::METHOD);
2375 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2376 // Load the arguments.
2377 ZoneList<Expression*>* args = expr->arguments();
2378 int arg_count = args->length();
2379 { PreservePositionScope scope(masm()->positions_recorder());
2380 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
2384 // Record source position of the IC call.
2385 SetSourcePosition(expr->position());
2387 Handle<Code> ic = CallIC::initialize_stub(
2388 isolate(), arg_count, call_type);
2389 __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
2390 __ Peek(x1, (arg_count + 1) * kXRegSize);
2391 // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
2395 RecordJSReturnSite(expr);
2396 // Restore context register.
2397 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}
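// NOTE (editor): at the call site the stack holds, from the top, arg_count
// arguments, then the receiver, then the target function, so the Peek offset
// (arg_count + 1) * kXRegSize reloads the function into x1 for the IC;
// DropAndPlug(1, x0) then appears to drop that function slot and leave the
// result in x0.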
2402 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2403 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x10, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
  }
2412 // Prepare to push the receiver of the enclosing function.
2413 int receiver_offset = 2 + info_->scope()->num_parameters();
  __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));

  // Push.
  __ Push(x10, x11);
2419 // Prepare to push the language mode.
2420 __ Mov(x10, Smi::FromInt(strict_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x11, Smi::FromInt(scope()->start_position()));

  // Push.
  __ Push(x10, x11);
2427 // Do the runtime call.
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}
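// NOTE (editor): the five arguments consumed by the resolver are the copy of
// the function pushed by the caller, the first-argument copy (or undefined),
// the receiver of the enclosing function, the language mode and the scope
// start position, all prepared above.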
2432 void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
2439 Comment cmnt(masm_, "[ Call");
2440 Expression* callee = expr->expression();
2441 Call::CallType call_type = expr->GetCallType(isolate());
2443 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2444 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2445 // to resolve the function we need to call and the receiver of the
2446 // call. Then we call the resolved function using the given
2448 ZoneList<Expression*>* args = expr->arguments();
2449 int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
2453 VisitForStackValue(callee);
2454 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2455 __ Push(x10); // Reserved receiver slot.
2457 // Push the arguments.
2458 for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }
      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ Peek(x10, (arg_count + 1) * kPointerSize);
      __ Push(x10);
2466 EmitResolvePossiblyDirectEval(arg_count);
2468 // The runtime call returns a pair of values in x0 (function) and
2469 // x1 (receiver). Touch up the stack with the right values.
      __ PokePair(x1, x0, arg_count * kPointerSize);
    }
2473 // Record source position for debugger.
2474 SetSourcePosition(expr->position());
2476 // Call the evaluated function.
2477 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
2480 RecordJSReturnSite(expr);
2481 // Restore context register.
2482 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2483 context()->DropAndPlug(1, x0);
2485 } else if (call_type == Call::GLOBAL_CALL) {
2486 EmitCallWithLoadIC(expr);
2488 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2489 // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
2493 { PreservePositionScope scope(masm()->positions_recorder());
2494 // Generate code for loading from variables potentially shadowed
2495 // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ Bind(&slow);
2500 // Call the runtime to find the function to call (returned in x0)
2501 // and the object holding it (returned in x1).
2502 __ Push(context_register());
2503 __ Mov(x10, Operand(proxy->name()));
2505 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
    __ Push(x0, x1);  // Function, receiver.
2508 // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }
2524 // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
2527 } else if (call_type == Call::PROPERTY_CALL) {
2528 Property* property = callee->AsProperty();
2529 { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithLoadIC(expr);
    } else {
      EmitKeyedCallWithLoadIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
2540 // Call to an arbitrary expression not handled specially above.
2541 { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
2557 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2558 Comment cmnt(masm_, "[ CallNew");
2559 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
2563 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
2566 VisitForStackValue(expr->expression());
2568 // Push the arguments ("left-to-right") on the stack.
2569 ZoneList<Expression*>* args = expr->arguments();
2570 int arg_count = args->length();
2571 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
2575 // Call the construct call builtin that handles allocation and
2576 // constructor invocation.
2577 SetSourcePosition(expr->position());
2579 // Load function and argument count into x1 and x0.
2580 __ Mov(x0, arg_count);
2581 __ Peek(x1, arg_count * kXRegSize);
2583 // Record call targets in unoptimized code.
2584 if (FLAG_pretenuring_call_new) {
2585 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2586 ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }
2590 __ LoadObject(x2, FeedbackVector());
2591 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
2593 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2594 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2595 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}
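// NOTE (editor): a 'new' expression sets up no receiver slot; the construct
// stub allocates the object itself. By convention x0 holds the argument
// count, x1 the constructor, and x2/x3 the feedback vector and slot used to
// record call targets (and, with --pretenuring-call-new, allocation sites).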
2600 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2601 ZoneList<Expression*>* args = expr->arguments();
2602 ASSERT(args->length() == 1);
2604 VisitForAccumulatorValue(args->at(0));
2606 Label materialize_true, materialize_false;
2607 Label* if_true = NULL;
2608 Label* if_false = NULL;
2609 Label* fall_through = NULL;
2610 context()->PrepareTest(&materialize_true, &materialize_false,
2611 &if_true, &if_false, &fall_through);
2613 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2614 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
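// NOTE (editor): smis have tag bit 0 clear, so TestAndSplit on kSmiTagMask
// selects if_true when the bit is zero. EmitIsNonNegativeSmi below extends
// the mask with bit 63 (the sign bit of the 32-bit payload after the smi
// shift) to also reject negative smis.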
2620 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2621 ZoneList<Expression*>* args = expr->arguments();
2622 ASSERT(args->length() == 1);
2624 VisitForAccumulatorValue(args->at(0));
2626 Label materialize_true, materialize_false;
2627 Label* if_true = NULL;
2628 Label* if_false = NULL;
2629 Label* fall_through = NULL;
2630 context()->PrepareTest(&materialize_true, &materialize_false,
2631 &if_true, &if_false, &fall_through);
2633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2634 __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
2635 if_false, fall_through);
2637 context()->Plug(if_true, if_false);
2641 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2642 ZoneList<Expression*>* args = expr->arguments();
2643 ASSERT(args->length() == 1);
2645 VisitForAccumulatorValue(args->at(0));
2647 Label materialize_true, materialize_false;
2648 Label* if_true = NULL;
2649 Label* if_false = NULL;
2650 Label* fall_through = NULL;
2651 context()->PrepareTest(&materialize_true, &materialize_false,
2652 &if_true, &if_false, &fall_through);
2654 __ JumpIfSmi(x0, if_false);
2655 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2656 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2657 // Undetectable objects behave like undefined when tested with typeof.
2658 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2659 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2660 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
2663 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2664 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2665 Split(le, if_true, if_false, fall_through);
2667 context()->Plug(if_true, if_false);
2671 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2672 ZoneList<Expression*>* args = expr->arguments();
2673 ASSERT(args->length() == 1);
2675 VisitForAccumulatorValue(args->at(0));
2677 Label materialize_true, materialize_false;
2678 Label* if_true = NULL;
2679 Label* if_false = NULL;
2680 Label* fall_through = NULL;
2681 context()->PrepareTest(&materialize_true, &materialize_false,
2682 &if_true, &if_false, &fall_through);
2684 __ JumpIfSmi(x0, if_false);
2685 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2686 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2687 Split(ge, if_true, if_false, fall_through);
2689 context()->Plug(if_true, if_false);
2693 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2694 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2695 ZoneList<Expression*>* args = expr->arguments();
2696 ASSERT(args->length() == 1);
2698 VisitForAccumulatorValue(args->at(0));
2700 Label materialize_true, materialize_false;
2701 Label* if_true = NULL;
2702 Label* if_false = NULL;
2703 Label* fall_through = NULL;
2704 context()->PrepareTest(&materialize_true, &materialize_false,
2705 &if_true, &if_false, &fall_through);
2707 __ JumpIfSmi(x0, if_false);
2708 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2709 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2710 __ Tst(x11, 1 << Map::kIsUndetectable);
2711 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2712 Split(ne, if_true, if_false, fall_through);
2714 context()->Plug(if_true, if_false);
2718 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2719 CallRuntime* expr) {
2720 ZoneList<Expression*>* args = expr->arguments();
2721 ASSERT(args->length() == 1);
2722 VisitForAccumulatorValue(args->at(0));
2724 Label materialize_true, materialize_false, skip_lookup;
2725 Label* if_true = NULL;
2726 Label* if_false = NULL;
2727 Label* fall_through = NULL;
2728 context()->PrepareTest(&materialize_true, &materialize_false,
2729 &if_true, &if_false, &fall_through);
2731 Register object = x0;
  __ AssertNotSmi(object);

  Register map = x10;
  Register bitfield2 = x11;
2736 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2737 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2738 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
2740 // Check for fast case object. Generate false result for slow case object.
2741 Register props = x12;
2742 Register props_map = x12;
2743 Register hash_table_map = x13;
2744 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
2745 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2746 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);
2750 // Look for valueOf name in the descriptor array, and indicate false if found.
2751 // Since we omit an enumeration index check, if it is added via a transition
  // that shares its descriptor array, this is a false positive.
  Label done;

  // Skip loop if no descriptors are valid.
2756 Register descriptors = x12;
2757 Register descriptors_length = x13;
2758 __ NumberOfOwnDescriptors(descriptors_length, map);
2759 __ Cbz(descriptors_length, &done);
2761 __ LoadInstanceDescriptors(map, descriptors);
2763 // Calculate the end of the descriptor array.
2764 Register descriptors_end = x14;
2765 __ Mov(x15, DescriptorArray::kDescriptorSize);
2766 __ Mul(descriptors_length, descriptors_length, x15);
2767 // Calculate location of the first key name.
2768 __ Add(descriptors, descriptors,
2769 DescriptorArray::kFirstOffset - kHeapObjectTag);
2770 // Calculate the end of the descriptor array.
2771 __ Add(descriptors_end, descriptors,
2772 Operand(descriptors_length, LSL, kPointerSizeLog2));
2774 // Loop through all the keys in the descriptor array. If one of these is the
2775 // string "valueOf" the result is false.
2776 Register valueof_string = x1;
2777 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
2778 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  Label loop;
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);
  __ Bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
2789 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2790 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2791 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2793 __ Bind(&skip_lookup);
2795 // If a valueOf property is not found on the object check that its prototype
2796 // is the unmodified String prototype. If not result is false.
2797 Register prototype = x1;
2798 Register global_idx = x2;
2799 Register native_context = x2;
2800 Register string_proto = x3;
2801 Register proto_map = x4;
2802 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
2803 __ JumpIfSmi(prototype, if_false);
2804 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2805 __ Ldr(global_idx, GlobalObjectMemOperand());
2806 __ Ldr(native_context,
2807 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
2808 __ Ldr(string_proto,
2809 ContextMemOperand(native_context,
2810 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2811 __ Cmp(proto_map, string_proto);
2813 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2814 Split(eq, if_true, if_false, fall_through);
2816 context()->Plug(if_true, if_false);
2820 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2821 ZoneList<Expression*>* args = expr->arguments();
2822 ASSERT(args->length() == 1);
2824 VisitForAccumulatorValue(args->at(0));
2826 Label materialize_true, materialize_false;
2827 Label* if_true = NULL;
2828 Label* if_false = NULL;
2829 Label* fall_through = NULL;
2830 context()->PrepareTest(&materialize_true, &materialize_false,
2831 &if_true, &if_false, &fall_through);
2833 __ JumpIfSmi(x0, if_false);
2834 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
2835 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2836 Split(eq, if_true, if_false, fall_through);
2838 context()->Plug(if_true, if_false);
2842 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2843 ZoneList<Expression*>* args = expr->arguments();
2844 ASSERT(args->length() == 1);
2846 VisitForAccumulatorValue(args->at(0));
2848 Label materialize_true, materialize_false;
2849 Label* if_true = NULL;
2850 Label* if_false = NULL;
2851 Label* fall_through = NULL;
2852 context()->PrepareTest(&materialize_true, &materialize_false,
2853 &if_true, &if_false, &fall_through);
2855 // Only a HeapNumber can be -0.0, so return false if we have something else.
2856 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
2858 // Test the bit pattern.
2859 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2860 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
2862 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2863 Split(vs, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
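// NOTE (editor): -0.0 is the only double whose bit pattern is
// 0x8000000000000000 (INT64_MIN); Cmp(x10, 1) computes x10 - 1, which sets
// the overflow flag only for that value, so splitting on vs selects exactly
// minus zero.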
2869 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2870 ZoneList<Expression*>* args = expr->arguments();
2871 ASSERT(args->length() == 1);
2873 VisitForAccumulatorValue(args->at(0));
2875 Label materialize_true, materialize_false;
2876 Label* if_true = NULL;
2877 Label* if_false = NULL;
2878 Label* fall_through = NULL;
2879 context()->PrepareTest(&materialize_true, &materialize_false,
2880 &if_true, &if_false, &fall_through);
2882 __ JumpIfSmi(x0, if_false);
2883 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2884 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2885 Split(eq, if_true, if_false, fall_through);
2887 context()->Plug(if_true, if_false);
2891 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2892 ZoneList<Expression*>* args = expr->arguments();
2893 ASSERT(args->length() == 1);
2895 VisitForAccumulatorValue(args->at(0));
2897 Label materialize_true, materialize_false;
2898 Label* if_true = NULL;
2899 Label* if_false = NULL;
2900 Label* fall_through = NULL;
2901 context()->PrepareTest(&materialize_true, &materialize_false,
2902 &if_true, &if_false, &fall_through);
2904 __ JumpIfSmi(x0, if_false);
2905 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2906 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2907 Split(eq, if_true, if_false, fall_through);
2909 context()->Plug(if_true, if_false);
2914 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2915 ASSERT(expr->arguments()->length() == 0);
2917 Label materialize_true, materialize_false;
2918 Label* if_true = NULL;
2919 Label* if_false = NULL;
2920 Label* fall_through = NULL;
2921 context()->PrepareTest(&materialize_true, &materialize_false,
2922 &if_true, &if_false, &fall_through);
2924 // Get the frame pointer for the calling frame.
2925 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2927 // Skip the arguments adaptor frame if it exists.
2928 Label check_frame_marker;
2929 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
2930 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2931 __ B(ne, &check_frame_marker);
2932 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
2934 // Check the marker in the calling frame.
2935 __ Bind(&check_frame_marker);
2936 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
2937 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
2938 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2939 Split(eq, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
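// NOTE (editor): if an arguments adaptor frame was inserted for an
// argument-count mismatch, its context slot holds the ARGUMENTS_ADAPTOR
// sentinel; it is skipped so that the CONSTRUCT marker of the real calling
// frame is the one inspected.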
2945 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2946 ZoneList<Expression*>* args = expr->arguments();
2947 ASSERT(args->length() == 2);
2949 // Load the two objects into registers and perform the comparison.
2950 VisitForStackValue(args->at(0));
2951 VisitForAccumulatorValue(args->at(1));
2953 Label materialize_true, materialize_false;
2954 Label* if_true = NULL;
2955 Label* if_false = NULL;
2956 Label* fall_through = NULL;
2957 context()->PrepareTest(&materialize_true, &materialize_false,
2958 &if_true, &if_false, &fall_through);
  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2963 Split(eq, if_true, if_false, fall_through);
2965 context()->Plug(if_true, if_false);
2969 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2970 ZoneList<Expression*>* args = expr->arguments();
2971 ASSERT(args->length() == 1);
2973 // ArgumentsAccessStub expects the key in x1.
2974 VisitForAccumulatorValue(args->at(0));
2976 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
2979 context()->Plug(x0);
2983 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
2987 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
2989 // Check if the calling frame is an arguments adaptor frame.
2990 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2991 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
3000 context()->Plug(x0);
3004 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3005 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3006 ZoneList<Expression*>* args = expr->arguments();
3007 ASSERT(args->length() == 1);
3008 Label done, null, function, non_function_constructor;
3010 VisitForAccumulatorValue(args->at(0));
3012 // If the object is a smi, we return null.
3013 __ JumpIfSmi(x0, &null);
3015 // Check that the object is a JS object but take special care of JS
3016 // functions to make sure they have 'Function' as their class.
3017 // Assume that there are only two callable types, and one of them is at
3018 // either end of the type range for JS object types. Saves extra comparisons.
3019 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3020 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3021 // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3025 FIRST_SPEC_OBJECT_TYPE + 1);
3026 __ B(eq, &function);
3028 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3029 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3030 LAST_SPEC_OBJECT_TYPE - 1);
3031 __ B(eq, &function);
3032 // Assume that there is no larger type.
3033 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3035 // Check if the constructor in the map is a JS function.
3036 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3037 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3038 &non_function_constructor);
3040 // x12 now contains the constructor function. Grab the
3041 // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
  __ B(&done);
3052 // Objects with a non-function constructor have class 'Object'.
3053 __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);
3064 context()->Plug(x0);
3068 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3069 // Conditionally generate a log call.
3071 // 0 (literal string): The type of logging (corresponds to the flags).
3072 // This is used to determine whether or not to generate the log call.
3073 // 1 (string): Format string. Access the string at argument index 2
3074 // with '%2s' (see Logger::LogRuntime for all the formats).
3075 // 2 (array): Arguments to the format string.
3076 ZoneList<Expression*>* args = expr->arguments();
3077 ASSERT_EQ(args->length(), 3);
3078 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3079 VisitForStackValue(args->at(1));
3080 VisitForStackValue(args->at(2));
3081 __ CallRuntime(Runtime::kHiddenLog, 2);
3084 // Finally, we're expected to leave a value on the top of the stack.
3085 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3086 context()->Plug(x0);
3090 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3091 // Load the arguments on the stack and call the stub.
3092 SubStringStub stub(isolate());
3093 ZoneList<Expression*>* args = expr->arguments();
3094 ASSERT(args->length() == 3);
3095 VisitForStackValue(args->at(0));
3096 VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
3099 context()->Plug(x0);
3103 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3104 // Load the arguments on the stack and call the stub.
3105 RegExpExecStub stub(isolate());
3106 ZoneList<Expression*>* args = expr->arguments();
3107 ASSERT(args->length() == 4);
3108 VisitForStackValue(args->at(0));
3109 VisitForStackValue(args->at(1));
3110 VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
3113 context()->Plug(x0);
3117 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3118 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3119 ZoneList<Expression*>* args = expr->arguments();
3120 ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
3124 // If the object is a smi return the object.
3125 __ JumpIfSmi(x0, &done);
3126 // If the object is not a value type, return the object.
3127 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
3131 context()->Plug(x0);
3135 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3136 ZoneList<Expression*>* args = expr->arguments();
3137 ASSERT(args->length() == 2);
3138 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3139 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3141 VisitForAccumulatorValue(args->at(0)); // Load the object.
3143 Label runtime, done, not_date_object;
3144 Register object = x0;
3145 Register result = x0;
3146 Register stamp_addr = x10;
3147 Register stamp_cache = x11;
  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(x10, stamp);
      __ Ldr(stamp_addr, MemOperand(x10));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ B(&done);
    }

    __ Bind(&runtime);
    __ Mov(x1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
  }
  __ B(&done);

  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  __ Bind(&done);
  context()->Plug(x0);
}
3181 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3182 ZoneList<Expression*>* args = expr->arguments();
3183 ASSERT_EQ(3, args->length());
3185 Register string = x0;
3186 Register index = x1;
3187 Register value = x2;
3188 Register scratch = x10;
3190 VisitForStackValue(args->at(1)); // index
3191 VisitForStackValue(args->at(2)); // value
3192 VisitForAccumulatorValue(args->at(0)); // string
3193 __ Pop(value, index);
3195 if (FLAG_debug_code) {
3196 __ AssertSmi(value, kNonSmiValue);
3197 __ AssertSmi(index, kNonSmiIndex);
3198 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }
3203 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}
3211 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3212 ZoneList<Expression*>* args = expr->arguments();
3213 ASSERT_EQ(3, args->length());
3215 Register string = x0;
3216 Register index = x1;
3217 Register value = x2;
3218 Register scratch = x10;
3220 VisitForStackValue(args->at(1)); // index
3221 VisitForStackValue(args->at(2)); // value
3222 VisitForAccumulatorValue(args->at(0)); // string
3223 __ Pop(value, index);
3225 if (FLAG_debug_code) {
3226 __ AssertSmi(value, kNonSmiValue);
3227 __ AssertSmi(index, kNonSmiIndex);
3228 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }
3233 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}
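// NOTE (editor): the two-byte variant differs from the one-byte one above
// only in the header offset, the untagged index being scaled by 2 (LSL 1)
// and the store being a half-word Strh instead of a byte Strb.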
3241 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3242 // Load the arguments on the stack and call the MathPow stub.
3243 ZoneList<Expression*>* args = expr->arguments();
3244 ASSERT(args->length() == 2);
3245 VisitForStackValue(args->at(0));
3246 VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
3249 context()->Plug(x0);
3253 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3254 ZoneList<Expression*>* args = expr->arguments();
3255 ASSERT(args->length() == 2);
3256 VisitForStackValue(args->at(0)); // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;

  // If the object is a smi, return the value.
  __ JumpIfSmi(x1, &done);
3266 // If the object is not a value type, return the value.
3267 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
  // Store the value.
  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3271 // Update the write barrier. Save the value as it will be
3272 // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}
3282 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3283 ZoneList<Expression*>* args = expr->arguments();
3284 ASSERT_EQ(args->length(), 1);
3286 // Load the argument into x0 and call the stub.
3287 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
3291 context()->Plug(x0);
3295 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3296 ZoneList<Expression*>* args = expr->arguments();
3297 ASSERT(args->length() == 1);
3299 VisitForAccumulatorValue(args->at(0));
  Label done;
  Register code = x0;
  Register result = x1;
3305 StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
3313 context()->Plug(result);
3317 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3318 ZoneList<Expression*>* args = expr->arguments();
3319 ASSERT(args->length() == 2);
3321 VisitForStackValue(args->at(0));
3322 VisitForAccumulatorValue(args->at(1));
3324 Register object = x1;
3325 Register index = x0;
  Register result = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;

  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);
3348 __ Bind(&need_conversion);
3349 // Load the undefined value into the result register, which will
3350 // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);
3354 NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
3358 context()->Plug(result);
3362 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3363 ZoneList<Expression*>* args = expr->arguments();
3364 ASSERT(args->length() == 2);
3366 VisitForStackValue(args->at(0));
3367 VisitForAccumulatorValue(args->at(1));
3369 Register object = x1;
3370 Register index = x0;
  Register result = x0;
  Register scratch = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;

  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);
3395 __ Bind(&need_conversion);
3396 // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);
3400 NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
3404 context()->Plug(result);
3408 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3409 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3410 ZoneList<Expression*>* args = expr->arguments();
3411 ASSERT_EQ(2, args->length());
3413 VisitForStackValue(args->at(0));
3414 VisitForAccumulatorValue(args->at(1));
  __ Pop(x1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
3420 context()->Plug(x0);
3424 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3425 ZoneList<Expression*>* args = expr->arguments();
3426 ASSERT_EQ(2, args->length());
3427 VisitForStackValue(args->at(0));
3428 VisitForStackValue(args->at(1));
  StringCompareStub stub(isolate());
  __ CallStub(&stub);
3432 context()->Plug(x0);
3436 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3437 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3438 ZoneList<Expression*>* args = expr->arguments();
3439 ASSERT(args->length() >= 2);
3441 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3442 for (int i = 0; i < arg_count + 1; i++) {
3443 VisitForStackValue(args->at(i));
3445 VisitForAccumulatorValue(args->last()); // Function.
3447 Label runtime, done;
3448 // Check for non-function argument (including proxy).
3449 __ JumpIfSmi(x0, &runtime);
3450 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, result_register());
  ParameterCount count(arg_count);
3455 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());

  __ Bind(&done);
  context()->Plug(x0);
}
3468 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3469 RegExpConstructResultStub stub(isolate());
3470 ZoneList<Expression*>* args = expr->arguments();
3471 ASSERT(args->length() == 3);
3472 VisitForStackValue(args->at(0));
3473 VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(x1, x2);
  __ CallStub(&stub);
3477 context()->Plug(x0);
3481 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3482 ZoneList<Expression*>* args = expr->arguments();
3483 ASSERT_EQ(2, args->length());
3484 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3485 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3487 Handle<FixedArray> jsfunction_result_caches(
3488 isolate()->native_context()->jsfunction_result_caches());
3489 if (jsfunction_result_caches->length() <= cache_id) {
3490 __ Abort(kAttemptToUseUndefinedCache);
3491 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    context()->Plug(x0);
    return;
  }
  VisitForAccumulatorValue(args->at(1));

  Register key = x0;
  Register cache = x1;
3500 __ Ldr(cache, GlobalObjectMemOperand());
3501 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3502 __ Ldr(cache, ContextMemOperand(cache,
3503 Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ Ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
  Label done;
  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
                                       JSFunctionResultCache::kFingerOffset));
3510 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3511 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3513 // Load the key and data from the cache.
3514 __ Ldp(x2, x3, MemOperand(x3));
  __ Cmp(key, x2);
  __ CmovX(x0, x3, eq);
  __ B(eq, &done);
3520 // Call runtime to perform the lookup.
3521 __ Push(cache, key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ Bind(&done);
  context()->Plug(x0);
}
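// NOTE (editor): the JSFunctionResultCache keeps key/value pairs in a flat
// FixedArray with a "finger" pointing at the most recently used pair; the
// single Ldp above loads the cached key into x2 and its value into x3, and
// CmovX moves the value into the result register only on a key match.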
3529 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3530 ZoneList<Expression*>* args = expr->arguments();
3531 VisitForAccumulatorValue(args->at(0));
3533 Label materialize_true, materialize_false;
3534 Label* if_true = NULL;
3535 Label* if_false = NULL;
3536 Label* fall_through = NULL;
3537 context()->PrepareTest(&materialize_true, &materialize_false,
3538 &if_true, &if_false, &fall_through);
3540 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3541 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3542 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3543 Split(eq, if_true, if_false, fall_through);
3545 context()->Plug(if_true, if_false);
3549 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3550 ZoneList<Expression*>* args = expr->arguments();
3551 ASSERT(args->length() == 1);
3552 VisitForAccumulatorValue(args->at(0));
3554 __ AssertString(x0);
3556 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3557 __ IndexFromHash(x10, x0);
3559 context()->Plug(x0);
3563 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3564 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3566 ZoneList<Expression*>* args = expr->arguments();
3567 ASSERT(args->length() == 2);
3568 VisitForStackValue(args->at(1));
3569 VisitForAccumulatorValue(args->at(0));
3571 Register array = x0;
3572 Register result = x0;
3573 Register elements = x1;
3574 Register element = x2;
3575 Register separator = x3;
3576 Register array_length = x4;
  Register result_pos = x5;
  Register map = x6;
3579 Register string_length = x10;
3580 Register elements_end = x11;
3581 Register string = x12;
3582 Register scratch1 = x13;
3583 Register scratch2 = x14;
3584 Register scratch3 = x7;
3585 Register separator_length = x15;
3587 Label bailout, done, one_char_separator, long_separator,
3588 non_trivial_array, not_size_one_array, loop,
3589 empty_separator_loop, one_char_separator_loop,
3590 one_char_separator_loop_entry, long_separator_loop;
  // The separator operand is on the stack.
  __ Pop(separator);
3595 // Check that the array is a JSArray.
3596 __ JumpIfSmi(array, &bailout);
3597 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3599 // Check that the array has fast elements.
3600 __ CheckFastElements(map, scratch1, &bailout);
3602 // If the array has length zero, return the empty string.
3603 // Load and untag the length of the array.
3604 // It is an unsigned value, so we can skip sign extension.
3605 // We assume little endianness.
3606 __ Ldrsw(array_length,
3607 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3608 __ Cbnz(array_length, &non_trivial_array);
3609 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3612 __ Bind(&non_trivial_array);
3613 // Get the FixedArray containing array's elements.
3614 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3616 // Check that all array elements are sequential ASCII strings, and
3617 // accumulate the sum of their lengths.
3618 __ Mov(string_length, 0);
3619 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3620 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3621 // Loop condition: while (element < elements_end).
3622 // Live values in registers:
3623 // elements: Fixed array of strings.
3624 // array_length: Length of the fixed array of strings (not smi)
3625 // separator: Separator string
3626 // string_length: Accumulated sum of string lengths (not smi).
3627 // element: Current array element.
3628 // elements_end: Array end.
3629 if (FLAG_debug_code) {
3630 __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ Bind(&loop);
3634 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3635 __ JumpIfSmi(string, &bailout);
3636 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3637 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ Ldrsw(scratch1,
           UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ Adds(string_length, string_length, scratch1);
  __ B(vs, &bailout);
  __ Cmp(element, elements_end);
  __ B(lt, &loop);
3646 // If array_length is 1, return elements[0], a string.
3647 __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ B(&done);

  __ Bind(&not_size_one_array);
3654 // Live values in registers:
3655 // separator: Separator string
3656 // array_length: Length of the array (not smi).
3657 // string_length: Sum of string lengths (not smi).
3658 // elements: FixedArray of strings.
3660 // Check that the separator is a flat ASCII string.
3661 __ JumpIfSmi(separator, &bailout);
3662 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3663 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3664 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3666 // Add (separator length times array_length) - separator length to the
3667 // string_length to get the length of the result string.
3668 // Load the separator length as untagged.
3669 // We assume little endianness, and that the length is positive.
3670 __ Ldrsw(separator_length,
3671 UntagSmiFieldMemOperand(separator,
3672 SeqOneByteString::kLengthOffset));
3673 __ Sub(string_length, string_length, separator_length);
  __ Umaddl(string_length, array_length.W(), separator_length.W(),
            string_length);
3677 // Get first element in the array.
3678 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3679 // Live values in registers:
3680 // element: First array element
3681 // separator: Separator string
3682 // string_length: Length of result string (not smi)
3683 // array_length: Length of the array (not smi).
  __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
                         &bailout);
3687 // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
3690 // TODO(all): useless unless AllocateAsciiString trashes the register.
3691 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3692 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3694 // Check the length of the separator.
3695 __ Cmp(separator_length, 1);
3696 __ B(eq, &one_char_separator);
3697 __ B(gt, &long_separator);
3699 // Empty separator case
3700 __ Bind(&empty_separator_loop);
3701 // Live values in registers:
3702 // result_pos: the position to which we are currently copying characters.
3703 // element: Current array element.
3704 // elements_end: Array end.
3706 // Copy next array element to the result.
3707 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3708 __ Ldrsw(string_length,
3709 UntagSmiFieldMemOperand(string, String::kLengthOffset));
3710 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3711 __ CopyBytes(result_pos, string, string_length, scratch1);
3712 __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);
3716 // One-character separator case
3717 __ Bind(&one_char_separator);
3718 // Replace separator with its ASCII character value.
3719 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3720 // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
3722 __ B(&one_char_separator_loop_entry);
3724 __ Bind(&one_char_separator_loop);
3725 // Live values in registers:
3726 // result_pos: the position to which we are currently copying characters.
3727 // element: Current array element.
3728 // elements_end: Array end.
3729 // separator: Single separator ASCII char (in lower byte).
3731 // Copy the separator character to the result.
3732 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ Bind(&one_char_separator_loop_entry);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ Bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  // TODO(all): hoist next two instructions.
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(separator, String::kLengthOffset));
  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
    __ Push(x0);

    // Load the function from the receiver.
    Handle<String> name = expr->name();
    __ Mov(x2, Operand(name));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);

    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kPointerSize);
    __ CallStub(&stub);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, x0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Mov(x10, Smi::FromInt(strict_mode()));
        __ Push(x10);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Mov(x10, Smi::FromInt(SLOPPY));
          __ Push(x12, x11, x10);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ Push(x0);
      EmitNamedPropertyLoad(prop);
    } else {
      // KEYED_PROPERTY
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Peek(x1, 0);
      __ Push(x0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top of
        // the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
        }
      }
    }

    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }
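  // Note: Adds sets the overflow flag, so the "vc" branch above takes the
  // smi fast path. On signed overflow the increment is undone and the
  // BinaryOpIC stub is invoked; non-smi inputs are first converted via
  // ToNumberStub below.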
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  {
    Assembler::BlockPoolsScope scope(masm_);
    BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
    CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
      __ Pop(x1);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(x1);  // Key.
      __ Pop(x2);  // Receiver.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Ldr(x0, GlobalObjectMemOperand());
    __ Mov(x2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(x0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    __ Mov(x0, Operand(proxy->name()));
    __ Push(cp, x0);
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ Bind(&done);

    context()->Plug(x0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => false.
    __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             String::Equals(check, factory->null_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
    __ CompareRoot(x0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);

  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    if (!FLAG_harmony_typeof) {
      __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    }
    // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      Label slow_case;
      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient here,
      // perhaps using Adr?
      __ Bind(&continuation);
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
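      // If the stack pointer is still at the first expression slot, the
      // yielded value is the only thing on the operand stack, so there is
      // no extra state to save and the runtime call below can be skipped.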
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(x2, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                  // iter
      __ Push(x2, x3, x0);                            // "throw", iter, except
      __ B(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);                                        // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ Push(x0);                                       // result
      __ B(&l_suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient here,
      // perhaps using Adr?
      __ Bind(&l_continuation);
      __ B(&l_resume);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);                                       // g
      ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Pop(x0);                                        // result
      EmitReturnSequence();
      __ Bind(&l_resume);                                // received in x0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);
      __ LoadRoot(x2, Heap::knext_stringRootIndex);  // "next"
      __ Peek(x3, 1 * kPointerSize);                 // iter
      __ Push(x2, x3, x0);                           // "next", iter, received

      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(x1, 1 * kPointerSize);
      __ Peek(x0, 2 * kPointerSize);
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Push(x0);                                       // save result
      __ LoadRoot(x2, Heap::kdone_stringRootIndex);      // "done"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in x0
      // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Cbz(x0, &l_try);

      // result.value
      __ Pop(x0);                                        // result
      __ LoadRoot(x2, Heap::kvalue_stringRootIndex);     // "value"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.value in x0
      context()->DropAndPlug(2, x0);                     // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register value_reg = x0;
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  // The value stays in x0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed. x1
  // will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ Pop(generator_object);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
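  // The continuation field encodes the generator state: a positive smi is
  // the suspended code position, kGeneratorClosed (0) means the generator
  // has finished, and kGeneratorExecuting (< 0) means it is already running.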

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(generator_object,
                             JSGeneratorObject::kContextOffset));
  __ Ldr(function, FieldMemOperand(generator_object,
                                   JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);

  // Push holes for the rest of the arguments to the generator function.
  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));

  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
  // extension to correctly handle it. However, in this case, we operate on
  // 32-bit W registers, so extension isn't required.
  __ Ldr(w10, FieldMemOperand(x10,
                              SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
  __ Push(lr,           // Return address.
          fp,           // Caller's frame pointer.
          cp,           // Callee's context.
          function);    // Callee's JS Function.
  __ Add(fp, __ StackPointer(), kPointerSize * 2);
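  // After the four pushes above the frame pointer sits two slots above the
  // stack pointer, pointing at the saved caller fp, which matches the layout
  // of a standard JavaScript frame.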

  // Load and untag the operand stack size.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kOperandStackOffset));
  __ Ldr(operand_stack_size,
         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
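    // The continuation is an offset from the code entry point, so adding it
    // to the entry address yields the resume target directly.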
    __ Br(x10);

    __ Bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ Mov(x10, Smi::FromInt(resume_mode));
  __ Push(generator_object, result_register(), x10);
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  // Reach here when generator is closed.
  __ Bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    __ Push(x10);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ Push(value_reg);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ B(&done);

  // Throw error if we attempt to operate on a running generator.
  __ Bind(&wrong_state);
  __ Push(generator_object);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ Bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ Ldr(context_register(),
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ Bind(&allocated);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ Mov(map_reg, Operand(map));
  __ Pop(result_value);
  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
                JSGeneratorObject::kResultDonePropertyOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result,
                    JSGeneratorObject::kResultValuePropertyOffset));
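  // The STATIC_ASSERTs above guarantee that each Stp writes a pair of
  // adjacent fields (properties/elements and value/done) in one instruction.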

  // Only the value field needs a write barrier, as the other values are in
  // the root set.
  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  context()->Plug(result);
}


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (eg. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    ASSERT(kSmiTag == 0);
    __ Push(xzr);
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
    __ Push(x10);
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(x10);
  }
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  ASSERT(!result_register().is(x10));
  // Preserve the result register while executing finally block.
  // Also cook the return address in lr to the stack (smi encoded Code* delta).
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);
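  // Cooking the return address means the stack holds a smi-encoded offset
  // from the code object rather than a raw code pointer, so the slot stays
  // valid for the GC even if the code object moves.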

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ Mov(x11, has_pending_message);
  __ Ldrb(x11, MemOperand(x11));
  __ SmiTag(x11);

  __ Push(x10, x11);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x10, pending_message_script);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  ASSERT(!result_register().is(x10));

  // Restore pending message from stack.
  __ Pop(x10, x11, x12);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x13, pending_message_script);
  __ Str(x10, MemOperand(x13));

  __ SmiUntag(x11);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x13, has_pending_message);
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ Strb(x11, MemOperand(x13));

  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x12, MemOperand(x13));

  // Restore result register and cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  PatchingAssembler patcher(branch_address, 1);

  ASSERT(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));
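  // pc is the return address of the back-edge call, so branch_address points
  // at the "b.pl ok" branch (or the nop a previous patch put there), three
  // instructions earlier; this is the instruction rewritten below.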

  switch (target_state) {
    case INTERRUPT:
      // <decrement profiling counter>
      // .. .. .. ..  b.pl ok
      // .. .. .. ..  ldr x16, pc+<interrupt stub address>
      // .. .. .. ..  blr x16
      // ... more instructions.
      // ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // <decrement profiling counter>
      // .. .. .. ..  mov x0, x0 (NOP)
      // .. .. .. ..  ldr x16, pc+<on-stack replacement address>
      // .. .. .. ..  blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  ASSERT((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
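    // The ldr two instructions before the call loads its target from a
    // pc-relative literal slot; its current contents identify which stub the
    // back edge is wired to.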
    if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}


#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ Peek(cp, StackHandlerConstants::kContextOffset);
    __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64