// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM64

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm64/code-stubs-arm64.h"
#include "arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      ASSERT(info_emitted_);
    } else {
      ASSERT(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    ASSERT(!info_emitted_);
    ASSERT(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    ASSERT(!info_emitted_);
    ASSERT(reg.Is64Bits());
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

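// Note on the patching scheme (a summary, based on the comments above and on
// PatchInlinedSmiCode in ic-arm64.cc): the unpatched site always tests xzr,
// so EmitJumpIfNotSmi starts out as an always-taken branch and EmitJumpIfSmi
// as a fall-through. Once type feedback warrants it, the IC machinery
// rewrites this single instruction to test the smi tag bit of the register
// recorded by EmitPatchInfo, roughly:
//
//   tbz xzr, #0, target    // before patching: branch behaviour is fixed
//   tbz <reg>, #0, target  // after patching: a real smi check on <reg>
//
// This is why each site must be exactly one instruction (hence the
// InstructionAccurateScope) and why pools must not be emitted inside it.
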
static void EmitStackCheck(MacroAssembler* masm_,
                           int pointers = 0,
                           Register scratch = jssp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  ASSERT(jssp.Is(__ StackPointer()));
  ASSERT(scratch.Is(jssp) == (pointers == 0));
  if (pointers != 0) {
    __ Sub(scratch, jssp, pointers * kPointerSize);
  }
  __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  PredictableCodeSizeScope predictable(masm_,
                                       Assembler::kCallSizeWithRelocation);
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ Bind(&ok);
}

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  InitializeFeedbackVector();

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, locals_count, x10);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      EmitStackCheck(masm_);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}

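// Note: the profiling counter cell implements the interrupt-budget
// heuristic. Each back edge and each return decrements the cell by a weight
// roughly proportional to the amount of code executed since the last check
// (see EmitBackEdgeBookkeeping below). When the counter underflows, the
// InterruptCheck builtin is called, giving the runtime a chance to profile
// the function, trigger optimization, or service a pending debug break
// (hence the smaller budget while the debugger is active).
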
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  ASSERT(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

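// For example, the back edge of a hot loop compiles to approximately this
// shape (a sketch; exact registers and encodings aside):
//
//   <load counter cell; Subs counter, counter, #weight; store back>
//   B pl, ok                    ; budget not yet exhausted: skip the call
//   Call InterruptCheck         ; may optimize and OSR into this very loop
//   <reset profiling counter>
//  ok:
//
// The PC of this sequence is recorded against the loop's OSR id, which is
// what lets the optimizer replace the unoptimized frame while the function
// is still spinning in the loop (on-stack replacement).
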
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);

    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      ASSERT(x0.Is(result_register()));
    }

    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }

    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSRetSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the size
      // of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16 bytes alignment here.
      ASSERT(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
      __ LoadLiteral(ip0, 3 * kInstructionSize);
      __ add(current_sp, current_sp, ip0);
      __ ret();
      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}

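// Design note on the sequence above: it is emitted with raw assembler
// methods inside an InstructionAccurateScope so that it always occupies
// exactly kJSRetSequenceInstructions, letting the debugger overwrite it in
// place with a debug-break call. The dropped-argument byte count is not
// folded into an add-immediate; it is stored as a 64-bit literal (the dc64)
// directly after the ret, and the LoadLiteral three instructions earlier
// reads it. That is what keeps the sequence a fixed size regardless of the
// function's parameter count.
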
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

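// For reference: full-codegen evaluates every expression in one of four
// contexts, and the Plug overloads here and below are how a value is
// delivered into each of them:
//   - EffectContext: the value is discarded (expression statements).
//   - AccumulatorValueContext: the value is left in x0.
//   - StackValueContext: the value is pushed onto the stack.
//   - TestContext: the value is converted to a boolean and control is split
//     between a true label and a false label.
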
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    ASSERT(if_false != fall_through);
    __ B(InvertCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}

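// Example: for "if (a === b) { ... } else { ... }" with the else block
// emitted immediately after the compare, Split is called with
// fall_through == if_false and emits the single instruction
//   B eq, if_true
// instead of a conditional branch plus an unconditional one. The other two
// cases mirror this by inverting the condition, or by emitting both
// branches when neither label falls through.
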
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}

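// A sketch of the frame layout these helpers address (see the Generate
// prologue and JavaScriptFrameConstants; all offsets are fp-relative):
//
//   higher addresses: receiver, then parameters -- positive offsets into
//                     the caller's frame, hence the num_parameters + 1 term
//   [fp + 8]: lr (return address)     [fp + 0]: caller's fp
//   [fp - 8]: context                 [fp - 16]: function
//   below that: stack-allocated locals (kLocal0Offset and downwards,
//               negative offsets, one kXRegSize slot per local)
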
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

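// Design note: the write barrier is only needed for the context-slot case.
// Stack slots are scanned directly by the GC, but a store into a
// heap-allocated context must be recorded so that incremental marking and
// the generational collector see the new reference; RecordWriteContextSlot
// does that bookkeeping and is skipped entirely for stack-allocated
// variables.
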
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}

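// To make the hole initialization above concrete, a sketch in JS terms:
//
//   { f(); let x = 1; function f() { return x; } }
//
// The slot for x is pre-filled with the_hole_value at declaration time, so
// the early call to f() reads the hole and can throw a reference error
// instead of silently reading undefined; the hole is only replaced when the
// initializer finally runs (see the read barrier in EmitVariableLoad).
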
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));

  // Assign it.
  __ Str(x1, ContextMemOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            x1,
                            x3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

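// In outline (a sketch, not the exact emitted code), a statement like
//
//   switch (v) { case 0: A; default: D; case 1: B; }
//
// compiles to a chain of strict-equality tests against the value kept on
// the stack, each with an inlined-smi fast path and a CompareIC slow path:
//
//   test v === 0  -> body_A
//   test v === 1  -> body_B
//   drop v; jump body_D                ; default is tested last
//   body_A: A; body_D: D; body_B: B    ; bodies in source order, fall through
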
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(x0);  // Map.
  __ Mov(x0, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ SmiTag(x1);
  __ Push(x2, x1, x0);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  Handle<Object> feedback = Handle<Object>(
      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
      isolate());
  StoreFeedbackVectorSlot(slot, feedback);
  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  ASSERT(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Push(x1, x0);  // Smi and array
  __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, xzr);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x10.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Mov(x3, x0);
  __ Cbz(x0, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}

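// For reference, the five stack slots maintained by the loop above (and
// freed by the final Drop(5)) are, from deepest to shallowest:
//   [4] the enumerable object itself
//   [3] the expected map, or Smi(0) for proxies / Smi(1) for the slow case
//   [2] the array of keys (enum cache or fixed array from the runtime)
//   [1] the array length, as a smi
//   [0] the current index, as a smi
// which matches the Peek offsets used in the body of the loop.
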
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  Register iterator = x0;
  __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
                loop_statement.break_label());
  __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
                loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(iterator, &convert);
  __ CompareObjectType(iterator, x1, x1, FIRST_SPEC_OBJECT_TYPE);
  __ B(ge, &done_convert);
  __ Bind(&convert);
  __ Push(iterator);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(iterator);

  // Loop entry.
  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ Bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ B(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(x0);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(x0, GlobalObjectMemOperand());
  __ Mov(x2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}

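// Context note: a scope that calls sloppy-mode eval may have variables
// introduced into it at runtime, which materialize on the context's
// extension object. Any such extension makes static slot resolution
// unsound, so both helpers above bail out to the slow path (the
// Cbnz temp, slow checks) the moment a non-NULL extension is seen while
// walking the context chain.
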
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in x2 and the global
      // object (receiver) in x0.
      __ Ldr(x0, GlobalObjectMemOperand());
      __ Mov(x2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(x0, var);
          Label done;
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Mov(x0, Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Mov(x2, Operand(key->value()));
            __ Peek(x1, 0);
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(key);
          VisitForStackValue(value);
          __ Mov(x0, Smi::FromInt(NONE));  // PropertyAttributes
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          VisitForEffect(key);
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(value);
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}

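// A sketch of the fast/slow split above in JS terms: a small, flat literal
// such as
//   var o = {a: 1, b: 2};
// is cloned from its boilerplate by FastCloneShallowObjectStub, while
// literals that are deep, may store doubles, or exceed
// kMaximumClonedProperties fall back to the kHiddenCreateObjectLiteral
// runtime call. In both cases the properties that were not compile-time
// constants are then stored one by one by the loop above.
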
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
  } else if ((expr->depth() > 1) || Serializer::enabled() ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ Mov(x0, Smi::FromInt(flags));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Push(x0);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);   // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}



void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ Push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ Peek(x1, 0);
        __ Push(x0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ Push(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Mov(x2, Operand(key->value()));
  // Call load IC. It has arguments receiver (x0) and property name (x2).
  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key (x0) and receiver (x1).
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);

  // Perform combined smi check on both operands.
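  // With kSmiTag == 0, ORing the operands leaves the low tag bits clear only
  // if both operands are smis, so a single test of the combined value covers
  // both; the patch site records the instruction for the IC system to patch.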
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);
  BinaryOpICStub stub(op, mode);
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
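  // Note: on ARM64 a smi keeps its 32-bit payload in the upper word of the
  // register (kSmiShift == 32) with a zero tag below it, so shift counts are
  // extracted from bit kSmiShift and right-shift results are re-masked with
  // kSmiShiftMask to clear the tag bits.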
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR: {
      Label right_not_zero;
      __ Cbnz(right, &right_not_zero);
      __ Tbnz(left, kXSignBit, &stub_call);
      __ Bind(&right_not_zero);
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    }
    case Token::ADD:
      __ Adds(x10, left, right);
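      // Since both operands carry their payload as (value << 32), the tagged
      // addition sets V exactly when the untagged 32-bit sum overflows.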
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
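      // Both operands are tagged as (value << 32), so the 128-bit product is
      // (left_value * right_value) << 64 and Smulh's high half is exactly the
      // untagged 64-bit product.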
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      STATIC_ASSERT(kSmiTag == 0);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ Pop(x1);
  BinaryOpICStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidLeftHandSide());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(x1, x0);
      __ Pop(x0);  // Restore value.
      __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(x1, x0);
      __ Pop(x2, x0);
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitCallStoreContextSlot(
    Handle<String> name, StrictMode strict_mode) {
  __ Mov(x11, Operand(name));
  __ Mov(x10, Smi::FromInt(strict_mode));
  // jssp[0]  : mode.
  // jssp[8]  : name.
  // jssp[16] : context.
  // jssp[24] : value.
  __ Push(x0, cp, x11, x10);
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(x2, Operand(var->name()));
    __ Ldr(x1, GlobalObjectMemOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Push(x0);
      __ Mov(x0, Operand(var->name()));
      __ Push(cp, x0);  // Context and name.
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
      __ Mov(x10, Operand(var->name()));
      __ Push(x10);
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      // Perform the assignment.
      __ Bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
  __ Pop(x1);  // Receiver.

  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // TODO(all): Could we pass this in registers rather than on the stack?
  __ Pop(x1, x2);  // Key and object holding the property.

  Handle<Code> ic = strict_mode() == SLOPPY
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(x0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ Pop(x1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(x0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithIC(Call* expr) {
  ASM_LOCATION("EmitCallWithIC");

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  CallFunctionFlags flags;
  // Get the target function.
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
    flags = NO_CALL_FUNCTION_FLAGS;
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ Peek(x0, 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
    flags = CALL_AS_METHOD;
  }

  // Load the arguments.
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
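  // The function sits arg_count + 1 slots below the top of the stack: the
  // receiver and then the arguments were pushed above it.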
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ CallStub(&stub);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, x0);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ Peek(x1, 0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ CallStub(&stub);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, x0);
}


void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
      TypeFeedbackInfo::UninitializedSentinel(isolate());
  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));

  // Record call targets in unoptimized code.
  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x10, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
  }

  // Prepare to push the receiver of the enclosing function.
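  // The receiver lives in the caller's frame above the saved fp and lr (the
  // two extra slots) plus the function's parameters.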
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));

  // Push.
  __ Push(x10, x11);

  // Prepare to push the language mode.
  __ Mov(x10, Smi::FromInt(strict_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x11, Smi::FromInt(scope()->start_position()));

  // Push.
  __ Push(x10, x11);

  // Do the runtime call.
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    {
      PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      __ Push(x10);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ Peek(x10, (arg_count + 1) * kPointerSize);
      __ Push(x10);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in x0 (function) and
      // x1 (receiver). Touch up the stack with the right values.
      __ PokePair(x1, x0, arg_count * kPointerSize);
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());

    // Call the evaluated function.
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, x0);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Push(context_register());
    __ Mov(x10, Operand(proxy->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
    __ Push(x0, x1);  // Receiver, function.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      __ Push(x0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x1);
      __ Bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCallWithStub(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCallWithStub(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackInfo::UninitializedSentinel(isolate());
  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
  if (FLAG_pretenuring_call_new) {
    StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
                            isolate()->factory()->NewAllocationSite());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
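  // With kSmiTag == 0, x0 is a smi exactly when the kSmiTagMask bits are
  // clear; the first label is taken when all tested bits are clear.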
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
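  // Besides the smi tag, also test the sign bit of the 32-bit payload
  // (bit 31 shifted up by kSmiShift); both must be clear for a non-negative
  // smi.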
  __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
                  if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tbnz(x11, Map::kIsUndetectable, if_false);
  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tst(x11, 1 << Map::kIsUndetectable);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Register object = x0;
  __ AssertNotSmi(object);

  Register map = x10;
  Register bitfield2 = x11;
  __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  Register props = x12;
  Register props_map = x12;
  Register hash_table_map = x13;
  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label loop, done;

  // Skip loop if no descriptors are valid.
  Register descriptors = x12;
  Register descriptors_length = x13;
  __ NumberOfOwnDescriptors(descriptors_length, map);
  __ Cbz(descriptors_length, &done);

  __ LoadInstanceDescriptors(map, descriptors);

  // Calculate the end of the descriptor array.
  Register descriptors_end = x14;
  __ Mov(x15, DescriptorArray::kDescriptorSize);
  __ Mul(descriptors_length, descriptors_length, x15);
  // Calculate location of the first key name.
  __ Add(descriptors, descriptors,
         DescriptorArray::kFirstOffset - kHeapObjectTag);
  // Calculate the end of the descriptor array.
  __ Add(descriptors_end, descriptors,
         Operand(descriptors_length, LSL, kPointerSizeLog2));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
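  // Each descriptor entry is DescriptorArray::kDescriptorSize pointers wide;
  // the post-indexed load below reads the key and then advances the cursor by
  // one whole entry.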
  Register valueof_string = x1;
  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);

  __ Bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));

  __ Bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
  Register prototype = x1;
  Register global_idx = x2;
  Register native_context = x2;
  Register string_proto = x3;
  Register proto_map = x4;
  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
  __ JumpIfSmi(prototype, if_false);
  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
  __ Ldr(global_idx, GlobalObjectMemOperand());
  __ Ldr(native_context,
         FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
  __ Ldr(string_proto,
         ContextMemOperand(native_context,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Cmp(proto_map, string_proto);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Only a HeapNumber can be -0.0, so return false if we have something else.
  __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);

  // Test the bit pattern.
  __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
  __ Cmp(x10, 1);  // Set V on 0x8000000000000000.
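  // The bit pattern of -0.0 is 0x8000000000000000 (INT64_MIN), the only value
  // for which subtracting 1 signed-overflows, so the vs condition below
  // selects exactly -0.0.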

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(vs, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &check_frame_marker);
  __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ Bind(&check_frame_marker);
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in x1.
  VisitForAccumulatorValue(args->at(0));
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(x0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ B(eq, &function);

  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ B(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
  __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
                         &non_function_constructor);

  // x12 now contains the constructor function. Grab the
  // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
  __ B(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kHiddenLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(x0, &done);
  // If the object is not a value type, return the object.
  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = x0;
  Register result = x0;
  Register stamp_addr = x10;
  Register stamp_cache = x11;

  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);

  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ B(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(x10, stamp);
      __ Ldr(stamp_addr, MemOperand(x10));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ B(&done);
    }

    __ Bind(&runtime);
    __ Mov(x1, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ B(&done);
  }

  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(1));        // index
  VisitForStackValue(args->at(2));        // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }

  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(1));        // index
  VisitForStackValue(args->at(2));        // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }

  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the MathPow stub.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(x1, &done);

  // If the object is not a value type, return the value.
  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);

  // Store the value.
  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument into x0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x0;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ Pop(x1);
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(x0, &runtime);
  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);

  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, x0);
  ParameterCount count(arg_count);
  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ Bind(&done);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(x1, x2);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    context()->Plug(x0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = x0;
  Register cache = x1;
  __ Ldr(cache, GlobalObjectMemOperand());
  __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ Ldr(cache, ContextMemOperand(cache,
                                  Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ Ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done;
  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
                                       JSFunctionResultCache::kFingerOffset));
  __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));

  // Load the key and data from the cache.
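  // The cache stores key/value pairs in adjacent FixedArray elements and the
  // finger indexes the key of the most recently used pair, so a single Ldp
  // fetches both the candidate key and its value.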
  __ Ldp(x2, x3, MemOperand(x3));

  __ Cmp(key, x2);
  __ CmovX(x0, x3, eq);
  __ B(eq, &done);

  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ Tst(x10, String::kContainsCachedArrayIndexMask);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(x0);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ IndexFromHash(x10, x0);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  Register array = x0;
  Register result = x0;
  Register elements = x1;
  Register element = x2;
  Register separator = x3;
  Register array_length = x4;
  Register result_pos = x5;
  Register map = x6;
  Register string_length = x10;
  Register elements_end = x11;
  Register string = x12;
  Register scratch1 = x13;
  Register scratch2 = x14;
  Register scratch3 = x7;
  Register separator_length = x15;

  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;

  // The separator operand is on the stack.
  __ Pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(map, scratch1, &bailout);

  // If the array has length zero, return the empty string.
  // Load and untag the length of the array.
  // It is an unsigned value, so we can skip sign extension.
  // We assume little endianness.
  __ Ldrsw(array_length,
           UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
  __ Cbnz(array_length, &non_trivial_array);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&non_trivial_array);
  // Get the FixedArray containing array's elements.
  __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths.
  __ Mov(string_length, 0);
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (not smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (FLAG_debug_code) {
    __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ Bind(&loop);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ Ldrsw(scratch1,
           UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ Adds(string_length, string_length, scratch1);
  __ B(vs, &bailout);
  __ Cmp(element, elements_end);
  __ B(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ B(&done);

  __ Bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array (not smi).
  //   string_length: Sum of string lengths (not smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.
  // Load the separator length as untagged.
  // We assume little endianness, and that the length is positive.
  __ Ldrsw(separator_length,
           UntagSmiFieldMemOperand(separator,
                                   SeqOneByteString::kLengthOffset));
  __ Sub(string_length, string_length, separator_length);
3753 __ Umaddl(string_length, array_length.W(), separator_length.W(),
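  // string_length now holds the untagged result length:
  //   sum(lengths) + (array_length - 1) * separator_length.
  // Umaddl computes string_length + array_length * separator_length as a
  // 32x32->64-bit multiply-accumulate, so the product cannot wrap a W
  // register.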
  // Get first element in the array.
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array (not smi).
  __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
                         &bailout);

  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  // TODO(all): useless unless AllocateAsciiString trashes the register.
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);

  // Check the length of the separator.
  __ Cmp(separator_length, 1);
  __ B(eq, &one_char_separator);
  __ B(gt, &long_separator);
  // Empty separator case.
  __ Bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);
  // One-character separator case.
  __ Bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ B(&one_char_separator_loop_entry);

  __ Bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ Bind(&one_char_separator_loop_entry);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);
  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ Bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  // TODO(all): hoist next two instructions.
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(separator, String::kLengthOffset));
  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  __ Bind(&done);
  context()->Plug(result);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
    __ Push(x0);

    // Load the function from the receiver.
    Handle<String> name = expr->name();
    __ Mov(x2, Operand(name));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);

    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kPointerSize);
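    // The target function was pushed below the receiver and the arguments,
    // so it sits (arg_count + 1) slots down the stack; CallFunctionStub
    // expects it in x1.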
    __ CallStub(&stub);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, x0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(x0);
  }
}
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Mov(x10, Smi::FromInt(strict_mode()));
        __ Push(x10);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Mov(x10, Smi::FromInt(SLOPPY));
          __ Push(x12, x11, x10);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ Push(x0);
      EmitNamedPropertyLoad(prop);
    } else {
      // KEYED_PROPERTY
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Peek(x1, 0);
      __ Push(x0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }
  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);
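    // Fast path: the operand is known to be a smi here. Adding
    // Smi::FromInt(count_value) to a tagged smi produces the correctly
    // tagged sum, because the smi tag bits are zero, so the Adds below
    // operates directly on the tagged representation.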
    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property we
        // store the result under the receiver that is currently on top of the
        // stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
        }
      }
    }

    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);  // No overflow: the result is a valid smi.
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
      }
    }
  }
  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  {
    Assembler::BlockPoolsScope scope(masm_);
    BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
    CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);
  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
      __ Pop(x1);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(x1);  // Key.
      __ Pop(x2);  // Receiver.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Ldr(x0, GlobalObjectMemOperand());
    __ Mov(x2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(x0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    __ Mov(x0, Operand(proxy->name()));
    __ Push(cp, x0);
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ Bind(&done);

    context()->Plug(x0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => false.
    __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
    __ CompareRoot(x0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    if (!FLAG_harmony_typeof) {
      __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    }
    // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient here,
      // perhaps using Adr?
      __ Bind(&continuation);
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
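      // The code offset of |continuation| is stored (as a smi) in the
      // generator object; on resume it is added to the code entry to find
      // the address at which to re-enter this function.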
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }
    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
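      // yield* is lowered to an explicit loop: repeatedly call iter.next
      // (or iter.throw if an exception arrives while suspended), re-yield
      // each intermediate result, and exit once result.done is true. The
      // labels below mark the pieces of that loop.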
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(x2, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                  // iter
      __ Push(x2, x3, x0);                            // "throw", iter, except
      __ B(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);  // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ Push(x0);  // result
      __ B(&l_suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient here,
      // perhaps using Adr?
      __ Bind(&l_continuation);
      __ B(&l_resume);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);  // g
      ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Pop(x0);  // result
      EmitReturnSequence();
      __ Bind(&l_resume);  // received in x0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);
      __ LoadRoot(x2, Heap::knext_stringRootIndex);  // "next"
      __ Peek(x3, 1 * kPointerSize);                 // iter
      __ Push(x2, x3, x0);                           // "next", iter, received

      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(x1, 1 * kPointerSize);
      __ Peek(x0, 2 * kPointerSize);
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      CallFunctionStub stub(1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Push(x0);  // save result
      __ LoadRoot(x2, Heap::kdone_stringRootIndex);  // "done"
      CallLoadIC(NOT_CONTEXTUAL);  // result.done in x0
      // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Cbz(x0, &l_try);

      // result.value
      __ Pop(x0);  // result
      __ LoadRoot(x2, Heap::kvalue_stringRootIndex);  // "value"
      CallLoadIC(NOT_CONTEXTUAL);  // result.value in x0
      context()->DropAndPlug(2, x0);  // drop iter and g
      break;
    }
  }
}
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register value_reg = x0;
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  // The value stays in x0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed. x1
  // will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ Pop(generator_object);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(generator_object,
                             JSGeneratorObject::kContextOffset));
  __ Ldr(function, FieldMemOperand(generator_object,
                                   JSGeneratorObject::kFunctionOffset));
  // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);

  // Push holes for the rest of the arguments to the generator function.
  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));

  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
  // extension to correctly handle it. However, in this case, we operate on
  // 32-bit W registers, so extension isn't required.
  __ Ldr(w10, FieldMemOperand(x10,
                              SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
  __ Push(lr,           // Return address.
          fp,           // Caller's frame pointer.
          cp,           // Callee's context.
          function);    // Callee's JS Function.
  __ Add(fp, __ StackPointer(), kPointerSize * 2);
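  // fp now points at the saved-fp slot, so the rebuilt frame has the same
  // layout as a standard JavaScript frame created by an ordinary call.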
  // Load and untag the operand stack size.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kOperandStackOffset));
  __ Ldr(operand_stack_size,
         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
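    // x10 is now code entry + stored continuation offset: the address at
    // which the generator suspended. Mark the generator as executing and
    // branch straight back in, bypassing the runtime.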
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
    __ Br(x10);

    __ Bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ Mov(x10, Smi::FromInt(resume_mode));
  __ Push(generator_object, result_register(), x10);
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  // Reach here when generator is closed.
  __ Bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    __ Push(x10);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ Push(value_reg);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ B(&done);

  // Throw error if we attempt to operate on a running generator.
  __ Bind(&wrong_state);
  __ Push(generator_object);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ Bind(&done);
  context()->Plug(result_register());
}
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ Ldr(context_register(),
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ Bind(&allocated);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  __ Mov(map_reg, Operand(map));
  __ Pop(result_value);
  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
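  // The result object is exactly five pointer-sized fields: map, properties,
  // elements, value and done, stored individually below.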
  // TODO(jbramley): Use Stp if possible.
  __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset));
  __ Str(empty_fixed_array,
         FieldMemOperand(result, JSObject::kPropertiesOffset));
  __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset));
  __ Str(result_value,
         FieldMemOperand(result,
                         JSGeneratorObject::kResultValuePropertyOffset));
  __ Str(boolean_done,
         FieldMemOperand(result,
                         JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  context()->Plug(result);
}
// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (eg. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    ASSERT(kSmiTag == 0);
    __ Push(xzr);
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
    __ Push(x10);
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(x10);
  }
}
void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  ASSERT(!result_register().is(x10));
  // Preserve the result register while executing finally block.
  // Also cook the return address in lr to the stack (smi encoded Code* delta).
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x11, has_pending_message);
  __ Ldr(x11, MemOperand(x11));
  __ SmiTag(x11);

  __ Push(x10, x11);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x10, pending_message_script);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);
}
void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  ASSERT(!result_register().is(x10));

  // Restore pending message from stack.
  __ Pop(x10, x11, x12);
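  // The three values just popped mirror the pushes in EnterFinallyBlock,
  // top of stack first: x10 is the pending message script, x11 is
  // has_pending_message (as a smi), and x12 is the pending message object.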
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x13, pending_message_script);
  __ Str(x10, MemOperand(x13));

  __ SmiUntag(x11);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x13, has_pending_message);
  __ Str(x11, MemOperand(x13));

  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x12, MemOperand(x13));

  // Restore result register and cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}


#undef __
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
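  // The back edge sequence is {b.pl / nop; ldr x16, <address>; blr x16}, so
  // the patchable branch sits three instructions before the return address
  // that |pc| points at.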
  PatchingAssembler patcher(branch_address, 1);

  ASSERT(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));
  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..  b.pl ok
      //  .. .. .. ..  ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..  blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  .. .. .. ..  mov x0, x0 (NOP)
      //  .. .. .. ..  ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..  blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }
  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  ASSERT((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}
#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ Peek(cp, StackHandlerConstants::kContextOffset);
    __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64