// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw
// 12-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
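//
// A minimal worked example of that encoding (illustrative numbers, not from
// a real compile): a delta of 0x1234 bytes is recorded as cmp r1, #0x235,
// since 0x1234 / 0xfff == 1 (the register code) and 0x1234 % 0xfff == 0x235
// (the 12-bit immediate); the patcher recovers 1 * 0xfff + 0x235 == 0x1234.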
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


static void EmitStackCheck(MacroAssembler* masm_,
                           Register stack_limit_scratch,
                           int pointers = 0,
                           Register scratch = sp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  ASSERT(scratch.is(sp) == (pointers == 0));
  Heap::RootListIndex index;
  if (pointers != 0) {
    __ sub(scratch, sp, Operand(pointers * kPointerSize));
    index = Heap::kRealStackLimitRootIndex;
  } else {
    index = Heap::kStackLimitRootIndex;
  }
  __ LoadRoot(stack_limit_scratch, index);
  __ cmp(scratch, Operand(stack_limit_scratch));
  __ b(hs, &ok);
  PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ bind(&ok);
}

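// The helper above is used in two ways in this file: EmitStackCheck(masm_, ip)
// compares sp directly against the interrupt stack limit, while
// EmitStackCheck(masm_, r2, locals_count, r9) first computes sp minus the
// space the locals will occupy (into the r9 scratch) and checks that against
// the real stack limit, so a large local frame cannot blow the stack before
// the individual pushes happen.
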
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  InitializeFeedbackVector();

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, r2, locals_count, r9);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
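  // Illustrative arithmetic for the batching above (hypothetical numbers,
  // not from a real compile): with locals_count == 70 and kMaxPushes == 32,
  // the emitted loop runs 70 / 32 == 2 iterations of 32 pushes (64 slots)
  // and the unrolled tail emits the 70 % 32 == 6 remaining pushes.
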
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      EmitStackCheck(masm_, ip);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

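// The weight passed to EmitProfilingCounterDecrement above scales with the
// loop body size: as a purely hypothetical example, a back edge that sits
// 10 * kCodeSizeMultiplier bytes from its target decrements the counter by
// 10 per iteration, any smaller loop still decrements by at least 1, and
// kMaxBackEdgeWeight caps the decrement for very large bodies.
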
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      __ add(sp, sp, Operand(sp_delta));
      __ Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

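// As a source-level reading of the folding above: a test on the literal ""
// or the smi literal 0 branches straight to the false label, a test on true
// or on an object value branches straight to the true label, and only
// literals this switch does not classify (for example a non-smi heap number
// like 3.14) fall back to the generic DoTest path.
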
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}

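// A small illustration of the fall-through optimization above: a call like
// Split(eq, if_true, if_false, fall_through) with if_false == fall_through
// emits just "b eq, if_true" and lets the false case fall through, saving
// the unconditional branch that the general case (neither label equal to
// fall_through) requires.
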
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

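// Reading the formula above symbolically (no assumptions beyond the code):
// a stack-allocated local with index 2 resolves to
// MemOperand(fp, JavaScriptFrameConstants::kLocal0Offset - 2 * kPointerSize),
// while a parameter slot is addressed above fp via the (num_parameters + 1)
// adjustment, the extra slot accounting for the receiver.
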
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}

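// In source terms, hole_init above is what separates, for example, "let x;"
// (the slot is primed with the hole so a use before initialization can be
// detected) from "var x;" (the slot needs no hole store and simply reads as
// undefined).
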
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

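// A sketch of the code layout this produces for
// "switch (x) { case a: A; default: D; }": the comparison against "a" is
// emitted first and branches to A's body label on a match; falling off the
// comparison chain branches to D's body label (or, with no default clause,
// to the break label); the bodies A and D are then emitted in source order,
// so a matched case falls through into the next body unless it breaks.
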
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Object> feedback = Handle<Object>(
      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
      isolate());
  StoreFeedbackVectorSlot(slot, feedback);
  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

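// For reference, the five stack slots removed by the Drop(5) above, as laid
// out by the pushes earlier in this function (sp-relative inside the loop):
//   [sp + 0]                : current index (smi)
//   [sp + 1 * kPointerSize] : length of the cache or fixed array (smi)
//   [sp + 2 * kPointerSize] : enum cache array or fixed array of keys
//   [sp + 3 * kPointerSize] : expected map, or a smi marking the slow case
//   [sp + 4 * kPointerSize] : the enumerable object itself
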
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.break_label());
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy
        // have the same declaration scope (i.e. they are both in global code,
        // in the same function or in the same eval code) and the VariableProxy
        // is in the source physically located after the initializer of the
        // variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->value()));
            __ ldr(r1, MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}

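// A source-level illustration of CalculateEmitStore above: in a literal like
// {x: f(), x: g()} both f() and g() are still evaluated, in order, but only
// the g() result is stored for the duplicated key "x"; the store for the
// shadowed first occurrence is suppressed (its value is visited for effect
// only).
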
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1 || Serializer::enabled() ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ mov(r0, Operand(Smi::FromInt(flags)));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(r0);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ ldr(r6, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
      __ str(result_register(), FieldMemOperand(r1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(r1, offset, result_register(), r2,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ mov(r3, Operand(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

1884 __ pop(); // literal index
1885 context()->PlugTOS();
1887 context()->Plug(r0);
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }
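
  // For a compound assignment the receiver (and key, if any) must survive the
  // load of the old value, so below they are kept on the stack and, where the
  // load needs them, also placed in registers.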
  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ ldr(r1, MemOperand(sp, 0));
        __ push(r0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(r0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(r0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());
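
  // Each yield kind below suspends or completes the generator differently:
  // SUSPEND and INITIAL record the continuation and context and return to the
  // caller, FINAL closes the generator, and DELEGATING drives a sub-iterator.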
  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
      __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
      __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
      __ mov(r1, cp);
      __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ cmp(sp, r1);
      __ b(eq, &post_runtime);
      __ push(r0);  // generator object
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ str(r1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
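      // (Here g is the generator object pushed just above, and iter is the
      // delegated iterator, left on the stack by the VisitForStackValue of
      // the yield* operand at the top of this visitor.)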
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
      __ b(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(r2, Heap::kthrow_stringRootIndex);  // "throw"
      __ ldr(r3, MemOperand(sp, 1 * kPointerSize));   // iter
      __ Push(r2, r3, r0);                            // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(r0);                                        // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(r0);                                       // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ ldr(r0, MemOperand(sp, generator_object_depth));
      __ push(r0);                                       // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
      __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
      __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
      __ mov(r1, cp);
      __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(r0);                                        // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in r0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(r2, Heap::knext_stringRootIndex);  // "next"
      __ ldr(r3, MemOperand(sp, 1 * kPointerSize));  // iter
      __ Push(r2, r3, r0);                           // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ ldr(r1, MemOperand(sp, kPointerSize));
      __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(r1, r0);
      __ str(r1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(r0);                                       // save result
      __ LoadRoot(r2, Heap::kdone_stringRootIndex);      // "done"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in r0
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ cmp(r0, Operand(0));
      __ b(eq, &l_try);

      // result.value
      __ pop(r0);                                        // result
      __ LoadRoot(r2, Heap::kvalue_stringRootIndex);     // "value"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.value in r0
      context()->DropAndPlug(2, r0);                     // drop iter and g
      break;
    }
  }
}


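// Resumes a suspended generator. After checking the generator's state, the
// code below rebuilds the generator's frame, restores its operand stack
// (jumping straight back into the generator when that stack is empty), and
// otherwise defers to the runtime.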
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in r0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // r1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(r1);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ cmp(r3, Operand(Smi::FromInt(0)));
  __ b(eq, &closed_state);
  __ b(lt, &wrong_state);

  // Load suspended function and context.
  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ push(r2);

  // Push holes for the rest of the arguments to the generator function.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
  __ b(mi, &push_frame);
  __ push(r2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ bl(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // lr = return address.
  // fp = caller's frame pointer.
  // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
  // cp = callee's context,
  // r4 = callee's JS function.
  __ PushFixedFrame(r4);
  // Adjust FP to point to saved FP.
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Load the operand stack size.
  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
  __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ SmiUntag(r3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(r3, Operand(0));
    __ b(ne, &slow_resume);
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));

    { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
      if (FLAG_enable_ool_constant_pool) {
        // Load the new code object's constant pool pointer.
        __ ldr(pp,
               MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
      }

      __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r2);
      __ add(r3, r3, r2);
      __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
      __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
      __ Jump(r3);
    }
    __ bind(&slow_resume);
  }
  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(r3, r3, Operand(1), SetCC);
  __ b(mi, &call_resume);
  __ push(r2);
  __ b(&push_operand_holes);
  __ bind(&call_resume);
  ASSERT(!result_register().is(r1));
  __ Push(r1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  // Reach here when generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(r0);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(r1);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


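// Builds a JSGeneratorObject iterator result, i.e. an object of the shape
// { value, done }, in new space, falling back to a runtime allocation when
// the inline allocation fails.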
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ ldr(context_register(),
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(r1, Operand(map));
  __ pop(r2);
  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
  __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2,
         FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
  __ str(r3,
         FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
                      r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->value()));
  // Call load IC, with the receiver in r0 and the property name in r2.
  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC, with the key in r0 and the receiver in r1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());
}


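// Emits an inlined fast path for binary operations on smi operands. Non-smi
// inputs branch to the BinaryOpICStub call; the smi check itself is recorded
// with a JumpPatchSite so that the IC can later patch it.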
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  // Get the arguments.
  Register left = r1;
  Register right = r0;
  __ pop(left);

  // Perform combined smi check on both operands.
  __ orr(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpICStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ TrySmiTag(right, scratch1, &stub_call);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
      __ cmp(scratch1, Operand::Zero());
      __ mov(right, Operand(scratch1), LeaveCC, ne);
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(r1);
  BinaryOpICStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(r0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(r1, r0);
      __ pop(r0);  // Restore value.
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(r0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(r1, r0);
      __ Pop(r0, r2);  // r0 = restored value.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ mov(r3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitCallStoreContextSlot(
    Handle<String> name, StrictMode strict_mode) {
  __ push(r0);  // Value.
  __ mov(r1, Operand(name));
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(cp, r1, r0);  // Context, name, strict mode.
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
}


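// Stores the value in r0 into the given variable. The branches below mirror
// the variable kinds: globals go through a store IC, legacy const and let
// bindings need hole checks or write barriers, and lookup slots are handled
// in the runtime.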
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(r2, Operand(var->name()));
    __ ldr(r1, GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(r0);
      __ mov(r0, Operand(var->name()));
      __ Push(cp, r0);  // Context and name.
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, r1);
      __ ldr(r2, location);
      __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
      __ b(ne, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, r1);
      __ ldr(r3, location);
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ b(ne, &assign);
      __ mov(r3, Operand(var->name()));
      __ push(r3);
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, r1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ ldr(r2, location);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
  __ pop(r1);

  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Pop(r2, r1);  // r1 = key.

  Handle<Code> ic = strict_mode() == SLOPPY
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(r0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(r1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(r0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
          NEVER_INLINE_TARGET_ADDRESS);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithIC(Call* expr) {
  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  CallFunctionFlags flags;
  // Get the target function.
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
    flags = NO_CALL_FUNCTION_FLAGS;
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ ldr(r0, MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
    flags = CALL_AS_METHOD;
  }

  // Load the arguments.
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, r0);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ ldr(r1, MemOperand(sp, 0));
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ ldr(ip, MemOperand(sp, 0));
  __ push(ip);
  __ str(r0, MemOperand(sp, kPointerSize));

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
      TypeFeedbackInfo::UninitializedSentinel(isolate());
  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
  __ Move(r2, FeedbackVector());
  __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));

  // Record call targets in unoptimized code.
  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // r4: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  }

  // r3: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));

  // r2: strict mode.
  __ mov(r2, Operand(Smi::FromInt(strict_mode())));

  // r1: the start position of the scope the call resides in.
  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}


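// Dispatches on the syntactic form of the callee: a possibly-direct call to
// eval, a global variable, a lookup slot, a (keyed) property, or an arbitrary
// expression.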
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      __ push(r2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(r1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in r0 (function) and
      // r1 (receiver). Touch up the stack with the right values.
      __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, r0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    ASSERT(!context_register().is(r2));
    __ mov(r2, Operand(proxy->name()));
    __ Push(context_register(), r2);
    __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
    __ Push(r0, r1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
      __ push(r1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCallWithStub(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ push(r1);
    // Emit function call.
    EmitCallWithStub(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackInfo::UninitializedSentinel(isolate());
  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
  if (FLAG_pretenuring_call_new) {
    StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
                            isolate()->factory()->NewAllocationSite());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ Move(r2, FeedbackVector());
  __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(r0);
}


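// The Emit* functions below are inlined versions of runtime predicates. They
// all follow the same pattern: materialize true/false targets via
// PrepareTest, set the condition flags, and let Split either jump or fall
// through.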
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, if_true);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  __ b(ne, if_false);
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, if_false);
  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(r0);

  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ b(ne, &skip_lookup);
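
  // The check proceeds in three steps: test a cached bit on the map, scan the
  // descriptor array for a "valueOf" key (caching a negative result in the
  // map), and finally verify that the prototype is the unmodified String
  // prototype.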
  // Check for fast case object. Generate false result for slow case object.
  __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r2, ip);
  __ b(eq, if_false);

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(r3, r1);
  __ cmp(r3, Operand::Zero());
  __ b(eq, &done);

  __ LoadInstanceDescriptors(r1, r4);
  // r4: descriptor array.
  // r3: valid entries in the descriptor array.
  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
  __ mul(r3, r3, ip);
  // Calculate location of the first key name.
  __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(r2, r4);
  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of ip to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ mov(ip, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r3, MemOperand(r4, 0));
  __ cmp(r3, ip);
  __ b(eq, if_false);
  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(r4, Operand(r2));
  __ b(ne, &loop);

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  __ JumpIfSmi(r2, if_false);
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
  __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ cmp(r2, r3);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ cmp(r2, Operand(0x80000000));
  __ cmp(r1, Operand(0x00000000), eq);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);

  // Check the marker in the calling frame.
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(r1);
  __ cmp(r0, r1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  // Get the number of formal parameters.
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(r0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ b(eq, &function);

  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ b(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kHiddenLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}


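// Reads a date field: fields below JSDate::kFirstUncachedField are served
// from the date cache when the cache stamp is current; everything else goes
// through a C function.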
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  __ JumpIfSmi(object, &not_date_object);
  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ b(ne, &not_date_object);

  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ ldr(scratch1, MemOperand(scratch1));
      __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(r0);
}


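// Writes a character into a one-byte sequential string in place: the string
// arrives in the accumulator, with smi index and smi value on the stack; the
// string is returned.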
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);
  // Load the argument into r0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(r2, r1);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}


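// The cache is a FixedArray of (key, value) pairs with a "finger" (the
// offset of the most recently used pair) stored at kFingerOffset. The fast
// path above probes only the finger entry; any miss falls back to the
// runtime, which performs the full lookup and advances the finger.

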
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be
  // used for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch,
                         string,  // used as scratch
                         elements_end,  // used as scratch
                         &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


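// Design note: the three copy loops (empty, one-character, and long
// separator) exist so the separator-length dispatch happens once, outside
// the per-element loop, and so the common one-character case can keep the
// separator byte in a register instead of re-reading the separator string
// on every iteration. On any failed check the code returns undefined, and
// the JavaScript caller of this intrinsic falls back to the generic join.

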
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ ldr(r0, GlobalObjectOperand());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);

    // Load the function from the receiver.
    __ mov(r2, Operand(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(r0);
  }
}


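// For the is_jsruntime() path the call is compiled like an ordinary JS call:
// the builtins object acts as the receiver, the callee is fetched from it by
// name through a load IC, and it is stored under the receiver because
// CallFunctionStub expects the function at [sp + (argc + 1) * kPointerSize].

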
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(r1, Operand(Smi::FromInt(strict_mode())));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          ASSERT(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ ldr(r1, MemOperand(sp, 0));
      __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
      __ pop(r1);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(r2, r1);  // r1 = key. r2 = receiver.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


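// Postfix count operations need the old value after the store: it is saved
// on the stack (under any receiver/key already pushed) before the increment,
// and PlugTOS() hands that saved value to the surrounding expression, while
// the prefix forms simply plug the new value left in r0.

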
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->float32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FLOAT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->int32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, INT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(r0, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(r0, Heap::kNullValueRootIndex);
      __ b(eq, if_true);
    }
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


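// The Map::kIsUndetectable bit handles objects such as document.all, which
// must report typeof "undefined": the string and object branches above treat
// undetectable objects as false, while the undefined branch treats them as
// true.

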
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing the finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ ldr(r1, MemOperand(ip));
  __ SmiTag(r1);
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}


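// The return address is "cooked": instead of a raw code pointer, the stack
// holds a smi-encoded delta from the start of the code object. A raw return
// address on the stack would become stale if the GC moved the code object
// while the finally block runs; the delta stays valid and is turned back
// into an address in ExitFinallyBlock below.

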
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  __ SmiUntag(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore the cooked return address from the stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


#undef __

#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_ool_constant_pool) {
    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
    load_address -= Assembler::kInstrSize;
    ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else {
    ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


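// With an out-of-line constant pool the address may be materialized either
// by a single pp-relative ldr or by a movw/movt pair (two instructions);
// without it, it is always a single pc-relative ldr. Hence the load site is
// found one or two instructions before the blx at |pc|.

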
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //  bpl ok
      //  ; load interrupt stub address into ip - either of:
      //  ldr ip, [pc/pp, <constant pool offset>]  |  movw ip, <immed low>
      //                                           |  movt ip, <immed high>
      //  blx ip
      //  ok-label

      // Calculate branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // one instruction.
      int branch_offset = pc + kInstrSize - branch_address;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  mov r0, r0 (NOP)
      //  ; load on-stack replacement address into ip - either of:
      //  ldr ip, [pc/pp, <constant pool offset>]  |  movw ip, <immed low>
      //                                           |  movt ip, <immed high>
      //  blx ip
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


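// A minimal sketch of the two states PatchAt toggles between, assuming the
// back-edge sequence laid out in the comments above:
//
//   INTERRUPT:             bpl ok   ; skip the call while the counter >= 0
//   ON_STACK_REPLACEMENT:  nop      ; fall through, always call the builtin
//
// Only the conditional branch and the call target in the constant pool (or
// movw/movt immediates) change; the surrounding instructions are untouched,
// which keeps the patch safe to apply to code that may be executing.

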
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    ASSERT(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM