// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. This marker is a
// cmp rx, #yyy instruction, and x * 0x00000fff + yyy (a raw 12 bit immediate
// value is used) is the delta from the pc to the first instruction of the
// patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
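

// An illustrative sketch, not part of the original source: with
// kOff12Mask == 0xfff, a delta of 5000 instructions back to the patch site
// would be recorded as a cmp of register r1 against the raw immediate 905,
// since 5000 == 1 * 0xfff + 905. The code that later patches the site
// recovers the delta as register_code * kOff12Mask + immediate.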


static void EmitStackCheck(MacroAssembler* masm_,
                           Register stack_limit_scratch,
                           int pointers = 0,
                           Register scratch = sp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  ASSERT(scratch.is(sp) == (pointers == 0));
  if (pointers != 0) {
    __ sub(scratch, sp, Operand(pointers * kPointerSize));
  }
  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
  __ cmp(scratch, Operand(stack_limit_scratch));
  __ b(hs, &ok);
  PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ bind(&ok);
}
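

// Usage sketch, mirroring the call sites below: a plain stack check at the
// current sp is EmitStackCheck(masm_, ip); a check that also accounts for
// room about to be claimed by locals passes the slot count and a non-sp
// scratch register, e.g. EmitStackCheck(masm_, r2, locals_count, r9).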


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
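//
// A rough sketch of that layout (offsets assumed; frames-arm.h is
// authoritative):
//   fp + 8 and up : receiver and arguments, pushed by the caller
//   fp + 4        : return address (lr)
//   fp + 0        : saved caller fp
//   fp - 4        : context (cp)
//   fp - 8        : JS function (r1)
//   fp - 12, down : stack-allocated locals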
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  InitializeFeedbackVector();

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, r2, locals_count, r9);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      EmitStackCheck(masm_, ip);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
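

// Worked example with made-up numbers: if a back edge sits 1200 bytes after
// its target and kCodeSizeMultiplier were 150, the weight would be
// Min(kMaxBackEdgeWeight, Max(1, 1200 / 150)) == 8, i.e. each trip around
// the loop charges 8 units against the interrupt budget.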


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      __ add(sp, sp, Operand(sp_delta));
      __ Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
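

// Illustrative summary of Split: Split(eq, t, f, fall_through) emits
// "b eq, t" when f is the fall-through, "b ne, f" when t is the
// fall-through, and "b eq, t" followed by an unconditional branch to f when
// neither label falls through.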


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
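

// Worked example (illustrative, assuming kPointerSize == 4 and
// kLocal0Offset == -3 * kPointerSize): in a function with two parameters,
// parameter 0 resolves to MemOperand(fp, (2 + 1 - 0) * 4) == fp + 12, while
// local 0 resolves to MemOperand(fp, -12), just below the saved context and
// function slots.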


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Object> feedback = Handle<Object>(
      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
      isolate());
  StoreFeedbackVectorSlot(slot, feedback);
  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.break_label());
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->value()));
            __ ldr(r1, MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;

      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1 || Serializer::enabled() ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ mov(r0, Operand(Smi::FromInt(flags)));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }
1844 bool result_saved = false; // Is the result saved to the stack?
1846 // Emit code to evaluate all the non-constant subexpressions and to store
1847 // them into the newly cloned array.
1848 for (int i = 0; i < length; i++) {
1849 Expression* subexpr = subexprs->at(i);
1850 // If the subexpression is a literal or a simple materialized literal it
1851 // is already set in the cloned array.
1852 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1854 if (!result_saved) {
1856 __ Push(Smi::FromInt(expr->literal_index()));
1857 result_saved = true;
1859 VisitForAccumulatorValue(subexpr);
1861 if (IsFastObjectElementsKind(constant_elements_kind)) {
1862 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1863 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1864 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1865 __ str(result_register(), FieldMemOperand(r1, offset));
1866 // Update the write barrier for the array store.
1867 __ RecordWriteField(r1, offset, result_register(), r2,
1868 kLRHasBeenSaved, kDontSaveFPRegs,
1869 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1870 } else {
1871 __ mov(r3, Operand(Smi::FromInt(i)));
1872 StoreArrayLiteralElementStub stub;
1873 __ CallStub(&stub);
1874 }
1876 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1877 }
1879 if (result_saved) {
1880 __ pop();  // literal index
1881 context()->PlugTOS();
1882 } else {
1883 context()->Plug(r0);
1884 }
1885 }
1888 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1889 ASSERT(expr->target()->IsValidLeftHandSide());
1891 Comment cmnt(masm_, "[ Assignment");
1893 // Left-hand side can only be a property, a global or a (parameter or local)
1894 // slot.
1895 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1896 LhsKind assign_type = VARIABLE;
1897 Property* property = expr->target()->AsProperty();
1898 if (property != NULL) {
1899 assign_type = (property->key()->IsPropertyName())
1900 ? NAMED_PROPERTY
1901 : KEYED_PROPERTY;
1902 }
1904 // Evaluate LHS expression.
1905 switch (assign_type) {
1906 case VARIABLE:
1907 // Nothing to do here.
1908 break;
1909 case NAMED_PROPERTY:
1910 if (expr->is_compound()) {
1911 // We need the receiver both on the stack and in the accumulator.
1912 VisitForAccumulatorValue(property->obj());
1913 __ push(result_register());
1914 } else {
1915 VisitForStackValue(property->obj());
1916 }
1917 break;
1918 case KEYED_PROPERTY:
1919 if (expr->is_compound()) {
1920 VisitForStackValue(property->obj());
1921 VisitForAccumulatorValue(property->key());
1922 __ ldr(r1, MemOperand(sp, 0));
1923 __ push(r0);
1924 } else {
1925 VisitForStackValue(property->obj());
1926 VisitForStackValue(property->key());
1927 }
1928 break;
1929 }
1931 // For compound assignments we need another deoptimization point after the
1932 // variable/property load.
1933 if (expr->is_compound()) {
1934 { AccumulatorValueContext context(this);
1935 switch (assign_type) {
1936 case VARIABLE:
1937 EmitVariableLoad(expr->target()->AsVariableProxy());
1938 PrepareForBailout(expr->target(), TOS_REG);
1939 break;
1940 case NAMED_PROPERTY:
1941 EmitNamedPropertyLoad(property);
1942 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1943 break;
1944 case KEYED_PROPERTY:
1945 EmitKeyedPropertyLoad(property);
1946 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1947 break;
1948 }
1949 }
1951 Token::Value op = expr->binary_op();
1952 __ push(r0);  // Left operand goes on the stack.
1953 VisitForAccumulatorValue(expr->value());
1955 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1956 ? OVERWRITE_RIGHT
1957 : NO_OVERWRITE;
1958 SetSourcePosition(expr->position() + 1);
1959 AccumulatorValueContext context(this);
1960 if (ShouldInlineSmiCase(op)) {
1961 EmitInlineSmiBinaryOp(expr->binary_operation(),
1962 op,
1963 mode,
1964 expr->target(),
1965 expr->value());
1966 } else {
1967 EmitBinaryOp(expr->binary_operation(), op, mode);
1968 }
1970 // Deoptimization point in case the binary operation may have side effects.
1971 PrepareForBailout(expr->binary_operation(), TOS_REG);
1972 } else {
1973 VisitForAccumulatorValue(expr->value());
1974 }
1976 // Record source position before possible IC call.
1977 SetSourcePosition(expr->position());
1979 // Store the value.
1980 switch (assign_type) {
1981 case VARIABLE:
1982 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1983 expr->op());
1984 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1985 context()->Plug(r0);
1986 break;
1987 case NAMED_PROPERTY:
1988 EmitNamedPropertyAssignment(expr);
1989 break;
1990 case KEYED_PROPERTY:
1991 EmitKeyedPropertyAssignment(expr);
1992 break;
1993 }
1994 }
1997 void FullCodeGenerator::VisitYield(Yield* expr) {
1998 Comment cmnt(masm_, "[ Yield");
1999 // Evaluate yielded value first; the initial iterator definition depends on
2000 // this. It stays on the stack while we update the iterator.
2001 VisitForStackValue(expr->expression());
2003 switch (expr->yield_kind()) {
2004 case Yield::SUSPEND:
2005 // Pop value from top-of-stack slot; box result into result register.
2006 EmitCreateIteratorResult(false);
2007 __ push(result_register());
2008 // Fall through.
2009 case Yield::INITIAL: {
2010 Label suspend, continuation, post_runtime, resume;
2012 __ jmp(&suspend);
2014 __ bind(&continuation);
2015 __ jmp(&resume);
2017 __ bind(&suspend);
2018 VisitForAccumulatorValue(expr->generator_object());
2019 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2020 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2021 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2022 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2023 __ mov(r1, cp);
2024 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2025 kLRHasBeenSaved, kDontSaveFPRegs);
2026 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2027 __ cmp(sp, r1);
2028 __ b(eq, &post_runtime);
2029 __ push(r0);  // generator object
2030 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2031 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2032 __ bind(&post_runtime);
2033 __ pop(result_register());
2034 EmitReturnSequence();
2036 __ bind(&resume);
2037 context()->Plug(result_register());
2038 break;
2039 }
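// Note: continuation.pos() is the assembler offset of the resume point. It is
// boxed as a smi in the generator object so that EmitGeneratorResume (below)
// can add it to the code entry and jump straight back here on a fast resume.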
2041 case Yield::FINAL: {
2042 VisitForAccumulatorValue(expr->generator_object());
2043 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2044 __ str(r1, FieldMemOperand(result_register(),
2045 JSGeneratorObject::kContinuationOffset));
2046 // Pop value from top-of-stack slot, box result into result register.
2047 EmitCreateIteratorResult(true);
2048 EmitUnwindBeforeReturn();
2049 EmitReturnSequence();
2050 break;
2051 }
2053 case Yield::DELEGATING: {
2054 VisitForStackValue(expr->generator_object());
2056 // Initial stack layout is as follows:
2057 // [sp + 1 * kPointerSize] iter
2058 // [sp + 0 * kPointerSize] g
2060 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2061 Label l_next, l_call, l_loop;
2062 // Initial send value is undefined.
2063 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2064 __ b(&l_next);
2066 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2067 __ bind(&l_catch);
2068 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2069 __ LoadRoot(r2, Heap::kthrow_stringRootIndex);  // "throw"
2070 __ ldr(r3, MemOperand(sp, 1 * kPointerSize));  // iter
2071 __ Push(r2, r3, r0);  // "throw", iter, except
2072 __ jmp(&l_call);
2074 // try { received = %yield result }
2075 // Shuffle the received result above a try handler and yield it without
2076 // re-boxing.
2077 __ bind(&l_try);
2078 __ pop(r0);  // result
2079 __ PushTryHandler(StackHandler::CATCH, expr->index());
2080 const int handler_size = StackHandlerConstants::kSize;
2081 __ push(r0);  // result
2082 __ jmp(&l_suspend);
2083 __ bind(&l_continuation);
2084 __ jmp(&l_resume);
2085 __ bind(&l_suspend);
2086 const int generator_object_depth = kPointerSize + handler_size;
2087 __ ldr(r0, MemOperand(sp, generator_object_depth));
2088 __ push(r0);  // g
2089 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2090 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2091 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2092 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2093 __ mov(r1, cp);
2094 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2095 kLRHasBeenSaved, kDontSaveFPRegs);
2096 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2097 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2098 __ pop(r0);  // result
2099 EmitReturnSequence();
2100 __ bind(&l_resume);  // received in r0
2101 __ PopTryHandler();
2103 // receiver = iter; f = 'next'; arg = received;
2104 __ bind(&l_next);
2105 __ LoadRoot(r2, Heap::knext_stringRootIndex);  // "next"
2106 __ ldr(r3, MemOperand(sp, 1 * kPointerSize));  // iter
2107 __ Push(r2, r3, r0);  // "next", iter, received
2109 // result = receiver[f](arg);
2110 __ bind(&l_call);
2111 __ ldr(r1, MemOperand(sp, kPointerSize));
2112 __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2113 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2114 CallIC(ic, TypeFeedbackId::None());
2115 __ mov(r1, r0);
2116 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2117 CallFunctionStub stub(1, CALL_AS_METHOD);
2118 __ CallStub(&stub);
2120 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2121 __ Drop(1);  // The function is still on the stack; drop it.
2123 // if (!result.done) goto l_try;
2124 __ bind(&l_loop);
2125 __ push(r0);  // save result
2126 __ LoadRoot(r2, Heap::kdone_stringRootIndex);  // "done"
2127 CallLoadIC(NOT_CONTEXTUAL);  // result.done in r0
2128 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2129 CallIC(bool_ic);
2130 __ cmp(r0, Operand(0));
2131 __ b(eq, &l_try);
2133 // result.value
2134 __ pop(r0);  // result
2135 __ LoadRoot(r2, Heap::kvalue_stringRootIndex);  // "value"
2136 CallLoadIC(NOT_CONTEXTUAL);  // result.value in r0
2137 context()->DropAndPlug(2, r0);  // drop iter and g
2138 break;
2139 }
2140 }
2141 }
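// The DELEGATING case above is the desugared yield*: it loops calling
// iter[f](arg) with f == "next" (or f == "throw" after a caught exception),
// suspends while result.done is false, and finally yields result.value.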
2144 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2145 Expression *value,
2146 JSGeneratorObject::ResumeMode resume_mode) {
2147 // The value stays in r0, and is ultimately read by the resumed generator, as
2148 // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2149 // is read to throw the value when the resumed generator is already closed.
2150 // r1 will hold the generator object until the activation has been resumed.
2151 VisitForStackValue(generator);
2152 VisitForAccumulatorValue(value);
2153 __ pop(r1);
2155 // Check generator state.
2156 Label wrong_state, closed_state, done;
2157 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2158 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2159 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2160 __ cmp(r3, Operand(Smi::FromInt(0)));
2161 __ b(eq, &closed_state);
2162 __ b(lt, &wrong_state);
2164 // Load suspended function and context.
2165 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2166 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2168 // Load receiver and store as the first argument.
2169 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2170 __ push(r2);
2172 // Push holes for the rest of the arguments to the generator function.
2173 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2174 __ ldr(r3,
2175 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2176 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2177 Label push_argument_holes, push_frame;
2178 __ bind(&push_argument_holes);
2179 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2180 __ b(mi, &push_frame);
2181 __ push(r2);
2182 __ jmp(&push_argument_holes);
2184 // Enter a new JavaScript frame, and initialize its slots as they were when
2185 // the generator was suspended.
2186 Label resume_frame;
2187 __ bind(&push_frame);
2188 __ bl(&resume_frame);
2189 __ jmp(&done);
2190 __ bind(&resume_frame);
2191 // lr = return address.
2192 // fp = caller's frame pointer.
2193 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2194 // cp = callee's context,
2195 // r4 = callee's JS function.
2196 __ PushFixedFrame(r4);
2197 // Adjust FP to point to saved FP.
2198 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2200 // Load the operand stack size.
2201 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2202 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2205 // If we are sending a value and there is no operand stack, we can jump back
2206 // in directly.
2207 if (resume_mode == JSGeneratorObject::NEXT) {
2208 Label slow_resume;
2209 __ cmp(r3, Operand(0));
2210 __ b(ne, &slow_resume);
2211 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2213 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2214 if (FLAG_enable_ool_constant_pool) {
2215 // Load the new code object's constant pool pointer.
2216 __ ldr(pp,
2217 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
2218 }
2220 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2221 __ SmiUntag(r2);
2222 __ add(r3, r3, r2);
2223 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2224 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2225 __ Jump(r3);
2226 }
2227 __ bind(&slow_resume);
2228 }
2230 // Otherwise, we push holes for the operand stack and call the runtime to fix
2231 // up the stack and the handlers.
2232 Label push_operand_holes, call_resume;
2233 __ bind(&push_operand_holes);
2234 __ sub(r3, r3, Operand(1), SetCC);
2235 __ b(mi, &call_resume);
2236 __ push(r2);
2237 __ b(&push_operand_holes);
2238 __ bind(&call_resume);
2239 ASSERT(!result_register().is(r1));
2240 __ Push(r1, result_register());
2241 __ Push(Smi::FromInt(resume_mode));
2242 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2243 // Not reached: the runtime call returns elsewhere.
2244 __ stop("not-reached");
2246 // Reach here when generator is closed.
2247 __ bind(&closed_state);
2248 if (resume_mode == JSGeneratorObject::NEXT) {
2249 // Return completed iterator result when generator is closed.
2250 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2251 __ push(r2);
2252 // Pop value from top-of-stack slot; box result into result register.
2253 EmitCreateIteratorResult(true);
2254 } else {
2255 // Throw the provided value.
2256 __ push(r0);
2257 __ CallRuntime(Runtime::kHiddenThrow, 1);
2258 }
2259 __ jmp(&done);
2261 // Throw error if we attempt to operate on a running generator.
2262 __ bind(&wrong_state);
2263 __ push(r1);
2264 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2266 __ bind(&done);
2267 context()->Plug(result_register());
2268 }
2271 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2272 Label gc_required;
2273 Label allocated;
2275 Handle<Map> map(isolate()->native_context()->generator_result_map());
2277 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2278 __ jmp(&allocated);
2280 __ bind(&gc_required);
2281 __ Push(Smi::FromInt(map->instance_size()));
2282 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2283 __ ldr(context_register(),
2284 MemOperand(fp, StandardFrameConstants::kContextOffset));
2286 __ bind(&allocated);
2287 __ mov(r1, Operand(map));
2288 __ pop(r2);
2289 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2290 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2291 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2292 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2293 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2294 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2295 __ str(r2,
2296 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2297 __ str(r3,
2298 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2300 // Only the value field needs a write barrier, as the other values are in the
2301 // root set.
2302 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2303 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2304 }
2307 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2308 SetSourcePosition(prop->position());
2309 Literal* key = prop->key()->AsLiteral();
2310 __ mov(r2, Operand(key->value()));
2311 // Call load IC. It has arguments receiver and property name in r0 and r2.
2312 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2313 }
2316 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2317 SetSourcePosition(prop->position());
2318 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
2319 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2320 CallIC(ic, prop->PropertyFeedbackId());
2321 }
2324 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2325 Token::Value op,
2326 OverwriteMode mode,
2327 Expression* left_expr,
2328 Expression* right_expr) {
2329 Label done, smi_case, stub_call;
2331 Register scratch1 = r2;
2332 Register scratch2 = r3;
2334 // Get the arguments.
2335 Register left = r1;
2336 Register right = r0;
2337 __ pop(left);
2339 // Perform combined smi check on both operands.
2340 __ orr(scratch1, left, Operand(right));
2341 STATIC_ASSERT(kSmiTag == 0);
2342 JumpPatchSite patch_site(masm_);
2343 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2345 __ bind(&stub_call);
2346 BinaryOpICStub stub(op, mode);
2347 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2348 patch_site.EmitPatchInfo();
2349 __ jmp(&done);
2351 __ bind(&smi_case);
2352 // Smi case. This code works the same way as the smi-smi case in the type
2353 // recording binary operation stub (see BinaryOpStub::GenerateSmiSmiOperation).
2354 switch (op) {
2355 case Token::SAR:
2356 __ GetLeastBitsFromSmi(scratch1, right, 5);
2357 __ mov(right, Operand(left, ASR, scratch1));
2358 __ bic(right, right, Operand(kSmiTagMask));
2359 break;
2360 case Token::SHL: {
2361 __ SmiUntag(scratch1, left);
2362 __ GetLeastBitsFromSmi(scratch2, right, 5);
2363 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2364 __ TrySmiTag(right, scratch1, &stub_call);
2365 break;
2366 }
2367 case Token::SHR: {
2368 __ SmiUntag(scratch1, left);
2369 __ GetLeastBitsFromSmi(scratch2, right, 5);
2370 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2371 __ tst(scratch1, Operand(0xc0000000));
2372 __ b(ne, &stub_call);
2373 __ SmiTag(right, scratch1);
2374 break;
2375 }
2376 case Token::ADD:
2377 __ add(scratch1, left, Operand(right), SetCC);
2378 __ b(vs, &stub_call);
2379 __ mov(right, scratch1);
2380 break;
2381 case Token::SUB:
2382 __ sub(scratch1, left, Operand(right), SetCC);
2383 __ b(vs, &stub_call);
2384 __ mov(right, scratch1);
2385 break;
2386 case Token::MUL: {
2387 __ SmiUntag(ip, right);
2388 __ smull(scratch1, scratch2, left, ip);
2389 __ mov(ip, Operand(scratch1, ASR, 31));
2390 __ cmp(ip, Operand(scratch2));
2391 __ b(ne, &stub_call);
2392 __ cmp(scratch1, Operand::Zero());
2393 __ mov(right, Operand(scratch1), LeaveCC, ne);
2394 __ b(ne, &done);
2395 __ add(scratch2, right, Operand(left), SetCC);
2396 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2397 __ b(mi, &stub_call);
2398 break;
2399 }
2400 case Token::BIT_OR:
2401 __ orr(right, left, Operand(right));
2402 break;
2403 case Token::BIT_AND:
2404 __ and_(right, left, Operand(right));
2405 break;
2406 case Token::BIT_XOR:
2407 __ eor(right, left, Operand(right));
2408 break;
2409 default:
2410 UNREACHABLE();
2411 }
2413 __ bind(&done);
2414 context()->Plug(r0);
2415 }
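// In the MUL case above, a zero product needs a sign check: left + right is
// negative exactly when the mathematical result would be -0, which a smi
// cannot represent, so that case falls back to the stub.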
2418 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2419 Token::Value op,
2420 OverwriteMode mode) {
2421 __ pop(r1);
2422 BinaryOpICStub stub(op, mode);
2423 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2424 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2425 patch_site.EmitPatchInfo();
2426 context()->Plug(r0);
2427 }
2430 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2431 ASSERT(expr->IsValidLeftHandSide());
2433 // Left-hand side can only be a property, a global or a (parameter or local)
2434 // slot.
2435 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2436 LhsKind assign_type = VARIABLE;
2437 Property* prop = expr->AsProperty();
2438 if (prop != NULL) {
2439 assign_type = (prop->key()->IsPropertyName())
2440 ? NAMED_PROPERTY
2441 : KEYED_PROPERTY;
2442 }
2444 switch (assign_type) {
2445 case VARIABLE: {
2446 Variable* var = expr->AsVariableProxy()->var();
2447 EffectContext context(this);
2448 EmitVariableAssignment(var, Token::ASSIGN);
2449 break;
2450 }
2451 case NAMED_PROPERTY: {
2452 __ push(r0); // Preserve value.
2453 VisitForAccumulatorValue(prop->obj());
2454 __ mov(r1, r0);
2455 __ pop(r0);  // Restore value.
2456 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2457 CallStoreIC();
2458 break;
2459 }
2460 case KEYED_PROPERTY: {
2461 __ push(r0); // Preserve value.
2462 VisitForStackValue(prop->obj());
2463 VisitForAccumulatorValue(prop->key());
2464 __ mov(r1, r0);
2465 __ Pop(r0, r2);  // r0 = restored value.
2466 Handle<Code> ic = strict_mode() == SLOPPY
2467 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2468 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2469 CallIC(ic);
2470 break;
2471 }
2472 }
2473 context()->Plug(r0);
2474 }
2477 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2478 Variable* var, MemOperand location) {
2479 __ str(result_register(), location);
2480 if (var->IsContextSlot()) {
2481 // RecordWrite may destroy all its register arguments.
2482 __ mov(r3, result_register());
2483 int offset = Context::SlotOffset(var->index());
2484 __ RecordWriteContextSlot(
2485 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2486 }
2487 }
2490 void FullCodeGenerator::EmitCallStoreContextSlot(
2491 Handle<String> name, StrictMode strict_mode) {
2492 __ push(r0); // Value.
2493 __ mov(r1, Operand(name));
2494 __ mov(r0, Operand(Smi::FromInt(strict_mode)));
2495 __ Push(cp, r1, r0); // Context, name, strict mode.
2496 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2497 }
2500 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2501 if (var->IsUnallocated()) {
2502 // Global var, const, or let.
2503 __ mov(r2, Operand(var->name()));
2504 __ ldr(r1, GlobalObjectOperand());
2505 CallStoreIC();
2507 } else if (op == Token::INIT_CONST_LEGACY) {
2508 // Const initializers need a write barrier.
2509 ASSERT(!var->IsParameter()); // No const parameters.
2510 if (var->IsLookupSlot()) {
2511 __ push(r0);
2512 __ mov(r0, Operand(var->name()));
2513 __ Push(cp, r0);  // Context and name.
2514 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2515 } else {
2516 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2517 Label skip;
2518 MemOperand location = VarOperand(var, r1);
2519 __ ldr(r2, location);
2520 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2521 __ b(ne, &skip);
2522 EmitStoreToStackLocalOrContextSlot(var, location);
2523 __ bind(&skip);
2524 }
2526 } else if (var->mode() == LET && op != Token::INIT_LET) {
2527 // Non-initializing assignment to let variable needs a write barrier.
2528 if (var->IsLookupSlot()) {
2529 EmitCallStoreContextSlot(var->name(), strict_mode());
2530 } else {
2531 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2532 Label assign;
2533 MemOperand location = VarOperand(var, r1);
2534 __ ldr(r3, location);
2535 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2536 __ b(ne, &assign);
2537 __ mov(r3, Operand(var->name()));
2538 __ push(r3);
2539 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2540 // Perform the assignment.
2541 __ bind(&assign);
2542 EmitStoreToStackLocalOrContextSlot(var, location);
2543 }
2545 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2546 // Assignment to var or initializing assignment to let/const
2547 // in harmony mode.
2548 if (var->IsLookupSlot()) {
2549 EmitCallStoreContextSlot(var->name(), strict_mode());
2550 } else {
2551 ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2552 MemOperand location = VarOperand(var, r1);
2553 if (generate_debug_code_ && op == Token::INIT_LET) {
2554 // Check for an uninitialized let binding.
2555 __ ldr(r2, location);
2556 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2557 __ Check(eq, kLetBindingReInitialization);
2558 }
2559 EmitStoreToStackLocalOrContextSlot(var, location);
2560 }
2561 }
2562 // Non-initializing assignments to consts are ignored.
2563 }
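// Taken together, the branches above implement: global stores through a
// StoreIC; legacy const initialization that only overwrites the hole; checked
// let assignments that throw a reference error on the hole; plain stores for
// var and for let/const initialization; and silently ignored non-initializing
// const assignments.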
2566 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2567 // Assignment to a property, using a named store IC.
2568 Property* prop = expr->target()->AsProperty();
2569 ASSERT(prop != NULL);
2570 ASSERT(prop->key()->AsLiteral() != NULL);
2572 // Record source code position before IC call.
2573 SetSourcePosition(expr->position());
2574 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2575 __ pop(r1);
2577 CallStoreIC(expr->AssignmentFeedbackId());
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(r0);
2581 }
2584 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2585 // Assignment to a property, using a keyed store IC.
2587 // Record source code position before IC call.
2588 SetSourcePosition(expr->position());
2589 __ Pop(r2, r1); // r1 = key.
2591 Handle<Code> ic = strict_mode() == SLOPPY
2592 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2593 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2594 CallIC(ic, expr->AssignmentFeedbackId());
2596 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2597 context()->Plug(r0);
2598 }
2601 void FullCodeGenerator::VisitProperty(Property* expr) {
2602 Comment cmnt(masm_, "[ Property");
2603 Expression* key = expr->key();
2605 if (key->IsPropertyName()) {
2606 VisitForAccumulatorValue(expr->obj());
2607 EmitNamedPropertyLoad(expr);
2608 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2609 context()->Plug(r0);
2610 } else {
2611 VisitForStackValue(expr->obj());
2612 VisitForAccumulatorValue(expr->key());
2613 __ pop(r1);
2614 EmitKeyedPropertyLoad(expr);
2615 context()->Plug(r0);
2616 }
2617 }
2620 void FullCodeGenerator::CallIC(Handle<Code> code,
2621 TypeFeedbackId ast_id) {
2622 ic_total_count_++;
2623 // All calls must have a predictable size in full-codegen code to ensure that
2624 // the debugger can patch them correctly.
2625 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2626 NEVER_INLINE_TARGET_ADDRESS);
2627 }
2630 // Code common for calls using the IC.
2631 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2632 Expression* callee = expr->expression();
2633 ZoneList<Expression*>* args = expr->arguments();
2634 int arg_count = args->length();
2636 CallFunctionFlags flags;
2637 // Get the target function.
2638 if (callee->IsVariableProxy()) {
2639 { StackValueContext context(this);
2640 EmitVariableLoad(callee->AsVariableProxy());
2641 PrepareForBailout(callee, NO_REGISTERS);
2642 }
2643 // Push undefined as receiver. This is patched in the method prologue if it
2644 // is a sloppy mode method.
2645 __ Push(isolate()->factory()->undefined_value());
2646 flags = NO_CALL_FUNCTION_FLAGS;
2647 } else {
2648 // Load the function from the receiver.
2649 ASSERT(callee->IsProperty());
2650 __ ldr(r0, MemOperand(sp, 0));
2651 EmitNamedPropertyLoad(callee->AsProperty());
2652 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2653 // Push the target function under the receiver.
2654 __ ldr(ip, MemOperand(sp, 0));
2655 __ push(ip);
2656 __ str(r0, MemOperand(sp, kPointerSize));
2657 flags = CALL_AS_METHOD;
2658 }
2660 // Load the arguments.
2661 { PreservePositionScope scope(masm()->positions_recorder());
2662 for (int i = 0; i < arg_count; i++) {
2663 VisitForStackValue(args->at(i));
2664 }
2665 }
2667 // Record source position for debugger.
2668 SetSourcePosition(expr->position());
2669 CallFunctionStub stub(arg_count, flags);
2670 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2671 __ CallStub(&stub);
2673 RecordJSReturnSite(expr);
2675 // Restore context register.
2676 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2678 context()->DropAndPlug(1, r0);
2679 }
2682 // Code common for calls using the IC.
2683 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2684 Expression* key) {
2685 // Load the key.
2686 VisitForAccumulatorValue(key);
2688 Expression* callee = expr->expression();
2689 ZoneList<Expression*>* args = expr->arguments();
2690 int arg_count = args->length();
2692 // Load the function from the receiver.
2693 ASSERT(callee->IsProperty());
2694 __ ldr(r1, MemOperand(sp, 0));
2695 EmitKeyedPropertyLoad(callee->AsProperty());
2696 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2698 // Push the target function under the receiver.
2699 __ ldr(ip, MemOperand(sp, 0));
2700 __ push(ip);
2701 __ str(r0, MemOperand(sp, kPointerSize));
2703 { PreservePositionScope scope(masm()->positions_recorder());
2704 for (int i = 0; i < arg_count; i++) {
2705 VisitForStackValue(args->at(i));
2706 }
2707 }
2709 // Record source position for debugger.
2710 SetSourcePosition(expr->position());
2711 CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2712 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2713 __ CallStub(&stub);
2715 RecordJSReturnSite(expr);
2716 // Restore context register.
2717 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2719 context()->DropAndPlug(1, r0);
2720 }
2723 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2724 // Code common for calls using the call stub.
2725 ZoneList<Expression*>* args = expr->arguments();
2726 int arg_count = args->length();
2727 { PreservePositionScope scope(masm()->positions_recorder());
2728 for (int i = 0; i < arg_count; i++) {
2729 VisitForStackValue(args->at(i));
2730 }
2731 }
2732 // Record source position for debugger.
2733 SetSourcePosition(expr->position());
2735 Handle<Object> uninitialized =
2736 TypeFeedbackInfo::UninitializedSentinel(isolate());
2737 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2738 __ Move(r2, FeedbackVector());
2739 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2741 // Record call targets in unoptimized code.
2742 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2743 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2744 __ CallStub(&stub);
2745 RecordJSReturnSite(expr);
2746 // Restore context register.
2747 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2748 context()->DropAndPlug(1, r0);
2749 }
2752 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2753 // r4: copy of the first argument or undefined if it doesn't exist.
2754 if (arg_count > 0) {
2755 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2756 } else {
2757 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2758 }
2760 // r3: the receiver of the enclosing function.
2761 int receiver_offset = 2 + info_->scope()->num_parameters();
2762 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2764 // r2: strict mode.
2765 __ mov(r2, Operand(Smi::FromInt(strict_mode())));
2767 // r1: the start position of the scope the call resides in.
2768 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2770 // Do the runtime call.
2771 __ Push(r4, r3, r2, r1);
2772 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2773 }
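// Note: together with the copy of the function pushed by the caller, the four
// values pushed above make up the five arguments of
// Runtime::kHiddenResolvePossiblyDirectEval.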
2776 void FullCodeGenerator::VisitCall(Call* expr) {
2777 #ifdef DEBUG
2778 // We want to verify that RecordJSReturnSite gets called on all paths
2779 // through this function. Avoid early returns.
2780 expr->return_is_recorded_ = false;
2781 #endif
2783 Comment cmnt(masm_, "[ Call");
2784 Expression* callee = expr->expression();
2785 Call::CallType call_type = expr->GetCallType(isolate());
2787 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2788 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2789 // to resolve the function we need to call and the receiver of the
2790 // call. Then we call the resolved function using the given
2791 // arguments.
2792 ZoneList<Expression*>* args = expr->arguments();
2793 int arg_count = args->length();
2795 { PreservePositionScope pos_scope(masm()->positions_recorder());
2796 VisitForStackValue(callee);
2797 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2798 __ push(r2); // Reserved receiver slot.
2800 // Push the arguments.
2801 for (int i = 0; i < arg_count; i++) {
2802 VisitForStackValue(args->at(i));
2803 }
2805 // Push a copy of the function (found below the arguments) and
2806 // resolve eval.
2807 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2808 __ push(r1);
2809 EmitResolvePossiblyDirectEval(arg_count);
2811 // The runtime call returns a pair of values in r0 (function) and
2812 // r1 (receiver). Touch up the stack with the right values.
2813 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2814 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2815 }
2817 // Record source position for debugger.
2818 SetSourcePosition(expr->position());
2819 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2820 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2821 __ CallStub(&stub);
2822 RecordJSReturnSite(expr);
2823 // Restore context register.
2824 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2825 context()->DropAndPlug(1, r0);
2826 } else if (call_type == Call::GLOBAL_CALL) {
2827 EmitCallWithIC(expr);
2829 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2830 // Call to a lookup slot (dynamically introduced variable).
2831 VariableProxy* proxy = callee->AsVariableProxy();
2832 Label slow, done;
2834 { PreservePositionScope scope(masm()->positions_recorder());
2835 // Generate code for loading from variables potentially shadowed
2836 // by eval-introduced variables.
2837 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2838 }
2840 __ bind(&slow);
2841 // Call the runtime to find the function to call (returned in r0)
2842 // and the object holding it (returned in r1).
2843 ASSERT(!context_register().is(r2));
2844 __ mov(r2, Operand(proxy->name()));
2845 __ Push(context_register(), r2);
2846 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2847 __ Push(r0, r1); // Function, receiver.
2849 // If fast case code has been generated, emit code to push the
2850 // function and receiver and have the slow path jump around this
2851 // code.
2852 if (done.is_linked()) {
2853 Label call;
2854 __ b(&call);
2855 __ bind(&done);
2856 // Push function.
2857 __ push(r0);
2858 // The receiver is implicitly the global receiver. Indicate this
2859 // by passing the hole to the call function stub.
2860 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2861 __ push(r1);
2862 __ bind(&call);
2863 }
2865 // The receiver is either the global receiver or an object found
2866 // by LoadContextSlot.
2867 EmitCallWithStub(expr);
2868 } else if (call_type == Call::PROPERTY_CALL) {
2869 Property* property = callee->AsProperty();
2870 { PreservePositionScope scope(masm()->positions_recorder());
2871 VisitForStackValue(property->obj());
2872 }
2873 if (property->key()->IsPropertyName()) {
2874 EmitCallWithIC(expr);
2876 EmitKeyedCallWithIC(expr, property->key());
2877 }
2878 } else {
2879 ASSERT(call_type == Call::OTHER_CALL);
2880 // Call to an arbitrary expression not handled specially above.
2881 { PreservePositionScope scope(masm()->positions_recorder());
2882 VisitForStackValue(callee);
2883 }
2884 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2885 __ push(r1);
2886 // Emit function call.
2887 EmitCallWithStub(expr);
2888 }
2890 #ifdef DEBUG
2891 // RecordJSReturnSite should have been called.
2892 ASSERT(expr->return_is_recorded_);
2893 #endif
2894 }
2897 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2898 Comment cmnt(masm_, "[ CallNew");
2899 // According to ECMA-262, section 11.2.2, page 44, the function
2900 // expression in new calls must be evaluated before the
2901 // arguments.
2903 // Push constructor on the stack. If it's not a function it's used as
2904 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2905 // ignored.
2906 VisitForStackValue(expr->expression());
2908 // Push the arguments ("left-to-right") on the stack.
2909 ZoneList<Expression*>* args = expr->arguments();
2910 int arg_count = args->length();
2911 for (int i = 0; i < arg_count; i++) {
2912 VisitForStackValue(args->at(i));
2913 }
2915 // Call the construct call builtin that handles allocation and
2916 // constructor invocation.
2917 SetSourcePosition(expr->position());
2919 // Load function and argument count into r1 and r0.
2920 __ mov(r0, Operand(arg_count));
2921 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2923 // Record call targets in unoptimized code.
2924 Handle<Object> uninitialized =
2925 TypeFeedbackInfo::UninitializedSentinel(isolate());
2926 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2927 if (FLAG_pretenuring_call_new) {
2928 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2929 isolate()->factory()->NewAllocationSite());
2930 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2931 expr->CallNewFeedbackSlot() + 1);
2932 }
2934 __ Move(r2, FeedbackVector());
2935 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2937 CallConstructStub stub(RECORD_CALL_TARGET);
2938 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2939 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2940 context()->Plug(r0);
2941 }
2944 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2945 ZoneList<Expression*>* args = expr->arguments();
2946 ASSERT(args->length() == 1);
2948 VisitForAccumulatorValue(args->at(0));
2950 Label materialize_true, materialize_false;
2951 Label* if_true = NULL;
2952 Label* if_false = NULL;
2953 Label* fall_through = NULL;
2954 context()->PrepareTest(&materialize_true, &materialize_false,
2955 &if_true, &if_false, &fall_through);
2957 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2958 __ SmiTst(r0);
2959 Split(eq, if_true, if_false, fall_through);
2961 context()->Plug(if_true, if_false);
2962 }
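// SmiTst ands the value with kSmiTagMask; since kSmiTag is zero, eq means the
// tag bit is clear, i.e. the value is a smi.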
2965 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2966 ZoneList<Expression*>* args = expr->arguments();
2967 ASSERT(args->length() == 1);
2969 VisitForAccumulatorValue(args->at(0));
2971 Label materialize_true, materialize_false;
2972 Label* if_true = NULL;
2973 Label* if_false = NULL;
2974 Label* fall_through = NULL;
2975 context()->PrepareTest(&materialize_true, &materialize_false,
2976 &if_true, &if_false, &fall_through);
2978 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2979 __ NonNegativeSmiTst(r0);
2980 Split(eq, if_true, if_false, fall_through);
2982 context()->Plug(if_true, if_false);
2983 }
2986 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2987 ZoneList<Expression*>* args = expr->arguments();
2988 ASSERT(args->length() == 1);
2990 VisitForAccumulatorValue(args->at(0));
2992 Label materialize_true, materialize_false;
2993 Label* if_true = NULL;
2994 Label* if_false = NULL;
2995 Label* fall_through = NULL;
2996 context()->PrepareTest(&materialize_true, &materialize_false,
2997 &if_true, &if_false, &fall_through);
2999 __ JumpIfSmi(r0, if_false);
3000 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3001 __ cmp(r0, ip);
3002 __ b(eq, if_true);
3003 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3004 // Undetectable objects behave like undefined when tested with typeof.
3005 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3006 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3007 __ b(ne, if_false);
3008 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3009 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3010 __ b(lt, if_false);
3011 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(le, if_true, if_false, fall_through);
3015 context()->Plug(if_true, if_false);
3016 }
3019 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3020 ZoneList<Expression*>* args = expr->arguments();
3021 ASSERT(args->length() == 1);
3023 VisitForAccumulatorValue(args->at(0));
3025 Label materialize_true, materialize_false;
3026 Label* if_true = NULL;
3027 Label* if_false = NULL;
3028 Label* fall_through = NULL;
3029 context()->PrepareTest(&materialize_true, &materialize_false,
3030 &if_true, &if_false, &fall_through);
3032 __ JumpIfSmi(r0, if_false);
3033 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(ge, if_true, if_false, fall_through);
3037 context()->Plug(if_true, if_false);
3038 }
3041 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 ASSERT(args->length() == 1);
3045 VisitForAccumulatorValue(args->at(0));
3047 Label materialize_true, materialize_false;
3048 Label* if_true = NULL;
3049 Label* if_false = NULL;
3050 Label* fall_through = NULL;
3051 context()->PrepareTest(&materialize_true, &materialize_false,
3052 &if_true, &if_false, &fall_through);
3054 __ JumpIfSmi(r0, if_false);
3055 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3056 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3057 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3058 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3059 Split(ne, if_true, if_false, fall_through);
3061 context()->Plug(if_true, if_false);
3062 }
3065 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3066 CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 ASSERT(args->length() == 1);
3070 VisitForAccumulatorValue(args->at(0));
3072 Label materialize_true, materialize_false, skip_lookup;
3073 Label* if_true = NULL;
3074 Label* if_false = NULL;
3075 Label* fall_through = NULL;
3076 context()->PrepareTest(&materialize_true, &materialize_false,
3077 &if_true, &if_false, &fall_through);
3079 __ AssertNotSmi(r0);
3081 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3082 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3083 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3084 __ b(ne, &skip_lookup);
3086 // Check for fast case object. Generate false result for slow case object.
3087 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3088 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3089 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3090 __ cmp(r2, ip);
3091 __ b(eq, if_false);
3093 // Look for valueOf name in the descriptor array, and indicate false if
3094 // found. Since we omit an enumeration index check, if it is added via a
3095 // transition that shares its descriptor array, this is a false positive.
3096 Label entry, loop, done;
3098 // Skip loop if no descriptors are valid.
3099 __ NumberOfOwnDescriptors(r3, r1);
3100 __ cmp(r3, Operand::Zero());
3101 __ b(eq, &done);
3103 __ LoadInstanceDescriptors(r1, r4);
3104 // r4: descriptor array.
3105 // r3: valid entries in the descriptor array.
3106 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3107 __ mul(r3, r3, ip);
3108 // Calculate location of the first key name.
3109 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3110 // Calculate the end of the descriptor array.
3111 __ mov(r2, r4);
3112 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
3114 // Loop through all the keys in the descriptor array. If one of these is the
3115 // string "valueOf" the result is false.
3116 // The use of ip to store the valueOf string assumes that it is not otherwise
3117 // used in the loop below.
3118 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3119 __ jmp(&entry);
3120 __ bind(&loop);
3121 __ ldr(r3, MemOperand(r4, 0));
3122 __ cmp(r3, ip);
3123 __ b(eq, if_false);
3124 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3125 __ bind(&entry);
3126 __ cmp(r4, Operand(r2));
3127 __ b(ne, &loop);
3129 __ bind(&done);
3131 // Set the bit in the map to indicate that there is no local valueOf field.
3132 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3133 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3134 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3136 __ bind(&skip_lookup);
3138 // If a valueOf property is not found on the object check that its
3139 // prototype is the un-modified String prototype. If not result is false.
3140 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3141 __ JumpIfSmi(r2, if_false);
3142 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3143 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3144 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3145 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3146 __ cmp(r2, r3);
3147 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3148 Split(eq, if_true, if_false, fall_through);
3150 context()->Plug(if_true, if_false);
3151 }
3154 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3155 ZoneList<Expression*>* args = expr->arguments();
3156 ASSERT(args->length() == 1);
3158 VisitForAccumulatorValue(args->at(0));
3160 Label materialize_true, materialize_false;
3161 Label* if_true = NULL;
3162 Label* if_false = NULL;
3163 Label* fall_through = NULL;
3164 context()->PrepareTest(&materialize_true, &materialize_false,
3165 &if_true, &if_false, &fall_through);
3167 __ JumpIfSmi(r0, if_false);
3168 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3169 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3170 Split(eq, if_true, if_false, fall_through);
3172 context()->Plug(if_true, if_false);
3173 }
3176 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3177 ZoneList<Expression*>* args = expr->arguments();
3178 ASSERT(args->length() == 1);
3180 VisitForAccumulatorValue(args->at(0));
3182 Label materialize_true, materialize_false;
3183 Label* if_true = NULL;
3184 Label* if_false = NULL;
3185 Label* fall_through = NULL;
3186 context()->PrepareTest(&materialize_true, &materialize_false,
3187 &if_true, &if_false, &fall_through);
3189 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3190 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3191 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3192 __ cmp(r2, Operand(0x80000000));
3193 __ cmp(r1, Operand(0x00000000), eq);
3195 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3196 Split(eq, if_true, if_false, fall_through);
3198 context()->Plug(if_true, if_false);
3199 }
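// -0 is the only heap number whose exponent word is 0x80000000 and whose
// mantissa word is zero. The second cmp is predicated on eq, so the final eq
// condition holds only when both words match.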
3202 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3203 ZoneList<Expression*>* args = expr->arguments();
3204 ASSERT(args->length() == 1);
3206 VisitForAccumulatorValue(args->at(0));
3208 Label materialize_true, materialize_false;
3209 Label* if_true = NULL;
3210 Label* if_false = NULL;
3211 Label* fall_through = NULL;
3212 context()->PrepareTest(&materialize_true, &materialize_false,
3213 &if_true, &if_false, &fall_through);
3215 __ JumpIfSmi(r0, if_false);
3216 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3217 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3218 Split(eq, if_true, if_false, fall_through);
3220 context()->Plug(if_true, if_false);
3221 }
3224 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3225 ZoneList<Expression*>* args = expr->arguments();
3226 ASSERT(args->length() == 1);
3228 VisitForAccumulatorValue(args->at(0));
3230 Label materialize_true, materialize_false;
3231 Label* if_true = NULL;
3232 Label* if_false = NULL;
3233 Label* fall_through = NULL;
3234 context()->PrepareTest(&materialize_true, &materialize_false,
3235 &if_true, &if_false, &fall_through);
3237 __ JumpIfSmi(r0, if_false);
3238 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3239 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3240 Split(eq, if_true, if_false, fall_through);
3242 context()->Plug(if_true, if_false);
3243 }
3247 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3248 ASSERT(expr->arguments()->length() == 0);
3250 Label materialize_true, materialize_false;
3251 Label* if_true = NULL;
3252 Label* if_false = NULL;
3253 Label* fall_through = NULL;
3254 context()->PrepareTest(&materialize_true, &materialize_false,
3255 &if_true, &if_false, &fall_through);
3257 // Get the frame pointer for the calling frame.
3258 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3260 // Skip the arguments adaptor frame if it exists.
3261 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3262 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3263 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3265 // Check the marker in the calling frame.
3266 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3267 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3268 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269 Split(eq, if_true, if_false, fall_through);
3271 context()->Plug(if_true, if_false);
3272 }
3275 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 ASSERT(args->length() == 2);
3279 // Load the two objects into registers and perform the comparison.
3280 VisitForStackValue(args->at(0));
3281 VisitForAccumulatorValue(args->at(1));
3283 Label materialize_true, materialize_false;
3284 Label* if_true = NULL;
3285 Label* if_false = NULL;
3286 Label* fall_through = NULL;
3287 context()->PrepareTest(&materialize_true, &materialize_false,
3288 &if_true, &if_false, &fall_through);
3290 __ pop(r1);
3291 __ cmp(r0, r1);
3292 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3293 Split(eq, if_true, if_false, fall_through);
3295 context()->Plug(if_true, if_false);
3296 }
3299 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3300 ZoneList<Expression*>* args = expr->arguments();
3301 ASSERT(args->length() == 1);
3303 // ArgumentsAccessStub expects the key in r1 and the formal
3304 // parameter count in r0.
3305 VisitForAccumulatorValue(args->at(0));
3306 __ mov(r1, r0);
3307 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3308 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3309 __ CallStub(&stub);
3310 context()->Plug(r0);
3311 }
3314 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3315 ASSERT(expr->arguments()->length() == 0);
3317 // Get the number of formal parameters.
3318 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3320 // Check if the calling frame is an arguments adaptor frame.
3321 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3322 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3323 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3325 // Arguments adaptor case: Read the arguments length from the
3326 // adaptor frame.
3327 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3329 context()->Plug(r0);
3330 }
3333 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3334 ZoneList<Expression*>* args = expr->arguments();
3335 ASSERT(args->length() == 1);
3336 Label done, null, function, non_function_constructor;
3338 VisitForAccumulatorValue(args->at(0));
3340 // If the object is a smi, we return null.
3341 __ JumpIfSmi(r0, &null);
3343 // Check that the object is a JS object but take special care of JS
3344 // functions to make sure they have 'Function' as their class.
3345 // Assume that there are only two callable types, and one of them is at
3346 // either end of the type range for JS object types. Saves extra comparisons.
3347 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3348 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3349 // Map is now in r0.
3350 __ b(lt, &null);
3351 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3352 FIRST_SPEC_OBJECT_TYPE + 1);
3353 __ b(eq, &function);
3355 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3356 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3357 LAST_SPEC_OBJECT_TYPE - 1);
3358 __ b(eq, &function);
3359 // Assume that there is no larger type.
3360 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3362 // Check if the constructor in the map is a JS function.
3363 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3364 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3365 __ b(ne, &non_function_constructor);
3367 // r0 now contains the constructor function. Grab the
3368 // instance class name from there.
3369 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3370 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3371 __ b(&done);
3373 // Functions have class 'Function'.
3374 __ bind(&function);
3375 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
3376 __ jmp(&done);
3378 // Objects with a non-function constructor have class 'Object'.
3379 __ bind(&non_function_constructor);
3380 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3381 __ jmp(&done);
3383 // Non-JS objects have class null.
3384 __ bind(&null);
3385 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3387 // All done.
3388 __ bind(&done);
3390 context()->Plug(r0);
3391 }
3394 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3395 // Conditionally generate a log call.
3396 // Args:
3397 // 0 (literal string): The type of logging (corresponds to the flags).
3398 // This is used to determine whether or not to generate the log call.
3399 // 1 (string): Format string. Access the string at argument index 2
3400 // with '%2s' (see Logger::LogRuntime for all the formats).
3401 // 2 (array): Arguments to the format string.
3402 ZoneList<Expression*>* args = expr->arguments();
3403 ASSERT_EQ(args->length(), 3);
3404 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3405 VisitForStackValue(args->at(1));
3406 VisitForStackValue(args->at(2));
3407 __ CallRuntime(Runtime::kHiddenLog, 2);
3408 }
3410 // Finally, we're expected to leave a value on the top of the stack.
3411 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3412 context()->Plug(r0);
3413 }
3416 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3417 // Load the arguments on the stack and call the stub.
3418 SubStringStub stub;
3419 ZoneList<Expression*>* args = expr->arguments();
3420 ASSERT(args->length() == 3);
3421 VisitForStackValue(args->at(0));
3422 VisitForStackValue(args->at(1));
3423 VisitForStackValue(args->at(2));
3424 __ CallStub(&stub);
3425 context()->Plug(r0);
3426 }
3429 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3430 // Load the arguments on the stack and call the stub.
3431 RegExpExecStub stub;
3432 ZoneList<Expression*>* args = expr->arguments();
3433 ASSERT(args->length() == 4);
3434 VisitForStackValue(args->at(0));
3435 VisitForStackValue(args->at(1));
3436 VisitForStackValue(args->at(2));
3437 VisitForStackValue(args->at(3));
3438 __ CallStub(&stub);
3439 context()->Plug(r0);
3440 }
3443 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3444 ZoneList<Expression*>* args = expr->arguments();
3445 ASSERT(args->length() == 1);
3446 VisitForAccumulatorValue(args->at(0));  // Load the object.
3448 Label done;
3449 // If the object is a smi return the object.
3450 __ JumpIfSmi(r0, &done);
3451 // If the object is not a value type, return the object.
3452 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3453 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3455 __ bind(&done);
3456 context()->Plug(r0);
3457 }
3460 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3461 ZoneList<Expression*>* args = expr->arguments();
3462 ASSERT(args->length() == 2);
3463 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3464 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3466 VisitForAccumulatorValue(args->at(0)); // Load the object.
3468 Label runtime, done, not_date_object;
3469 Register object = r0;
3470 Register result = r0;
3471 Register scratch0 = r9;
3472 Register scratch1 = r1;
3474 __ JumpIfSmi(object, &not_date_object);
3475 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3476 __ b(ne, &not_date_object);
3478 if (index->value() == 0) {
3479 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3480 __ jmp(&done);
3481 } else {
3482 if (index->value() < JSDate::kFirstUncachedField) {
3483 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3484 __ mov(scratch1, Operand(stamp));
3485 __ ldr(scratch1, MemOperand(scratch1));
3486 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3487 __ cmp(scratch1, scratch0);
3488 __ b(ne, &runtime);
3489 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3490 kPointerSize * index->value()));
3491 __ jmp(&done);
3492 }
3493 __ bind(&runtime);
3494 __ PrepareCallCFunction(2, scratch1);
3495 __ mov(r1, Operand(index));
3496 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3497 __ jmp(&done);
3498 }
3500 __ bind(&not_date_object);
3501 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3502 __ bind(&done);
3503 context()->Plug(r0);
3504 }
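// Fields below JSDate::kFirstUncachedField are cached directly on the date
// object; a cached value is valid only while the object's cache stamp equals
// the isolate's date cache stamp, otherwise the field is recomputed via the
// C function called above.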
3507 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3508 ZoneList<Expression*>* args = expr->arguments();
3509 ASSERT_EQ(3, args->length());
3511 Register string = r0;
3512 Register index = r1;
3513 Register value = r2;
3515 VisitForStackValue(args->at(1)); // index
3516 VisitForStackValue(args->at(2)); // value
3517 VisitForAccumulatorValue(args->at(0)); // string
3518 __ Pop(index, value);
3520 if (FLAG_debug_code) {
3521 __ SmiTst(value);
3522 __ Check(eq, kNonSmiValue);
3523 __ SmiTst(index);
3524 __ Check(eq, kNonSmiIndex);
3525 __ SmiUntag(index, index);
3526 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3527 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3528 __ SmiTag(index, index);
3529 }
3531 __ SmiUntag(value, value);
3532 __ add(ip,
3533 string,
3534 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3535 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3536 context()->Plug(string);
3537 }
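// The index is still a smi at the store above (untagged value shifted left by
// kSmiTagSize), so MemOperand(ip, index, LSR, kSmiTagSize) scales it back to
// a byte offset.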
3540 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3541 ZoneList<Expression*>* args = expr->arguments();
3542 ASSERT_EQ(3, args->length());
3544 Register string = r0;
3545 Register index = r1;
3546 Register value = r2;
3548 VisitForStackValue(args->at(1)); // index
3549 VisitForStackValue(args->at(2)); // value
3550 VisitForAccumulatorValue(args->at(0)); // string
3551 __ Pop(index, value);
3553 if (FLAG_debug_code) {
3554 __ SmiTst(value);
3555 __ Check(eq, kNonSmiValue);
3556 __ SmiTst(index);
3557 __ Check(eq, kNonSmiIndex);
3558 __ SmiUntag(index, index);
3559 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3560 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3561 __ SmiTag(index, index);
3562 }
3564 __ SmiUntag(value, value);
3565 __ add(ip,
3566 string,
3567 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3568 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3569 __ strh(value, MemOperand(ip, index));
3570 context()->Plug(string);
3571 }
3575 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3576 // Load the arguments on the stack and call the runtime function.
3577 ZoneList<Expression*>* args = expr->arguments();
3578 ASSERT(args->length() == 2);
3579 VisitForStackValue(args->at(0));
3580 VisitForStackValue(args->at(1));
3581 MathPowStub stub(MathPowStub::ON_STACK);
3582 __ CallStub(&stub);
3583 context()->Plug(r0);
3584 }
3587 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3588 ZoneList<Expression*>* args = expr->arguments();
3589 ASSERT(args->length() == 2);
3590 VisitForStackValue(args->at(0)); // Load the object.
3591 VisitForAccumulatorValue(args->at(1)); // Load the value.
3592 __ pop(r1); // r0 = value. r1 = object.
3594 Label done;
3595 // If the object is a smi, return the value.
3596 __ JumpIfSmi(r1, &done);
3598 // If the object is not a value type, return the value.
3599 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3600 __ b(ne, &done);
3602 // Store the value.
3603 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3604 // Update the write barrier. Save the value as it will be
3605 // overwritten by the write barrier code and is needed afterward.
3606 __ mov(r2, r0);
3607 __ RecordWriteField(
3608 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3610 __ bind(&done);
3611 context()->Plug(r0);
3612 }
3615 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3616 ZoneList<Expression*>* args = expr->arguments();
3617 ASSERT_EQ(args->length(), 1);
3618 // Load the argument into r0 and call the stub.
3619 VisitForAccumulatorValue(args->at(0));
3621 NumberToStringStub stub;
3622 __ CallStub(&stub);
3623 context()->Plug(r0);
3624 }
3627 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3628 ZoneList<Expression*>* args = expr->arguments();
3629 ASSERT(args->length() == 1);
3630 VisitForAccumulatorValue(args->at(0));
3632 Label done;
3633 StringCharFromCodeGenerator generator(r0, r1);
3634 generator.GenerateFast(masm_);
3635 __ jmp(&done);
3637 NopRuntimeCallHelper call_helper;
3638 generator.GenerateSlow(masm_, call_helper);
3640 __ bind(&done);
3641 context()->Plug(r1);
3642 }
3645 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 ASSERT(args->length() == 2);
3648 VisitForStackValue(args->at(0));
3649 VisitForAccumulatorValue(args->at(1));
3651 Register object = r1;
3652 Register index = r0;
3653 Register result = r3;
3655 __ pop(object);
3657 Label need_conversion;
3658 Label index_out_of_range;
3659 Label done;
3660 StringCharCodeAtGenerator generator(object,
3661 index,
3662 result,
3663 &need_conversion,
3664 &need_conversion,
3665 &index_out_of_range,
3666 STRING_INDEX_IS_NUMBER);
3667 generator.GenerateFast(masm_);
3668 __ jmp(&done);
3670 __ bind(&index_out_of_range);
3671 // When the index is out of range, the spec requires us to return
3672 // NaN.
3673 __ LoadRoot(result, Heap::kNanValueRootIndex);
3674 __ jmp(&done);
3676 __ bind(&need_conversion);
3677 // Load the undefined value into the result register, which will
3678 // trigger conversion.
3679 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3680 __ jmp(&done);
3682 NopRuntimeCallHelper call_helper;
3683 generator.GenerateSlow(masm_, call_helper);
3685 __ bind(&done);
3686 context()->Plug(result);
3687 }
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}
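
// Editor's note: %_CallFunction(receiver, arg..., function) invokes
// `function` with the given receiver. Only a genuine JSFunction takes the
// inline InvokeFunction path; anything else (including function proxies)
// falls back to Runtime::kCall. As an illustrative sketch, not V8's actual
// helpers:
//
//   if (IsJSFunction(f)) InvokeFunction(f, argc);    // fast path
//   else CallRuntime(Runtime::kCall, argc + 2);      // general path
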
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}
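
// Editor's note: JSFunctionResultCache keeps key/value pairs in a flat
// FixedArray plus a "finger" pointing at the most recently hit pair, so the
// fast path above performs a single load-and-compare. Roughly, in
// illustrative C++ (not V8's types):
//
//   struct Cache { int finger; Object* entries[N]; };  // key0,val0,key1,...
//   Object** pair = &cache->entries[cache->finger];
//   if (pair[0] == key) return pair[1];      // hit: one compare
//   return runtime_lookup(cache, key);       // miss: search + finger update
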
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}
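
// Editor's note: a flat string whose contents parse as a small array index
// caches that index inside its hash field. The tst above checks the
// "contains cached array index" bits, and IndexFromHash extracts the index.
// Field layout sketch; the shift/mask names below are illustrative only:
//
//   uint32_t hash_field;                     // [ cached index | flag bits ]
//   bool has_index =
//       (hash_field & String::kContainsCachedArrayIndexMask) == 0;
//   uint32_t index = (hash_field >> kArrayIndexShift) & kArrayIndexMask;
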
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);
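
  // Editor's note: the overflow checks above rely on smis being 31-bit.
  // smull leaves the 64-bit product in {ip:scratch} (high:low); the product
  // of an untagged length and a smi-tagged length is itself smi-tagged, and
  // it fits a smi exactly when the high 33 bits are zero, i.e. ip == 0 and
  // the low word's sign bit is clear. Equivalent C sketch:
  //
  //   int64_t product = (int64_t)array_length * separator_length_smi;
  //   bool fits_smi = (product >> 31) == 0;  // high 33 bits all zero
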
  // Get first element in the array to free up the elements register to be
  // used for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch,
                         string,  // used as scratch
                         elements_end,  // used as scratch
                         &bailout);
  // Prepare for looping. Set up elements_end to point at the end of the
  // array, and result_pos to the position in the result where the first
  // character will be written.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}
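
// Editor's note: the join above is the classic two-pass algorithm: first sum
// the element lengths (as smis, with overflow checks) to size the result,
// then allocate once and copy elements and separators into place. In rough,
// illustrative C++ terms:
//
//   size_t total = 0;
//   for (const String& s : elements) total += s.length();       // pass 1
//   total += separator.length() * (elements.size() - 1);
//   char* out = allocate_ascii_string(total);                   // one alloc
//   for (size_t i = 0; i < elements.size(); i++) {              // pass 2
//     if (i > 0) out = copy_bytes(out, separator);
//     out = copy_bytes(out, elements[i]);
//   }
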
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ ldr(r0, GlobalObjectOperand());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);

    // Load the function from the receiver.
    __ mov(r2, Operand(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(r0);
  }
}
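
// Editor's note: for a JS runtime call the stack must be
// [.., function, receiver, arg0..argN-1] before CallFunctionStub runs; the
// ldr/push/str triple above slides the freshly loaded function under the
// receiver. Schematically (illustrative pseudo-steps):
//
//   push(builtins);                      // receiver
//   function = LoadIC(builtins, name);   // result in r0
//   duplicate TOS; overwrite slot below; // [.., function, receiver]
//   push(arg0 .. argN-1);
//   CallFunctionStub(N);                 // finds function below the args
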
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(r1, Operand(Smi::FromInt(strict_mode())));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          ASSERT(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ ldr(r1, MemOperand(sp, 0));
      __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
      __ pop(r1);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(r2, r1);  // r1 = key. r2 = receiver.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}
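
// Editor's note: the inline smi path above adds the tagged constant directly
// and falls back to the BinaryOpIC stub on overflow, first undoing the add so
// the stub sees the original operand. With one-bit smi tags, adding
// Smi::FromInt(n) to a tagged value adds n to the untagged value. Sketch:
//
//   int32_t tagged = value << 1;         // smi encoding of `value`
//   tagged += count_value << 1;          // tagged add; SetCC flags overflow
//   // on overflow: tagged -= count_value << 1; then call the stub
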
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->float32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FLOAT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->int32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, INT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(r0, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(r0, Heap::kNullValueRootIndex);
      __ b(eq, if_true);
    }
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
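
// Editor's note: each branch above open-codes one clause of a
// `typeof x == "..."` comparison against a literal string. As a sketch of
// the dispatch (illustrative helpers, not V8's API):
//
//   const char* TypeofSketch(Object* x) {
//     if (is_smi(x) || has_heap_number_map(x)) return "number";
//     if (is_string(x) && !is_undetectable(x)) return "string";
//     if (is_undefined(x) || is_undetectable(x)) return "undefined";
//     if (is_js_function(x) || is_function_proxy(x)) return "function";
//     // ... booleans, symbols, SIMD types, and "object" follow similarly.
//   }
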
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}

// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing the finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ ldr(r1, MemOperand(ip));
  __ SmiTag(r1);
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}

void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  __ SmiUntag(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Pop the cooked return address from the stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}
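
// Editor's note: the return address is "cooked" on entry to a finally block:
// it is stored as a smi-tagged delta from the code object rather than as a
// raw code pointer, so a GC that moves the code object while the finally
// body runs cannot leave a stale address on the stack. Conceptually:
//
//   intptr_t cooked = (lr - code_object) << 1;   // EnterFinallyBlock
//   ...                                          // finally body runs here
//   pc = code_object + (cooked >> 1);            // ExitFinallyBlock
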
#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_ool_constant_pool) {
    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
    load_address -= Assembler::kInstrSize;
    ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else {
    ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}

void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of:
      //   ldr ip, [pc/pp, <constant pool offset>]  |   movw ip, <immed low>
      //                                            |   movt ip, <immed high>
      //   blx ip
      //  ok-label

      // Calculate branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // one instruction.
      int branch_offset = pc + kInstrSize - branch_address;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of:
      //   ldr ip, [pc/pp, <constant pool offset>]  |   movw ip, <immed low>
      //                                            |   movt ip, <immed high>
      //   blx ip
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
      replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
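
// Editor's note: PatchAt toggles exactly one instruction ahead of the call
// sequence - a "bpl ok" branch (interrupt check armed) versus a nop (always
// call, used for on-stack replacement) - and then rewrites the constant that
// the ldr or movw/movt pair loads. Side-by-side sketch of the two states:
//
//   // INTERRUPT:            // ON_STACK_REPLACEMENT / OSR_AFTER_STACK_CHECK:
//   //   bpl ok              //   nop
//   //   ldr ip, <target>    //   ldr ip, <OSR builtin>
//   //   blx ip              //   blx ip
//   // ok:                   // ok:
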
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    ASSERT(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM