// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and a method,
// EmitPatchInfo, to record a marker back to the patchable code. The marker
// is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (the raw 12-bit
// immediate value is used) is the delta from the pc to the first instruction
// of the patchable code.
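//
// Illustrative example (not in the original source): with the raw 12-bit
// immediate mask 0xfff, a patch site 42 instructions before the marker is
// encoded as "cmp r0, #42" (42 / 0xfff == 0, 42 % 0xfff == 42), while a
// hypothetical delta of 5000 would be encoded as "cmp r1, #905"
// (5000 / 0xfff == 1, 5000 % 0xfff == 905).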
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
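//
// Illustrative sketch of that frame (not in the original source; frames-arm.h
// is the authoritative layout), from higher to lower addresses:
//
//                 receiver, arg 0, ..., arg n-1  (pushed by the caller)
//   caller sp ->  [the last argument sits at the caller's sp]
//                 lr  (return address)
//   fp ->         caller's fp
//                 context (cp)
//                 JS function (r1)
//                 local 0, local 1, ...          (allocated below fp)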
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Classic mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->is_classic_mode() && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());
  __ LoadConstantPoolPointerRegister();

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      // Emit a loop to initialize stack cells for locals when optimizing for
      // size. Otherwise, unroll the loop for maximum performance.
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size && locals_count > 4) {
        Label loop;
        __ mov(r2, Operand(locals_count));
        __ bind(&loop);
        __ sub(r2, r2, Operand(1), SetCC);
        __ push(r9);
        __ b(ne, &loop);
      } else {
        for (int i = 0; i < locals_count; i++) {
          __ push(r9);
        }
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite the receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
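  // Illustrative example (not in the original source): a back edge spanning
  // 10 * kCodeSizeMultiplier bytes of unoptimized code yields a weight of 10,
  // so the profiling counter drains faster for larger loop bodies; the weight
  // is clamped to the range [1, kMaxBackEdgeWeight].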
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      __ add(sp, sp, Operand(sp_delta));
      __ Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


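// Illustrative example (not in the original source): when the false block is
// laid out immediately after the split, Split(eq, if_true, if_false,
// if_false) emits a single "beq if_true" and falls through into the false
// code; when the true block falls through instead, only the negated branch
// "bne if_false" is emitted; with no fall-through, both a conditional and an
// unconditional branch are needed.

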
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


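// Illustrative example (not in the original source, assuming 32-bit
// pointers): in a function with two parameters, parameter 0 yields
// MemOperand(fp, 3 * kPointerSize), i.e. a slot above the caller's frame
// data, while the stack local with index 1 yields
// MemOperand(fp, JavaScriptFrameConstants::kLocal0Offset - kPointerSize),
// i.e. the second local slot below the fixed part of the frame.

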
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global scope.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ Move(r1, cell);
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, Cell::kValueOffset));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
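  // Illustrative note (not in the original source): on 32-bit ARM a smi
  // stores its value shifted left by one bit, so
  // MemOperand::PointerAddressFromSmiKey(r2, r0) addresses
  // r2 + (r0 << (kPointerSizeLog2 - kSmiTagSize)), i.e. it rescales the smi
  // index in r0 directly into a word offset without untagging it first.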

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.break_label());
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST_HARMONY
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
      expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->value()));
            __ ldr(r1, MemOperand(sp));
            CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1 || Serializer::enabled() ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ mov(r0, Operand(Smi::FromInt(flags)));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(r0);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ ldr(r6, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
      __ str(result_register(), FieldMemOperand(r1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(r1, offset, result_register(), r2,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ mov(r3, Operand(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


1867 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1868 Comment cmnt(masm_, "[ Assignment");
1869 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1870 // on the left-hand side.
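  // For example, 'foo() = 1' has no valid reference on the left, so the
  // target is visited only for its effect and the rewritten throw takes over.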
1871 if (!expr->target()->IsValidLeftHandSide()) {
1872 VisitForEffect(expr->target());
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
1878 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1879 LhsKind assign_type = VARIABLE;
1880 Property* property = expr->target()->AsProperty();
1881 if (property != NULL) {
1882 assign_type = (property->key()->IsPropertyName())
1887 // Evaluate LHS expression.
1888 switch (assign_type) {
1890 // Nothing to do here.
1892 case NAMED_PROPERTY:
1893 if (expr->is_compound()) {
1894 // We need the receiver both on the stack and in the accumulator.
1895 VisitForAccumulatorValue(property->obj());
1896 __ push(result_register());
1898 VisitForStackValue(property->obj());
1901 case KEYED_PROPERTY:
1902 if (expr->is_compound()) {
1903 VisitForStackValue(property->obj());
1904 VisitForAccumulatorValue(property->key());
1905 __ ldr(r1, MemOperand(sp, 0));
1908 VisitForStackValue(property->obj());
1909 VisitForStackValue(property->key());
1914 // For compound assignments we need another deoptimization point after the
1915 // variable/property load.
1916 if (expr->is_compound()) {
1917 { AccumulatorValueContext context(this);
1918 switch (assign_type) {
1920 EmitVariableLoad(expr->target()->AsVariableProxy());
1921 PrepareForBailout(expr->target(), TOS_REG);
1923 case NAMED_PROPERTY:
1924 EmitNamedPropertyLoad(property);
1925 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1927 case KEYED_PROPERTY:
1928 EmitKeyedPropertyLoad(property);
1929 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1934 Token::Value op = expr->binary_op();
1935 __ push(r0); // Left operand goes on the stack.
1936 VisitForAccumulatorValue(expr->value());
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
1941 SetSourcePosition(expr->position() + 1);
1942 AccumulatorValueContext context(this);
1943 if (ShouldInlineSmiCase(op)) {
1944 EmitInlineSmiBinaryOp(expr->binary_operation(),
1950 EmitBinaryOp(expr->binary_operation(), op, mode);
1953 // Deoptimization point in case the binary operation may have side effects.
1954 PrepareForBailout(expr->binary_operation(), TOS_REG);
1956 VisitForAccumulatorValue(expr->value());
1959 // Record source position before possible IC call.
1960 SetSourcePosition(expr->position());
1963 switch (assign_type) {
1965 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1967 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1968 context()->Plug(r0);
1970 case NAMED_PROPERTY:
1971 EmitNamedPropertyAssignment(expr);
1973 case KEYED_PROPERTY:
1974 EmitKeyedPropertyAssignment(expr);
1980 void FullCodeGenerator::VisitYield(Yield* expr) {
1981 Comment cmnt(masm_, "[ Yield");
1982 // Evaluate yielded value first; the initial iterator definition depends on
1983 // this. It stays on the stack while we update the iterator.
1984 VisitForStackValue(expr->expression());
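  // For example, in 'function* g() { var x = yield f(); }' the call f() is
  // evaluated here, before the generator suspends.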
1986 switch (expr->yield_kind()) {
1987 case Yield::SUSPEND:
1988 // Pop value from top-of-stack slot; box result into result register.
1989 EmitCreateIteratorResult(false);
1990 __ push(result_register());
1992 case Yield::INITIAL: {
1993 Label suspend, continuation, post_runtime, resume;
1997 __ bind(&continuation);
2001 VisitForAccumulatorValue(expr->generator_object());
2002 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2003 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2004 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2005 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2007 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2008 kLRHasBeenSaved, kDontSaveFPRegs);
2009 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2011 __ b(eq, &post_runtime);
2012 __ push(r0); // generator object
2013 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2014 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2015 __ bind(&post_runtime);
2016 __ pop(result_register());
2017 EmitReturnSequence();
2020 context()->Plug(result_register());
2024 case Yield::FINAL: {
2025 VisitForAccumulatorValue(expr->generator_object());
2026 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2027 __ str(r1, FieldMemOperand(result_register(),
2028 JSGeneratorObject::kContinuationOffset));
2029 // Pop value from top-of-stack slot, box result into result register.
2030 EmitCreateIteratorResult(true);
2031 EmitUnwindBeforeReturn();
2032 EmitReturnSequence();
2036 case Yield::DELEGATING: {
2037 VisitForStackValue(expr->generator_object());
2039 // Initial stack layout is as follows:
2040 // [sp + 1 * kPointerSize] iter
2041 // [sp + 0 * kPointerSize] g
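      // Roughly, the code below implements:
      //   var f = 'next', received = undefined;
      //   while (true) {
      //     result = receiver[f](received);            // l_call
      //     if (result.done) break;                    // l_loop
      //     received = yield result;                   // l_try, l_suspend
      //   }
      //   // The value of the yield* expression is result.value.
      // An exception thrown into the generator lands in l_catch, which sets
      // f = 'throw' and loops back to l_call.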
2043 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2044 Label l_next, l_call, l_loop;
2045 // Initial send value is undefined.
2046 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2049 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2051 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2052 __ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
2053 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2054 __ Push(r2, r3, r0); // "throw", iter, except
2057 // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
2061 __ pop(r0); // result
2062 __ PushTryHandler(StackHandler::CATCH, expr->index());
2063 const int handler_size = StackHandlerConstants::kSize;
2064 __ push(r0); // result
2066 __ bind(&l_continuation);
2068 __ bind(&l_suspend);
2069 const int generator_object_depth = kPointerSize + handler_size;
2070 __ ldr(r0, MemOperand(sp, generator_object_depth));
2072 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2073 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2074 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2075 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2077 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2078 kLRHasBeenSaved, kDontSaveFPRegs);
2079 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2080 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2081 __ pop(r0); // result
2082 EmitReturnSequence();
2083 __ bind(&l_resume); // received in r0
2086 // receiver = iter; f = 'next'; arg = received;
2088 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2089 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2090 __ Push(r2, r3, r0); // "next", iter, received
2092 // result = receiver[f](arg);
2094 __ ldr(r1, MemOperand(sp, kPointerSize));
2095 __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2096 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2097 CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
2099 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2100 CallFunctionStub stub(1, CALL_AS_METHOD);
2103 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2104 __ Drop(1); // The function is still on the stack; drop it.
2106 // if (!result.done) goto l_try;
2108 __ push(r0); // save result
2109 __ LoadRoot(r2, Heap::kdone_stringRootIndex); // "done"
2110 CallLoadIC(NOT_CONTEXTUAL); // result.done in r0
2111 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2113 __ cmp(r0, Operand(0));
2117 __ pop(r0); // result
2118 __ LoadRoot(r2, Heap::kvalue_stringRootIndex); // "value"
2119 CallLoadIC(NOT_CONTEXTUAL); // result.value in r0
2120 context()->DropAndPlug(2, r0); // drop iter and g
2127 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2129 JSGeneratorObject::ResumeMode resume_mode) {
2130 // The value stays in r0, and is ultimately read by the resumed generator, as
2131 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2132 // is read to throw the value when the resumed generator is already closed.
2133 // r1 will hold the generator object until the activation has been resumed.
2134 VisitForStackValue(generator);
2135 VisitForAccumulatorValue(value);
2138 // Check generator state.
2139 Label wrong_state, closed_state, done;
2140 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2141 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2142 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2143 __ cmp(r3, Operand(Smi::FromInt(0)));
2144 __ b(eq, &closed_state);
2145 __ b(lt, &wrong_state);
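  // A positive continuation value is the resume offset into the generator's
  // code; zero means the generator is closed, negative that it is executing.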
2147 // Load suspended function and context.
2148 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2149 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2151 // Load receiver and store as the first argument.
2152 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2155 // Push holes for the rest of the arguments to the generator function.
2156 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2159 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2160 Label push_argument_holes, push_frame;
2161 __ bind(&push_argument_holes);
2162 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2163 __ b(mi, &push_frame);
2165 __ jmp(&push_argument_holes);
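  // One hole is pushed per formal parameter, e.g. two for 'function* g(a, b)';
  // the holes are placeholders that give the resumed frame its expected layout.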
2167 // Enter a new JavaScript frame, and initialize its slots as they were when
2168 // the generator was suspended.
2170 __ bind(&push_frame);
2171 __ bl(&resume_frame);
2173 __ bind(&resume_frame);
2174 // lr = return address.
2175 // fp = caller's frame pointer.
2176 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2177 // cp = callee's context,
2178 // r4 = callee's JS function.
2179 __ PushFixedFrame(r4);
2180 // Adjust FP to point to saved FP.
2181 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2183 // Load the operand stack size.
2184 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2185 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
2190 if (resume_mode == JSGeneratorObject::NEXT) {
2192 __ cmp(r3, Operand(0));
2193 __ b(ne, &slow_resume);
2194 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2195 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2198 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2199 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2201 __ bind(&slow_resume);
2204 // Otherwise, we push holes for the operand stack and call the runtime to fix
2205 // up the stack and the handlers.
2206 Label push_operand_holes, call_resume;
2207 __ bind(&push_operand_holes);
2208 __ sub(r3, r3, Operand(1), SetCC);
2209 __ b(mi, &call_resume);
2211 __ b(&push_operand_holes);
2212 __ bind(&call_resume);
2213 ASSERT(!result_register().is(r1));
2214 __ Push(r1, result_register());
2215 __ Push(Smi::FromInt(resume_mode));
2216 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2217 // Not reached: the runtime call returns elsewhere.
2218 __ stop("not-reached");
2220 // Reach here when generator is closed.
2221 __ bind(&closed_state);
2222 if (resume_mode == JSGeneratorObject::NEXT) {
2223 // Return completed iterator result when generator is closed.
2224 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2226 // Pop value from top-of-stack slot; box result into result register.
2227 EmitCreateIteratorResult(true);
2229 // Throw the provided value.
2231 __ CallRuntime(Runtime::kThrow, 1);
2235 // Throw error if we attempt to operate on a running generator.
2236 __ bind(&wrong_state);
2238 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2241 context()->Plug(result_register());
2245 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2249 Handle<Map> map(isolate()->native_context()->generator_result_map());
2251 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2254 __ bind(&gc_required);
2255 __ Push(Smi::FromInt(map->instance_size()));
2256 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2257 __ ldr(context_register(),
2258 MemOperand(fp, StandardFrameConstants::kContextOffset));
2260 __ bind(&allocated);
2261 __ mov(r1, Operand(map));
2263 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2264 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2265 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2266 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2267 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2268 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2,
         FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
  __ str(r3,
         FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
  // Only the value field needs a write barrier, as the other values are in the
  // root set.
2276 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2277 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
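  // The result is a fresh JSObject of the shape { value: v, done: d },
  // allocated from the native context's generator_result_map().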
2281 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2282 SetSourcePosition(prop->position());
2283 Literal* key = prop->key()->AsLiteral();
2284 __ mov(r2, Operand(key->value()));
  // Call load IC. It has arguments receiver and property name in r0 and r2.
2286 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2290 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2291 SetSourcePosition(prop->position());
2292 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
2293 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2294 CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2298 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2301 Expression* left_expr,
2302 Expression* right_expr) {
2303 Label done, smi_case, stub_call;
2305 Register scratch1 = r2;
2306 Register scratch2 = r3;
  // Get the arguments.
  Register left = r1;
  Register right = r0;
2313 // Perform combined smi check on both operands.
2314 __ orr(scratch1, left, Operand(right));
2315 STATIC_ASSERT(kSmiTag == 0);
2316 JumpPatchSite patch_site(masm_);
2317 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2319 __ bind(&stub_call);
2320 BinaryOpICStub stub(op, mode);
2321 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2322 expr->BinaryOperationFeedbackId());
2323 patch_site.EmitPatchInfo();
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
2331 __ GetLeastBitsFromSmi(scratch1, right, 5);
2332 __ mov(right, Operand(left, ASR, scratch1));
2333 __ bic(right, right, Operand(kSmiTagMask));
2336 __ SmiUntag(scratch1, left);
2337 __ GetLeastBitsFromSmi(scratch2, right, 5);
2338 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2339 __ TrySmiTag(right, scratch1, &stub_call);
2343 __ SmiUntag(scratch1, left);
2344 __ GetLeastBitsFromSmi(scratch2, right, 5);
2345 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2346 __ tst(scratch1, Operand(0xc0000000));
2347 __ b(ne, &stub_call);
2348 __ SmiTag(right, scratch1);
2352 __ add(scratch1, left, Operand(right), SetCC);
2353 __ b(vs, &stub_call);
2354 __ mov(right, scratch1);
2357 __ sub(scratch1, left, Operand(right), SetCC);
2358 __ b(vs, &stub_call);
2359 __ mov(right, scratch1);
2362 __ SmiUntag(ip, right);
2363 __ smull(scratch1, scratch2, left, ip);
2364 __ mov(ip, Operand(scratch1, ASR, 31));
2365 __ cmp(ip, Operand(scratch2));
2366 __ b(ne, &stub_call);
2367 __ cmp(scratch1, Operand::Zero());
2368 __ mov(right, Operand(scratch1), LeaveCC, ne);
2370 __ add(scratch2, right, Operand(left), SetCC);
2371 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2372 __ b(mi, &stub_call);
2376 __ orr(right, left, Operand(right));
2378 case Token::BIT_AND:
2379 __ and_(right, left, Operand(right));
2381 case Token::BIT_XOR:
2382 __ eor(right, left, Operand(right));
2389 context()->Plug(r0);
2393 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2395 OverwriteMode mode) {
2397 BinaryOpICStub stub(op, mode);
2398 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2399 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2400 expr->BinaryOperationFeedbackId());
2401 patch_site.EmitPatchInfo();
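  // Unlike EmitInlineSmiBinaryOp above, no inline smi fast path is emitted;
  // the unbound patch site records for the IC that none exists.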
2402 context()->Plug(r0);
2406 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2407 // Invalid left-hand sides are rewritten by the parser to have a 'throw
2408 // ReferenceError' on the left-hand side.
2409 if (!expr->IsValidLeftHandSide()) {
2410 VisitForEffect(expr);
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
2416 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2417 LhsKind assign_type = VARIABLE;
2418 Property* prop = expr->AsProperty();
2420 assign_type = (prop->key()->IsPropertyName())
2425 switch (assign_type) {
2427 Variable* var = expr->AsVariableProxy()->var();
2428 EffectContext context(this);
2429 EmitVariableAssignment(var, Token::ASSIGN);
2432 case NAMED_PROPERTY: {
2433 __ push(r0); // Preserve value.
2434 VisitForAccumulatorValue(prop->obj());
2436 __ pop(r0); // Restore value.
2437 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2438 CallStoreIC(NOT_CONTEXTUAL);
2441 case KEYED_PROPERTY: {
2442 __ push(r0); // Preserve value.
2443 VisitForStackValue(prop->obj());
2444 VisitForAccumulatorValue(prop->key());
2446 __ Pop(r0, r2); // r0 = restored value.
2447 Handle<Code> ic = is_classic_mode()
2448 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2454 context()->Plug(r0);
2458 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2460 if (var->IsUnallocated()) {
2461 // Global var, const, or let.
2462 __ mov(r2, Operand(var->name()));
2463 __ ldr(r1, GlobalObjectOperand());
2464 CallStoreIC(CONTEXTUAL);
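    // For example, an assignment 'x = 1' to a global x takes this path and
    // goes through the contextual store IC with the name in r2.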
2465 } else if (op == Token::INIT_CONST) {
2466 // Const initializers need a write barrier.
2467 ASSERT(!var->IsParameter()); // No const parameters.
2468 if (var->IsStackLocal()) {
2469 __ ldr(r1, StackOperand(var));
2470 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2471 __ str(result_register(), StackOperand(var), eq);
2473 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2474 // Like var declarations, const declarations are hoisted to function
2475 // scope. However, unlike var initializers, const initializers are
2476 // able to drill a hole to that function context, even from inside a
2477 // 'with' context. We thus bypass the normal static scope lookup for
2478 // var->IsContextSlot().
2480 __ mov(r0, Operand(var->name()));
2481 __ Push(cp, r0); // Context and name.
2482 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2485 } else if (var->mode() == LET && op != Token::INIT_LET) {
2486 // Non-initializing assignment to let variable needs a write barrier.
2487 if (var->IsLookupSlot()) {
2488 __ push(r0); // Value.
2489 __ mov(r1, Operand(var->name()));
2490 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2491 __ Push(cp, r1, r0); // Context, name, strict mode.
2492 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2494 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2496 MemOperand location = VarOperand(var, r1);
2497 __ ldr(r3, location);
2498 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2500 __ mov(r3, Operand(var->name()));
2502 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2503 // Perform the assignment.
2505 __ str(result_register(), location);
2506 if (var->IsContextSlot()) {
2507 // RecordWrite may destroy all its register arguments.
2508 __ mov(r3, result_register());
2509 int offset = Context::SlotOffset(var->index());
2510 __ RecordWriteContextSlot(
2511 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2515 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
2518 if (var->IsStackAllocated() || var->IsContextSlot()) {
2519 MemOperand location = VarOperand(var, r1);
2520 if (generate_debug_code_ && op == Token::INIT_LET) {
2521 // Check for an uninitialized let binding.
2522 __ ldr(r2, location);
2523 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2524 __ Check(eq, kLetBindingReInitialization);
2526 // Perform the assignment.
2527 __ str(r0, location);
2528 if (var->IsContextSlot()) {
2530 int offset = Context::SlotOffset(var->index());
2531 __ RecordWriteContextSlot(
2532 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2535 ASSERT(var->IsLookupSlot());
2536 __ push(r0); // Value.
2537 __ mov(r1, Operand(var->name()));
2538 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2539 __ Push(cp, r1, r0); // Context, name, strict mode.
2540 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2543 // Non-initializing assignments to consts are ignored.
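  // For example, in classic mode 'const c = 1; c = 2;' the second assignment
  // reaches this point and is silently dropped.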
2547 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2548 // Assignment to a property, using a named store IC.
2549 Property* prop = expr->target()->AsProperty();
2550 ASSERT(prop != NULL);
2551 ASSERT(prop->key()->AsLiteral() != NULL);
2553 // Record source code position before IC call.
2554 SetSourcePosition(expr->position());
2555 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2558 CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2560 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2561 context()->Plug(r0);
2565 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2566 // Assignment to a property, using a keyed store IC.
2568 // Record source code position before IC call.
2569 SetSourcePosition(expr->position());
2570 __ Pop(r2, r1); // r1 = key.
2572 Handle<Code> ic = is_classic_mode()
2573 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2574 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2575 CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2577 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2578 context()->Plug(r0);
2582 void FullCodeGenerator::VisitProperty(Property* expr) {
2583 Comment cmnt(masm_, "[ Property");
2584 Expression* key = expr->key();
2586 if (key->IsPropertyName()) {
2587 VisitForAccumulatorValue(expr->obj());
2588 EmitNamedPropertyLoad(expr);
2589 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2590 context()->Plug(r0);
2592 VisitForStackValue(expr->obj());
2593 VisitForAccumulatorValue(expr->key());
2595 EmitKeyedPropertyLoad(expr);
2596 context()->Plug(r0);
2601 void FullCodeGenerator::CallIC(Handle<Code> code,
2602 ContextualMode mode,
2603 TypeFeedbackId ast_id) {
2605 // All calls must have a predictable size in full-codegen code to ensure that
2606 // the debugger can patch them correctly.
2607 ASSERT(mode != CONTEXTUAL || ast_id.IsNone());
2608 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2609 NEVER_INLINE_TARGET_ADDRESS);
2613 // Code common for calls using the IC.
2614 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2615 Expression* callee = expr->expression();
2616 ZoneList<Expression*>* args = expr->arguments();
2617 int arg_count = args->length();
2619 CallFunctionFlags flags;
2620 // Get the target function.
2621 if (callee->IsVariableProxy()) {
2622 { StackValueContext context(this);
2623 EmitVariableLoad(callee->AsVariableProxy());
2624 PrepareForBailout(callee, NO_REGISTERS);
2626 // Push undefined as receiver. This is patched in the method prologue if it
2627 // is a classic mode method.
2628 __ Push(isolate()->factory()->undefined_value());
2629 flags = NO_CALL_FUNCTION_FLAGS;
2631 // Load the function from the receiver.
2632 ASSERT(callee->IsProperty());
2633 __ ldr(r0, MemOperand(sp, 0));
2634 EmitNamedPropertyLoad(callee->AsProperty());
2635 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2636 // Push the target function under the receiver.
2637 __ ldr(ip, MemOperand(sp, 0));
2639 __ str(r0, MemOperand(sp, kPointerSize));
2640 flags = CALL_AS_METHOD;
2643 // Load the arguments.
2644 { PreservePositionScope scope(masm()->positions_recorder());
2645 for (int i = 0; i < arg_count; i++) {
2646 VisitForStackValue(args->at(i));
2650 // Record source position for debugger.
2651 SetSourcePosition(expr->position());
2652 CallFunctionStub stub(arg_count, flags);
2653 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2656 RecordJSReturnSite(expr);
2658 // Restore context register.
2659 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2661 context()->DropAndPlug(1, r0);
2665 // Code common for calls using the IC.
2666 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2669 VisitForAccumulatorValue(key);
2671 Expression* callee = expr->expression();
2672 ZoneList<Expression*>* args = expr->arguments();
2673 int arg_count = args->length();
2675 // Load the function from the receiver.
2676 ASSERT(callee->IsProperty());
2677 __ ldr(r1, MemOperand(sp, 0));
2678 EmitKeyedPropertyLoad(callee->AsProperty());
2679 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2681 // Push the target function under the receiver.
2682 __ ldr(ip, MemOperand(sp, 0));
2684 __ str(r0, MemOperand(sp, kPointerSize));
2686 { PreservePositionScope scope(masm()->positions_recorder());
2687 for (int i = 0; i < arg_count; i++) {
2688 VisitForStackValue(args->at(i));
2692 // Record source position for debugger.
2693 SetSourcePosition(expr->position());
2694 CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2695 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2698 RecordJSReturnSite(expr);
2699 // Restore context register.
2700 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2702 context()->DropAndPlug(1, r0);
2706 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2707 // Code common for calls using the call stub.
2708 ZoneList<Expression*>* args = expr->arguments();
2709 int arg_count = args->length();
2710 { PreservePositionScope scope(masm()->positions_recorder());
2711 for (int i = 0; i < arg_count; i++) {
2712 VisitForStackValue(args->at(i));
2715 // Record source position for debugger.
2716 SetSourcePosition(expr->position());
2718 Handle<Object> uninitialized =
2719 TypeFeedbackCells::UninitializedSentinel(isolate());
2720 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2721 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2722 __ mov(r2, Operand(cell));
2724 // Record call targets in unoptimized code.
2725 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2726 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2727 __ CallStub(&stub, expr->CallFeedbackId());
2728 RecordJSReturnSite(expr);
2729 // Restore context register.
2730 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2731 context()->DropAndPlug(1, r0);
2735 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2736 // r4: copy of the first argument or undefined if it doesn't exist.
2737 if (arg_count > 0) {
2738 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2740 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2743 // r3: the receiver of the enclosing function.
2744 int receiver_offset = 2 + info_->scope()->num_parameters();
2745 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2747 // r2: the language mode.
2748 __ mov(r2, Operand(Smi::FromInt(language_mode())));
  // r1: the start position of the scope the call resides in.
2751 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2753 // Do the runtime call.
2754 __ Push(r4, r3, r2, r1);
2755 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
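  // Note: only four values are pushed here; the fifth argument, a copy of the
  // callee, was pushed by VisitCall's eval path before invoking this helper.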
2759 void FullCodeGenerator::VisitCall(Call* expr) {
2761 // We want to verify that RecordJSReturnSite gets called on all paths
2762 // through this function. Avoid early returns.
2763 expr->return_is_recorded_ = false;
2766 Comment cmnt(masm_, "[ Call");
2767 Expression* callee = expr->expression();
2768 Call::CallType call_type = expr->GetCallType(isolate());
2770 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2771 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2772 // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
2775 ZoneList<Expression*>* args = expr->arguments();
2776 int arg_count = args->length();
2778 { PreservePositionScope pos_scope(masm()->positions_recorder());
2779 VisitForStackValue(callee);
2780 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2781 __ push(r2); // Reserved receiver slot.
2783 // Push the arguments.
2784 for (int i = 0; i < arg_count; i++) {
2785 VisitForStackValue(args->at(i));
2788 // Push a copy of the function (found below the arguments) and
2790 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2792 EmitResolvePossiblyDirectEval(arg_count);
2794 // The runtime call returns a pair of values in r0 (function) and
2795 // r1 (receiver). Touch up the stack with the right values.
2796 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2797 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2800 // Record source position for debugger.
2801 SetSourcePosition(expr->position());
2802 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2803 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2805 RecordJSReturnSite(expr);
2806 // Restore context register.
2807 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2808 context()->DropAndPlug(1, r0);
2809 } else if (call_type == Call::GLOBAL_CALL) {
2810 EmitCallWithIC(expr);
2812 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2813 // Call to a lookup slot (dynamically introduced variable).
2814 VariableProxy* proxy = callee->AsVariableProxy();
2817 { PreservePositionScope scope(masm()->positions_recorder());
2818 // Generate code for loading from variables potentially shadowed
2819 // by eval-introduced variables.
2820 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2824 // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
2826 ASSERT(!context_register().is(r2));
2827 __ mov(r2, Operand(proxy->name()));
2828 __ Push(context_register(), r2);
2829 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2830 __ Push(r0, r1); // Function, receiver.
2832 // If fast case code has been generated, emit code to push the
2833 // function and receiver and have the slow path jump around this
2835 if (done.is_linked()) {
2841 // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
2843 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2848 // The receiver is either the global receiver or an object found
2849 // by LoadContextSlot.
2850 EmitCallWithStub(expr);
2851 } else if (call_type == Call::PROPERTY_CALL) {
2852 Property* property = callee->AsProperty();
2853 { PreservePositionScope scope(masm()->positions_recorder());
2854 VisitForStackValue(property->obj());
2856 if (property->key()->IsPropertyName()) {
2857 EmitCallWithIC(expr);
2859 EmitKeyedCallWithIC(expr, property->key());
2862 ASSERT(call_type == Call::OTHER_CALL);
2863 // Call to an arbitrary expression not handled specially above.
2864 { PreservePositionScope scope(masm()->positions_recorder());
2865 VisitForStackValue(callee);
2867 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2869 // Emit function call.
2870 EmitCallWithStub(expr);
2874 // RecordJSReturnSite should have been called.
2875 ASSERT(expr->return_is_recorded_);
2880 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2881 Comment cmnt(masm_, "[ CallNew");
2882 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
2886 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
2889 VisitForStackValue(expr->expression());
2891 // Push the arguments ("left-to-right") on the stack.
2892 ZoneList<Expression*>* args = expr->arguments();
2893 int arg_count = args->length();
2894 for (int i = 0; i < arg_count; i++) {
2895 VisitForStackValue(args->at(i));
2898 // Call the construct call builtin that handles allocation and
2899 // constructor invocation.
2900 SetSourcePosition(expr->position());
2902 // Load function and argument count into r1 and r0.
2903 __ mov(r0, Operand(arg_count));
2904 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
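  // For example, for 'new C(1, 2)' we have r0 == 2 and C is loaded from
  // sp + 2 * kPointerSize, just below the two pushed arguments.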
2906 // Record call targets in unoptimized code.
2907 Handle<Object> uninitialized =
2908 TypeFeedbackCells::UninitializedSentinel(isolate());
2909 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2910 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2911 __ mov(r2, Operand(cell));
2913 CallConstructStub stub(RECORD_CALL_TARGET);
2914 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2915 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2916 context()->Plug(r0);
2920 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2921 ZoneList<Expression*>* args = expr->arguments();
2922 ASSERT(args->length() == 1);
2924 VisitForAccumulatorValue(args->at(0));
2926 Label materialize_true, materialize_false;
2927 Label* if_true = NULL;
2928 Label* if_false = NULL;
2929 Label* fall_through = NULL;
2930 context()->PrepareTest(&materialize_true, &materialize_false,
2931 &if_true, &if_false, &fall_through);
2933 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2935 Split(eq, if_true, if_false, fall_through);
2937 context()->Plug(if_true, if_false);
2941 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2942 ZoneList<Expression*>* args = expr->arguments();
2943 ASSERT(args->length() == 1);
2945 VisitForAccumulatorValue(args->at(0));
2947 Label materialize_true, materialize_false;
2948 Label* if_true = NULL;
2949 Label* if_false = NULL;
2950 Label* fall_through = NULL;
2951 context()->PrepareTest(&materialize_true, &materialize_false,
2952 &if_true, &if_false, &fall_through);
2954 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2955 __ NonNegativeSmiTst(r0);
2956 Split(eq, if_true, if_false, fall_through);
2958 context()->Plug(if_true, if_false);
2962 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2963 ZoneList<Expression*>* args = expr->arguments();
2964 ASSERT(args->length() == 1);
2966 VisitForAccumulatorValue(args->at(0));
2968 Label materialize_true, materialize_false;
2969 Label* if_true = NULL;
2970 Label* if_false = NULL;
2971 Label* fall_through = NULL;
2972 context()->PrepareTest(&materialize_true, &materialize_false,
2973 &if_true, &if_false, &fall_through);
2975 __ JumpIfSmi(r0, if_false);
2976 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2979 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2980 // Undetectable objects behave like undefined when tested with typeof.
2981 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2982 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2984 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2985 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2987 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2988 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2989 Split(le, if_true, if_false, fall_through);
2991 context()->Plug(if_true, if_false);
2995 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2996 ZoneList<Expression*>* args = expr->arguments();
2997 ASSERT(args->length() == 1);
2999 VisitForAccumulatorValue(args->at(0));
3001 Label materialize_true, materialize_false;
3002 Label* if_true = NULL;
3003 Label* if_false = NULL;
3004 Label* fall_through = NULL;
3005 context()->PrepareTest(&materialize_true, &materialize_false,
3006 &if_true, &if_false, &fall_through);
3008 __ JumpIfSmi(r0, if_false);
3009 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3010 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3011 Split(ge, if_true, if_false, fall_through);
3013 context()->Plug(if_true, if_false);
3017 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3018 ZoneList<Expression*>* args = expr->arguments();
3019 ASSERT(args->length() == 1);
3021 VisitForAccumulatorValue(args->at(0));
3023 Label materialize_true, materialize_false;
3024 Label* if_true = NULL;
3025 Label* if_false = NULL;
3026 Label* fall_through = NULL;
3027 context()->PrepareTest(&materialize_true, &materialize_false,
3028 &if_true, &if_false, &fall_through);
3030 __ JumpIfSmi(r0, if_false);
3031 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3032 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3033 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(ne, if_true, if_false, fall_through);
3037 context()->Plug(if_true, if_false);
3041 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3042 CallRuntime* expr) {
3043 ZoneList<Expression*>* args = expr->arguments();
3044 ASSERT(args->length() == 1);
3046 VisitForAccumulatorValue(args->at(0));
3048 Label materialize_true, materialize_false, skip_lookup;
3049 Label* if_true = NULL;
3050 Label* if_false = NULL;
3051 Label* fall_through = NULL;
3052 context()->PrepareTest(&materialize_true, &materialize_false,
3053 &if_true, &if_false, &fall_through);
3055 __ AssertNotSmi(r0);
3057 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3058 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3059 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3060 __ b(ne, &skip_lookup);
3062 // Check for fast case object. Generate false result for slow case object.
3063 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3064 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3065 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3069 // Look for valueOf name in the descriptor array, and indicate false if
3070 // found. Since we omit an enumeration index check, if it is added via a
3071 // transition that shares its descriptor array, this is a false positive.
3072 Label entry, loop, done;
3074 // Skip loop if no descriptors are valid.
3075 __ NumberOfOwnDescriptors(r3, r1);
3076 __ cmp(r3, Operand::Zero());
3079 __ LoadInstanceDescriptors(r1, r4);
3080 // r4: descriptor array.
3081 // r3: valid entries in the descriptor array.
3082 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3084 // Calculate location of the first key name.
3085 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3086 // Calculate the end of the descriptor array.
3088 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
3090 // Loop through all the keys in the descriptor array. If one of these is the
3091 // string "valueOf" the result is false.
3092 // The use of ip to store the valueOf string assumes that it is not otherwise
3093 // used in the loop below.
3094 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3097 __ ldr(r3, MemOperand(r4, 0));
3100 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3102 __ cmp(r4, Operand(r2));
3107 // Set the bit in the map to indicate that there is no local valueOf field.
3108 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3109 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3110 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3112 __ bind(&skip_lookup);
  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
3116 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3117 __ JumpIfSmi(r2, if_false);
3118 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3119 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3120 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3121 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3123 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124 Split(eq, if_true, if_false, fall_through);
3126 context()->Plug(if_true, if_false);
3130 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3131 ZoneList<Expression*>* args = expr->arguments();
3132 ASSERT(args->length() == 1);
3134 VisitForAccumulatorValue(args->at(0));
3136 Label materialize_true, materialize_false;
3137 Label* if_true = NULL;
3138 Label* if_false = NULL;
3139 Label* fall_through = NULL;
3140 context()->PrepareTest(&materialize_true, &materialize_false,
3141 &if_true, &if_false, &fall_through);
3143 __ JumpIfSmi(r0, if_false);
3144 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3145 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3146 Split(eq, if_true, if_false, fall_through);
3148 context()->Plug(if_true, if_false);
3152 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3153 ZoneList<Expression*>* args = expr->arguments();
3154 ASSERT(args->length() == 1);
3156 VisitForAccumulatorValue(args->at(0));
3158 Label materialize_true, materialize_false;
3159 Label* if_true = NULL;
3160 Label* if_false = NULL;
3161 Label* fall_through = NULL;
3162 context()->PrepareTest(&materialize_true, &materialize_false,
3163 &if_true, &if_false, &fall_through);
3165 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3166 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3167 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3168 __ cmp(r2, Operand(0x80000000));
3169 __ cmp(r1, Operand(0x00000000), eq);
3171 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3172 Split(eq, if_true, if_false, fall_through);
3174 context()->Plug(if_true, if_false);
3178 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3179 ZoneList<Expression*>* args = expr->arguments();
3180 ASSERT(args->length() == 1);
3182 VisitForAccumulatorValue(args->at(0));
3184 Label materialize_true, materialize_false;
3185 Label* if_true = NULL;
3186 Label* if_false = NULL;
3187 Label* fall_through = NULL;
3188 context()->PrepareTest(&materialize_true, &materialize_false,
3189 &if_true, &if_false, &fall_through);
3191 __ JumpIfSmi(r0, if_false);
3192 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3194 Split(eq, if_true, if_false, fall_through);
3196 context()->Plug(if_true, if_false);
3200 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3201 ZoneList<Expression*>* args = expr->arguments();
3202 ASSERT(args->length() == 1);
3204 VisitForAccumulatorValue(args->at(0));
3206 Label materialize_true, materialize_false;
3207 Label* if_true = NULL;
3208 Label* if_false = NULL;
3209 Label* fall_through = NULL;
3210 context()->PrepareTest(&materialize_true, &materialize_false,
3211 &if_true, &if_false, &fall_through);
3213 __ JumpIfSmi(r0, if_false);
3214 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3215 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3216 Split(eq, if_true, if_false, fall_through);
3218 context()->Plug(if_true, if_false);
3223 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3224 ASSERT(expr->arguments()->length() == 0);
3226 Label materialize_true, materialize_false;
3227 Label* if_true = NULL;
3228 Label* if_false = NULL;
3229 Label* fall_through = NULL;
3230 context()->PrepareTest(&materialize_true, &materialize_false,
3231 &if_true, &if_false, &fall_through);
3233 // Get the frame pointer for the calling frame.
3234 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3236 // Skip the arguments adaptor frame if it exists.
3237 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3238 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3239 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3241 // Check the marker in the calling frame.
3242 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3243 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3244 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3245 Split(eq, if_true, if_false, fall_through);
3247 context()->Plug(if_true, if_false);
3251 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments();
3253 ASSERT(args->length() == 2);
3255 // Load the two objects into registers and perform the comparison.
3256 VisitForStackValue(args->at(0));
3257 VisitForAccumulatorValue(args->at(1));
3259 Label materialize_true, materialize_false;
3260 Label* if_true = NULL;
3261 Label* if_false = NULL;
3262 Label* fall_through = NULL;
3263 context()->PrepareTest(&materialize_true, &materialize_false,
3264 &if_true, &if_false, &fall_through);
3268 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269 Split(eq, if_true, if_false, fall_through);
3271 context()->Plug(if_true, if_false);
3275 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 ASSERT(args->length() == 1);
  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
3283 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
3286 context()->Plug(r0);
3290 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3291 ASSERT(expr->arguments()->length() == 0);
3293 // Get the number of formal parameters.
3294 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3296 // Check if the calling frame is an arguments adaptor frame.
3297 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3298 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3299 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
3303 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3305 context()->Plug(r0);
3309 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3310 ZoneList<Expression*>* args = expr->arguments();
3311 ASSERT(args->length() == 1);
3312 Label done, null, function, non_function_constructor;
3314 VisitForAccumulatorValue(args->at(0));
3316 // If the object is a smi, we return null.
3317 __ JumpIfSmi(r0, &null);
3319 // Check that the object is a JS object but take special care of JS
3320 // functions to make sure they have 'Function' as their class.
3321 // Assume that there are only two callable types, and one of them is at
3322 // either end of the type range for JS object types. Saves extra comparisons.
3323 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3324 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3325 // Map is now in r0.
3327 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3328 FIRST_SPEC_OBJECT_TYPE + 1);
3329 __ b(eq, &function);
3331 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3332 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3333 LAST_SPEC_OBJECT_TYPE - 1);
3334 __ b(eq, &function);
3335 // Assume that there is no larger type.
3336 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3338 // Check if the constructor in the map is a JS function.
3339 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3340 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3341 __ b(ne, &non_function_constructor);
3343 // r0 now contains the constructor function. Grab the
3344 // instance class name from there.
3345 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3346 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3349 // Functions have class 'Function'.
3351 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
3354 // Objects with a non-function constructor have class 'Object'.
3355 __ bind(&non_function_constructor);
3356 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3359 // Non-JS objects have class null.
3361 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3366 context()->Plug(r0);
3370 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
3373 // 0 (literal string): The type of logging (corresponds to the flags).
3374 // This is used to determine whether or not to generate the log call.
3375 // 1 (string): Format string. Access the string at argument index 2
3376 // with '%2s' (see Logger::LogRuntime for all the formats).
3377 // 2 (array): Arguments to the format string.
3378 ZoneList<Expression*>* args = expr->arguments();
3379 ASSERT_EQ(args->length(), 3);
3380 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3381 VisitForStackValue(args->at(1));
3382 VisitForStackValue(args->at(2));
3383 __ CallRuntime(Runtime::kLog, 2);
3386 // Finally, we're expected to leave a value on the top of the stack.
3387 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3388 context()->Plug(r0);
3392 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3393 // Load the arguments on the stack and call the stub.
3395 ZoneList<Expression*>* args = expr->arguments();
3396 ASSERT(args->length() == 3);
3397 VisitForStackValue(args->at(0));
3398 VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
3401 context()->Plug(r0);
3405 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3406 // Load the arguments on the stack and call the stub.
3407 RegExpExecStub stub;
3408 ZoneList<Expression*>* args = expr->arguments();
3409 ASSERT(args->length() == 4);
3410 VisitForStackValue(args->at(0));
3411 VisitForStackValue(args->at(1));
3412 VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
3415 context()->Plug(r0);
3419 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3420 ZoneList<Expression*>* args = expr->arguments();
3421 ASSERT(args->length() == 1);
3422 VisitForAccumulatorValue(args->at(0)); // Load the object.
  // If the object is a smi, return the object.
3426 __ JumpIfSmi(r0, &done);
3427 // If the object is not a value type, return the object.
3428 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3429 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3432 context()->Plug(r0);
3436 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3437 ZoneList<Expression*>* args = expr->arguments();
3438 ASSERT(args->length() == 2);
3439 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3440 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3442 VisitForAccumulatorValue(args->at(0)); // Load the object.
3444 Label runtime, done, not_date_object;
3445 Register object = r0;
3446 Register result = r0;
3447 Register scratch0 = r9;
3448 Register scratch1 = r1;
  __ JumpIfSmi(object, &not_date_object);
3451 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ b(ne, &not_date_object);
3454 if (index->value() == 0) {
3455 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3458 if (index->value() < JSDate::kFirstUncachedField) {
3459 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3460 __ mov(scratch1, Operand(stamp));
3461 __ ldr(scratch1, MemOperand(scratch1));
3462 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3463 __ cmp(scratch1, scratch0);
3465 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3466 kPointerSize * index->value()));
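    // Cached fields (index < JSDate::kFirstUncachedField) live directly after
    // the value slot, so field i is at kValueOffset + i * kPointerSize while
    // the date cache stamp still matches.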
3470 __ PrepareCallCFunction(2, scratch1);
3471 __ mov(r1, Operand(index));
3472 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
  __ bind(&not_date_object);
3477 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3479 context()->Plug(r0);
3483 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3485 ASSERT_EQ(3, args->length());
3487 Register string = r0;
3488 Register index = r1;
3489 Register value = r2;
3491 VisitForStackValue(args->at(1)); // index
3492 VisitForStackValue(args->at(2)); // value
3493 VisitForAccumulatorValue(args->at(0)); // string
3494 __ Pop(index, value);
3496 if (FLAG_debug_code) {
3498 __ Check(eq, kNonSmiValue);
3500 __ Check(eq, kNonSmiIndex);
3501 __ SmiUntag(index, index);
3502 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3503 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3504 __ SmiTag(index, index);
3507 __ SmiUntag(value, value);
  __ add(ip, string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3511 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
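  // The LSR by kSmiTagSize untags the smi index while forming the address,
  // so no separate SmiUntag of the index is needed for the store.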
3512 context()->Plug(string);
3516 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3517 ZoneList<Expression*>* args = expr->arguments();
3518 ASSERT_EQ(3, args->length());
3520 Register string = r0;
3521 Register index = r1;
3522 Register value = r2;
3524 VisitForStackValue(args->at(1)); // index
3525 VisitForStackValue(args->at(2)); // value
3526 VisitForAccumulatorValue(args->at(0)); // string
3527 __ Pop(index, value);
3529 if (FLAG_debug_code) {
3531 __ Check(eq, kNonSmiValue);
3533 __ Check(eq, kNonSmiIndex);
3534 __ SmiUntag(index, index);
3535 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3536 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3537 __ SmiTag(index, index);
3540 __ SmiUntag(value, value);
  __ add(ip, string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3544 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3545 __ strh(value, MemOperand(ip, index));
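  // Here the smi index is used as-is: with a one-bit smi tag a smi equals
  // 2 * value, which is exactly the byte offset of a two-byte character.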
3546 context()->Plug(string);
3551 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3552 // Load the arguments on the stack and call the runtime function.
3553 ZoneList<Expression*>* args = expr->arguments();
3554 ASSERT(args->length() == 2);
3555 VisitForStackValue(args->at(0));
3556 VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
3559 context()->Plug(r0);
3563 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3564 ZoneList<Expression*>* args = expr->arguments();
3565 ASSERT(args->length() == 2);
3566 VisitForStackValue(args->at(0)); // Load the object.
3567 VisitForAccumulatorValue(args->at(1)); // Load the value.
3568 __ pop(r1); // r0 = value. r1 = object.
3571 // If the object is a smi, return the value.
3572 __ JumpIfSmi(r1, &done);
3574 // If the object is not a value type, return the value.
3575 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3579 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3580 // Update the write barrier. Save the value as it will be
3581 // overwritten by the write barrier code and is needed afterward.
3583 __ RecordWriteField(
3584 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3587 context()->Plug(r0);
3591 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3592 ZoneList<Expression*>* args = expr->arguments();
3593 ASSERT_EQ(args->length(), 1);
3594 // Load the argument into r0 and call the stub.
3595 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub;
  __ CallStub(&stub);
3599 context()->Plug(r0);
3603 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3604 ZoneList<Expression*>* args = expr->arguments();
3605 ASSERT(args->length() == 1);
3606 VisitForAccumulatorValue(args->at(0));
3609 StringCharFromCodeGenerator generator(r0, r1);
3610 generator.GenerateFast(masm_);
3613 NopRuntimeCallHelper call_helper;
3614 generator.GenerateSlow(masm_, call_helper);
3617 context()->Plug(r1);
3621 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3622 ZoneList<Expression*>* args = expr->arguments();
3623 ASSERT(args->length() == 2);
3624 VisitForStackValue(args->at(0));
3625 VisitForAccumulatorValue(args->at(1));
3627 Register object = r1;
3628 Register index = r0;
3629 Register result = r3;
3633 Label need_conversion;
3634 Label index_out_of_range;
3636 StringCharCodeAtGenerator generator(object,
3641 &index_out_of_range,
3642 STRING_INDEX_IS_NUMBER);
3643 generator.GenerateFast(masm_);
3646 __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
3649 __ LoadRoot(result, Heap::kNanValueRootIndex);
3652 __ bind(&need_conversion);
3653 // Load the undefined value into the result register, which will
3654 // trigger conversion.
3655 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3658 NopRuntimeCallHelper call_helper;
3659 generator.GenerateSlow(masm_, call_helper);
3662 context()->Plug(result);
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
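  // On 32-bit ARM a smi is the payload shifted left by one, so scaling the
  // smi finger by one more bit yields finger * kPointerSize, the byte
  // offset of the cache entry; PreIndex writes the computed effective
  // address back into r3.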
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
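  // smull leaves the low word of the 64-bit product in scratch and the high
  // word in ip.  The byte count must still fit in a smi (31 bits including
  // the sign bit), so the high word must be zero and the sign bit of the
  // low word clear; the checks below verify exactly that.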
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);
  // Get first element in the array to free up the elements register to be
  // used for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch,
                         string,  // used as scratch
                         elements_end,  // used as scratch
                         &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);
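  // Dispatch on the separator length: length zero falls through to the
  // empty-separator loop, a one-character separator is cached as a raw
  // byte, and longer separators are re-copied on every iteration.  Smi
  // comparison is valid here because smi tagging preserves ordering.
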
  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character).  Entry is
  // at the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ ldr(r0, GlobalObjectOperand());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);

    // Load the function from the receiver.
    __ mov(r2, Operand(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
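    // The stack now holds the target function with the builtins receiver on
    // top of it, which is the layout CallFunctionStub expects.
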
    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          ASSERT(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ ldr(r1, MemOperand(sp, 0));
      __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);
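    // This emits the patchable cmp/b pair described by JumpPatchSite above:
    // the branch is initially always taken, so the inlined smi code below
    // is skipped until the IC patches the site into a real smi check.
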
    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
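    // Smis are tagged with a zero bit, so the tagged values can be added
    // directly; SetCC makes the add set the overflow flag, and the branch
    // below keeps the fast path only while the result is a valid smi.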
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         NOT_CONTEXTUAL,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
      __ pop(r1);
      CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(r2, r1);  // r1 = key. r2 = receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->float32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FLOAT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->int32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, INT32x4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(r0, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(r0, Heap::kNullValueRootIndex);
      __ b(eq, if_true);
    }
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
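        // One patchable smi test covers both operands: the orr above sets
        // the low (non-smi) tag bit in r2 iff at least one operand is a
        // heap object.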
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
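  // The delta is smi-tagged below so the stack slot holds no raw code
  // pointer; this keeps the slot safe for the GC to scan while the finally
  // block runs.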
  __ SmiTag(r1);

  // Store cooked return address while executing finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ ldr(r1, MemOperand(ip));
  __ SmiTag(r1);
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  __ SmiUntag(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore cooked return address from stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
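  // Adding the code object base back to the smi-untagged delta reconstructs
  // the absolute return address; writing it directly to pc performs the
  // return.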
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


#undef __

#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

static const int32_t kBranchBeforeInterrupt = 0x5a000004;
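// This encoding must stay in sync with the `bpl` emitted by
// BackEdgeTable::PatchAt below (condition pl, branch offset of four
// instructions).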


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 3 * kInstrSize;
  CodePatcher patcher(branch_address, 1);
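
  // Only the branch/nop word is rewritten through the patcher; the ldr/blx
  // pair that follows stays in place and only its call target, stored in
  // the constant pool, is replaced further down.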
  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  2a 00 00 01       bpl ok
      //  e5 9f c? ??       ldr ip, [pc, <interrupt stub address>]
      //  e1 2f ff 3c       blx ip
      //  ok-label
      patcher.masm()->b(4 * kInstrSize, pl);  // Jump offset is 4 instructions.
      ASSERT_EQ(kBranchBeforeInterrupt, Memory::int32_at(branch_address));
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  e1 a0 00 00       mov r0, r0 (NOP)
      //  e5 9f c? ??       ldr ip, [pc, <on-stack replacement address>]
      //  e1 2f ff 3c       blx ip
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  Address pc_immediate_load_address = pc - 2 * kInstrSize;
  // Replace the call address.
  uint32_t interrupt_address_offset =
      Memory::uint16_at(pc_immediate_load_address) & 0xfff;
  Address interrupt_address_pointer = pc + interrupt_address_offset;
  Memory::uint32_at(interrupt_address_pointer) =
      reinterpret_cast<uint32_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);

  Address branch_address = pc - 3 * kInstrSize;
  Address pc_immediate_load_address = pc - 2 * kInstrSize;
  uint32_t interrupt_address_offset =
      Memory::uint16_at(pc_immediate_load_address) & 0xfff;
  Address interrupt_address_pointer = pc + interrupt_address_offset;
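
  // The state of a back-edge site is decoded from the code itself: a bpl in
  // the branch slot means the interrupt check is still active, a nop means
  // the site was patched for OSR, and the two OSR variants are told apart
  // by the call target stored in the constant pool.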
  if (Memory::int32_at(branch_address) == kBranchBeforeInterrupt) {
    ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    ASSERT(Assembler::IsLdrPcImmediateOffset(
        Assembler::instr_at(pc_immediate_load_address)));
    return INTERRUPT;
  }

  ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));
  ASSERT(Assembler::IsLdrPcImmediateOffset(
      Assembler::instr_at(pc_immediate_load_address)));

  if (Memory::uint32_at(interrupt_address_pointer) ==
      reinterpret_cast<uint32_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM