1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-stubs.h"
10 #include "src/codegen.h"
11 #include "src/compiler.h"
12 #include "src/debug.h"
13 #include "src/full-codegen.h"
14 #include "src/isolate-inl.h"
15 #include "src/parser.h"
16 #include "src/scopes.h"
18 #include "src/arm/code-stubs-arm.h"
19 #include "src/arm/macro-assembler-arm.h"
24 #define __ ACCESS_MASM(masm_)
27 // A patch site is a location in the code that can be patched. This class
28 // has a number of methods to emit the patchable code and the method
29 // EmitPatchInfo to record a marker back to the patchable code. This marker
30 // is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (using the raw
31 // 12-bit immediate value) is the delta, in instructions, from the pc to the
32 // first instruction of the patchable code.
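// (Added note, not from the original source: for example, if the inlined smi
// code starts five instructions before the marker's pc, EmitPatchInfo below
// encodes the marker as cmp r0, #5, since 0 * 0xfff + 5 == 5.)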
33 class JumpPatchSite BASE_EMBEDDED {
35 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
37 info_emitted_ = false;
42 DCHECK(patch_site_.is_bound() == info_emitted_);
45 // When initially emitted, this generates a jump that is always taken,
46 // skipping the inlined smi code.
47 void EmitJumpIfNotSmi(Register reg, Label* target) {
48 DCHECK(!patch_site_.is_bound() && !info_emitted_);
49 Assembler::BlockConstPoolScope block_const_pool(masm_);
50 __ bind(&patch_site_);
51 __ cmp(reg, Operand(reg));
52 __ b(eq, target); // Always taken before patched.
55 // When initially emitted, this generates a jump that is never taken, so
56 // the inlined smi code is not skipped.
57 void EmitJumpIfSmi(Register reg, Label* target) {
58 DCHECK(!patch_site_.is_bound() && !info_emitted_);
59 Assembler::BlockConstPoolScope block_const_pool(masm_);
60 __ bind(&patch_site_);
61 __ cmp(reg, Operand(reg));
62 __ b(ne, target); // Never taken before patched.
65 void EmitPatchInfo() {
66 // Block literal pool emission whilst recording patch site information.
67 Assembler::BlockConstPoolScope block_const_pool(masm_);
68 if (patch_site_.is_bound()) {
69 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
71 reg.set_code(delta_to_patch_site / kOff12Mask);
72 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
77 __ nop(); // Signals no inlined code.
82 MacroAssembler* masm_;
90 // Generate code for a JS function. On entry to the function the receiver
91 // and arguments have been pushed on the stack left to right. The actual
92 // argument count matches the formal parameter count expected by the function.
95 // The live registers are:
96 // o r1: the JS function object being called (i.e., ourselves)
98 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
99 // o fp: our caller's frame pointer
100 // o sp: stack pointer
101 // o lr: return address
103 // The function builds a JS frame. Please see JavaScriptFrameConstants in
104 // frames-arm.h for its layout.
105 void FullCodeGenerator::Generate() {
106 CompilationInfo* info = info_;
108 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
110 profiling_counter_ = isolate()->factory()->NewCell(
111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
112 SetFunctionPosition(function());
113 Comment cmnt(masm_, "[ function compiled by full code generator");
115 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
118 if (strlen(FLAG_stop_at) > 0 &&
119 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
124 // Sloppy mode functions and builtins need to replace the receiver with the
125 // global proxy when called as functions (without an explicit receiver object).
127 if (info->strict_mode() == SLOPPY && !info->is_native()) {
129 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
130 __ ldr(r2, MemOperand(sp, receiver_offset));
131 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
134 __ ldr(r2, GlobalObjectOperand());
135 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
137 __ str(r2, MemOperand(sp, receiver_offset));
142 // Open a frame scope to indicate that there is a frame on the stack. The
143 // MANUAL indicates that the scope shouldn't actually generate code to set up
144 // the frame (that is done below).
145 FrameScope frame_scope(masm_, StackFrame::MANUAL);
147 info->set_prologue_offset(masm_->pc_offset());
148 __ Prologue(info->IsCodePreAgingActive());
149 info->AddNoFrameRange(0, masm_->pc_offset());
151 { Comment cmnt(masm_, "[ Allocate locals");
152 int locals_count = info->scope()->num_stack_slots();
153 // Generators allocate locals, if any, in context slots.
154 DCHECK(!info->function()->is_generator() || locals_count == 0);
155 if (locals_count > 0) {
156 if (locals_count >= 128) {
158 __ sub(r9, sp, Operand(locals_count * kPointerSize));
159 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
160 __ cmp(r9, Operand(r2));
162 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
165 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
166 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
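// (Added worked example: with 70 stack locals and kMaxPushes == 32, the loop
// below runs 70 / 32 = 2 iterations of 32 pushes each, and the tail emits the
// remaining 70 % 32 = 6 pushes individually.)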
167 if (locals_count >= kMaxPushes) {
168 int loop_iterations = locals_count / kMaxPushes;
169 __ mov(r2, Operand(loop_iterations));
171 __ bind(&loop_header);
173 for (int i = 0; i < kMaxPushes; i++) {
176 // Continue loop if not done.
177 __ sub(r2, r2, Operand(1), SetCC);
178 __ b(&loop_header, ne);
180 int remaining = locals_count % kMaxPushes;
181 // Emit the remaining pushes.
182 for (int i = 0; i < remaining; i++) {
188 bool function_in_register = true;
190 // Possibly allocate a local context.
191 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
192 if (heap_slots > 0) {
193 // Argument to NewContext is the function, which is still in r1.
194 Comment cmnt(masm_, "[ Allocate context");
195 bool need_write_barrier = true;
196 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
198 __ Push(info->scope()->GetScopeInfo());
199 __ CallRuntime(Runtime::kNewGlobalContext, 2);
200 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
201 FastNewContextStub stub(isolate(), heap_slots);
203 // Result of FastNewContextStub is always in new space.
204 need_write_barrier = false;
207 __ CallRuntime(Runtime::kNewFunctionContext, 1);
209 function_in_register = false;
210 // Context is returned in r0. It replaces the context passed to us.
211 // It's saved in the stack and kept live in cp.
213 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
214 // Copy any necessary parameters into the context.
215 int num_parameters = info->scope()->num_parameters();
216 for (int i = 0; i < num_parameters; i++) {
217 Variable* var = scope()->parameter(i);
218 if (var->IsContextSlot()) {
219 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
220 (num_parameters - 1 - i) * kPointerSize;
221 // Load parameter from stack.
222 __ ldr(r0, MemOperand(fp, parameter_offset));
223 // Store it in the context.
224 MemOperand target = ContextOperand(cp, var->index());
227 // Update the write barrier.
228 if (need_write_barrier) {
229 __ RecordWriteContextSlot(
230 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
231 } else if (FLAG_debug_code) {
233 __ JumpIfInNewSpace(cp, r0, &done);
234 __ Abort(kExpectedNewSpaceObject);
241 Variable* arguments = scope()->arguments();
242 if (arguments != NULL) {
243 // Function uses arguments object.
244 Comment cmnt(masm_, "[ Allocate arguments object");
245 if (!function_in_register) {
246 // Load this again, if it's used by the local context below.
247 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
251 // Receiver is just before the parameters on the caller's stack.
252 int num_parameters = info->scope()->num_parameters();
253 int offset = num_parameters * kPointerSize;
255 Operand(StandardFrameConstants::kCallerSPOffset + offset));
256 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
259 // Arguments to ArgumentsAccessStub:
260 // function, receiver address, parameter count.
261 // The stub will rewrite the receiver and parameter count if the previous
262 // stack frame was an arguments adapter frame.
263 ArgumentsAccessStub::Type type;
264 if (strict_mode() == STRICT) {
265 type = ArgumentsAccessStub::NEW_STRICT;
266 } else if (function()->has_duplicate_parameters()) {
267 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
269 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
271 ArgumentsAccessStub stub(isolate(), type);
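// (Added summary, inferred from the stub names rather than the original text:
// NEW_STRICT builds an unmapped arguments object, NEW_SLOPPY_FAST a mapped
// one, and NEW_SLOPPY_SLOW covers sloppy functions whose duplicate parameters
// cannot be aliased.)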
274 SetVar(arguments, r0, r1, r2);
278 __ CallRuntime(Runtime::kTraceEnter, 0);
281 // Visit the declarations and body unless there is an illegal redeclaration.
283 if (scope()->HasIllegalRedeclaration()) {
284 Comment cmnt(masm_, "[ Declarations");
285 scope()->VisitIllegalRedeclaration(this);
288 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
289 { Comment cmnt(masm_, "[ Declarations");
290 // For named function expressions, declare the function name as a constant.
292 if (scope()->is_function_scope() && scope()->function() != NULL) {
293 VariableDeclaration* function = scope()->function();
294 DCHECK(function->proxy()->var()->mode() == CONST ||
295 function->proxy()->var()->mode() == CONST_LEGACY);
296 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
297 VisitVariableDeclaration(function);
299 VisitDeclarations(scope()->declarations());
302 { Comment cmnt(masm_, "[ Stack check");
303 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
305 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
306 __ cmp(sp, Operand(ip));
308 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
309 PredictableCodeSizeScope predictable(masm_,
310 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
311 __ Call(stack_check, RelocInfo::CODE_TARGET);
315 { Comment cmnt(masm_, "[ Body");
316 DCHECK(loop_depth() == 0);
317 VisitStatements(function()->body());
318 DCHECK(loop_depth() == 0);
322 // Always emit a 'return undefined' in case control fell off the end of the body.
324 { Comment cmnt(masm_, "[ return <undefined>;");
325 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
327 EmitReturnSequence();
329 // Force emit the constant pool, so it doesn't get emitted in the middle
330 // of the back edge table.
331 masm()->CheckConstPool(true, false);
335 void FullCodeGenerator::ClearAccumulator() {
336 __ mov(r0, Operand(Smi::FromInt(0)));
340 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
341 __ mov(r2, Operand(profiling_counter_));
342 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
343 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
344 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
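// (Added note: the SetCC above leaves the condition flags reflecting the
// decremented value, so callers can branch on its sign without reloading the
// counter cell.)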
348 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
349 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
351 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
355 void FullCodeGenerator::EmitProfilingCounterReset() {
356 Assembler::BlockConstPoolScope block_const_pool(masm_);
357 PredictableCodeSizeScope predictable_code_size_scope(
358 masm_, kProfileCounterResetSequenceLength);
361 int reset_value = FLAG_interrupt_budget;
362 if (info_->is_debug()) {
363 // Detect debug break requests as soon as possible.
364 reset_value = FLAG_interrupt_budget >> 4;
366 __ mov(r2, Operand(profiling_counter_));
367 // The mov instruction above can take between 1 and 3 instructions (ARMv7) or
368 // between 1 and 5 instructions (ARMv6), depending on whether it uses an
369 // extended constant pool - insert nops to compensate.
370 int expected_instr_count =
371 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
372 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
373 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
376 __ mov(r3, Operand(Smi::FromInt(reset_value)));
377 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
381 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
382 Label* back_edge_target) {
383 Comment cmnt(masm_, "[ Back edge bookkeeping");
384 // Block literal pools whilst emitting back edge code.
385 Assembler::BlockConstPoolScope block_const_pool(masm_);
388 DCHECK(back_edge_target->is_bound());
389 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
390 int weight = Min(kMaxBackEdgeWeight,
391 Max(1, distance / kCodeSizeMultiplier));
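// (Added example: a back edge spanning 3 * kCodeSizeMultiplier bytes of code
// yields a weight of 3, clamped to the range [1, kMaxBackEdgeWeight].)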
392 EmitProfilingCounterDecrement(weight);
394 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
396 // Record a mapping of this PC offset to the OSR id. This is used to find
397 // the AST id from the unoptimized code in order to use it as a key into
398 // the deoptimization input data found in the optimized code.
399 RecordBackEdge(stmt->OsrEntryId());
401 EmitProfilingCounterReset();
404 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
405 // Record a mapping of the OSR id to this PC. This is used if the OSR
406 // entry becomes the target of a bailout. We don't expect it to be, but
407 // we want it to work if it is.
408 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
412 void FullCodeGenerator::EmitReturnSequence() {
413 Comment cmnt(masm_, "[ Return sequence");
414 if (return_label_.is_bound()) {
415 __ b(&return_label_);
417 __ bind(&return_label_);
419 // Push the return value on the stack as the parameter.
420 // Runtime::TraceExit returns its parameter in r0.
422 __ CallRuntime(Runtime::kTraceExit, 1);
424 // Pretend that the exit is a backwards jump to the entry.
426 if (info_->ShouldSelfOptimize()) {
427 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
429 int distance = masm_->pc_offset();
430 weight = Min(kMaxBackEdgeWeight,
431 Max(1, distance / kCodeSizeMultiplier));
433 EmitProfilingCounterDecrement(weight);
437 __ Call(isolate()->builtins()->InterruptCheck(),
438 RelocInfo::CODE_TARGET);
440 EmitProfilingCounterReset();
444 // Add a label for checking the size of the code used for returning.
445 Label check_exit_codesize;
446 __ bind(&check_exit_codesize);
448 // Make sure that the constant pool is not emitted inside of the return sequence.
450 { Assembler::BlockConstPoolScope block_const_pool(masm_);
451 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
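// (Added note: the "+ 1" accounts for the receiver slot, which is dropped
// together with the parameters.)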
452 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
453 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
454 PredictableCodeSizeScope predictable(masm_, -1);
456 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
457 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
458 __ add(sp, sp, Operand(sp_delta));
460 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
465 // Check that the size of the code used for returning is large enough
466 // for the debugger's requirements.
467 DCHECK(Assembler::kJSReturnSequenceInstructions <=
468 masm_->InstructionsGeneratedSince(&check_exit_codesize));
474 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
475 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
479 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
480 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
481 codegen()->GetVar(result_register(), var);
485 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
486 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
487 codegen()->GetVar(result_register(), var);
488 __ push(result_register());
492 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
493 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
494 // For simplicity we always test the accumulator register.
495 codegen()->GetVar(result_register(), var);
496 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
497 codegen()->DoTest(this);
501 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
505 void FullCodeGenerator::AccumulatorValueContext::Plug(
506 Heap::RootListIndex index) const {
507 __ LoadRoot(result_register(), index);
511 void FullCodeGenerator::StackValueContext::Plug(
512 Heap::RootListIndex index) const {
513 __ LoadRoot(result_register(), index);
514 __ push(result_register());
518 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
519 codegen()->PrepareForBailoutBeforeSplit(condition(),
523 if (index == Heap::kUndefinedValueRootIndex ||
524 index == Heap::kNullValueRootIndex ||
525 index == Heap::kFalseValueRootIndex) {
526 if (false_label_ != fall_through_) __ b(false_label_);
527 } else if (index == Heap::kTrueValueRootIndex) {
528 if (true_label_ != fall_through_) __ b(true_label_);
530 __ LoadRoot(result_register(), index);
531 codegen()->DoTest(this);
536 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
540 void FullCodeGenerator::AccumulatorValueContext::Plug(
541 Handle<Object> lit) const {
542 __ mov(result_register(), Operand(lit));
546 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
547 // Immediates cannot be pushed directly.
548 __ mov(result_register(), Operand(lit));
549 __ push(result_register());
553 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
554 codegen()->PrepareForBailoutBeforeSplit(condition(),
558 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
559 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
560 if (false_label_ != fall_through_) __ b(false_label_);
561 } else if (lit->IsTrue() || lit->IsJSObject()) {
562 if (true_label_ != fall_through_) __ b(true_label_);
563 } else if (lit->IsString()) {
564 if (String::cast(*lit)->length() == 0) {
565 if (false_label_ != fall_through_) __ b(false_label_);
567 if (true_label_ != fall_through_) __ b(true_label_);
569 } else if (lit->IsSmi()) {
570 if (Smi::cast(*lit)->value() == 0) {
571 if (false_label_ != fall_through_) __ b(false_label_);
573 if (true_label_ != fall_through_) __ b(true_label_);
576 // For simplicity we always test the accumulator register.
577 __ mov(result_register(), Operand(lit));
578 codegen()->DoTest(this);
583 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
584 Register reg) const {
590 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
592 Register reg) const {
595 __ Move(result_register(), reg);
599 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
600 Register reg) const {
602 if (count > 1) __ Drop(count - 1);
603 __ str(reg, MemOperand(sp, 0));
607 void FullCodeGenerator::TestContext::DropAndPlug(int count,
608 Register reg) const {
610 // For simplicity we always test the accumulator register.
612 __ Move(result_register(), reg);
613 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
614 codegen()->DoTest(this);
618 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
619 Label* materialize_false) const {
620 DCHECK(materialize_true == materialize_false);
621 __ bind(materialize_true);
625 void FullCodeGenerator::AccumulatorValueContext::Plug(
626 Label* materialize_true,
627 Label* materialize_false) const {
629 __ bind(materialize_true);
630 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
632 __ bind(materialize_false);
633 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
638 void FullCodeGenerator::StackValueContext::Plug(
639 Label* materialize_true,
640 Label* materialize_false) const {
642 __ bind(materialize_true);
643 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
645 __ bind(materialize_false);
646 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
652 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
653 Label* materialize_false) const {
654 DCHECK(materialize_true == true_label_);
655 DCHECK(materialize_false == false_label_);
659 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
663 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
664 Heap::RootListIndex value_root_index =
665 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
666 __ LoadRoot(result_register(), value_root_index);
670 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
671 Heap::RootListIndex value_root_index =
672 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
673 __ LoadRoot(ip, value_root_index);
678 void FullCodeGenerator::TestContext::Plug(bool flag) const {
679 codegen()->PrepareForBailoutBeforeSplit(condition(),
684 if (true_label_ != fall_through_) __ b(true_label_);
686 if (false_label_ != fall_through_) __ b(false_label_);
691 void FullCodeGenerator::DoTest(Expression* condition,
694 Label* fall_through) {
695 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
696 CallIC(ic, condition->test_id());
697 __ tst(result_register(), result_register());
698 Split(ne, if_true, if_false, fall_through);
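// (Added note: the ToBoolean IC is expected to leave a nonzero value in the
// result register exactly when the condition is truthy, hence the tst/ne
// split above.)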
702 void FullCodeGenerator::Split(Condition cond,
705 Label* fall_through) {
706 if (if_false == fall_through) {
708 } else if (if_true == fall_through) {
709 __ b(NegateCondition(cond), if_false);
717 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
718 DCHECK(var->IsStackAllocated());
719 // Offset is negative because higher indexes are at lower addresses.
720 int offset = -var->index() * kPointerSize;
721 // Adjust by a (parameter or local) base offset.
722 if (var->IsParameter()) {
723 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
725 offset += JavaScriptFrameConstants::kLocal0Offset;
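// (Added note: parameters therefore resolve to positive fp offsets in the
// caller's part of the frame, while stack locals resolve to offsets at or
// below kLocal0Offset.)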
727 return MemOperand(fp, offset);
731 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
732 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
733 if (var->IsContextSlot()) {
734 int context_chain_length = scope()->ContextChainLength(var->scope());
735 __ LoadContext(scratch, context_chain_length);
736 return ContextOperand(scratch, var->index());
738 return StackOperand(var);
743 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
744 // Use destination as scratch.
745 MemOperand location = VarOperand(var, dest);
746 __ ldr(dest, location);
750 void FullCodeGenerator::SetVar(Variable* var,
754 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
755 DCHECK(!scratch0.is(src));
756 DCHECK(!scratch0.is(scratch1));
757 DCHECK(!scratch1.is(src));
758 MemOperand location = VarOperand(var, scratch0);
759 __ str(src, location);
761 // Emit the write barrier code if the location is in the heap.
762 if (var->IsContextSlot()) {
763 __ RecordWriteContextSlot(scratch0,
773 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
774 bool should_normalize,
777 // Only prepare for bailouts before splits if we're in a test
778 // context. Otherwise, we let the Visit function deal with the
779 // preparation to avoid preparing with the same AST id twice.
780 if (!context()->IsTest() || !info_->IsOptimizable()) return;
783 if (should_normalize) __ b(&skip);
784 PrepareForBailout(expr, TOS_REG);
785 if (should_normalize) {
786 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
788 Split(eq, if_true, if_false, NULL);
794 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
795 // The variable in the declaration always resides in the current function context.
797 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
798 if (generate_debug_code_) {
799 // Check that we're not inside a with or catch context.
800 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
801 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
802 __ Check(ne, kDeclarationInWithContext);
803 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
804 __ Check(ne, kDeclarationInCatchContext);
809 void FullCodeGenerator::VisitVariableDeclaration(
810 VariableDeclaration* declaration) {
811 // If it was not possible to allocate the variable at compile time, we
812 // need to "declare" it at runtime to make sure it actually exists in the dynamic scope chain.
814 VariableProxy* proxy = declaration->proxy();
815 VariableMode mode = declaration->mode();
816 Variable* variable = proxy->var();
817 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
818 switch (variable->location()) {
819 case Variable::UNALLOCATED:
820 globals_->Add(variable->name(), zone());
821 globals_->Add(variable->binding_needs_init()
822 ? isolate()->factory()->the_hole_value()
823 : isolate()->factory()->undefined_value(),
827 case Variable::PARAMETER:
828 case Variable::LOCAL:
830 Comment cmnt(masm_, "[ VariableDeclaration");
831 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
832 __ str(ip, StackOperand(variable));
836 case Variable::CONTEXT:
838 Comment cmnt(masm_, "[ VariableDeclaration");
839 EmitDebugCheckDeclarationContext(variable);
840 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
841 __ str(ip, ContextOperand(cp, variable->index()));
842 // No write barrier since the_hole_value is in old space.
843 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
847 case Variable::LOOKUP: {
848 Comment cmnt(masm_, "[ VariableDeclaration");
849 __ mov(r2, Operand(variable->name()));
850 // Declaration nodes are always introduced in one of four modes.
851 DCHECK(IsDeclaredVariableMode(mode));
852 PropertyAttributes attr =
853 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
854 __ mov(r1, Operand(Smi::FromInt(attr)));
855 // Push initial value, if any.
856 // Note: For variables we must not push an initial value (such as
857 // 'undefined') because we may have a (legal) redeclaration and we
858 // must not destroy the current value.
860 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
861 __ Push(cp, r2, r1, r0);
863 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
864 __ Push(cp, r2, r1, r0);
866 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
873 void FullCodeGenerator::VisitFunctionDeclaration(
874 FunctionDeclaration* declaration) {
875 VariableProxy* proxy = declaration->proxy();
876 Variable* variable = proxy->var();
877 switch (variable->location()) {
878 case Variable::UNALLOCATED: {
879 globals_->Add(variable->name(), zone());
880 Handle<SharedFunctionInfo> function =
881 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
882 // Check for stack-overflow exception.
883 if (function.is_null()) return SetStackOverflow();
884 globals_->Add(function, zone());
888 case Variable::PARAMETER:
889 case Variable::LOCAL: {
890 Comment cmnt(masm_, "[ FunctionDeclaration");
891 VisitForAccumulatorValue(declaration->fun());
892 __ str(result_register(), StackOperand(variable));
896 case Variable::CONTEXT: {
897 Comment cmnt(masm_, "[ FunctionDeclaration");
898 EmitDebugCheckDeclarationContext(variable);
899 VisitForAccumulatorValue(declaration->fun());
900 __ str(result_register(), ContextOperand(cp, variable->index()));
901 int offset = Context::SlotOffset(variable->index());
902 // We know that we have written a function, which is not a smi.
903 __ RecordWriteContextSlot(cp,
911 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
915 case Variable::LOOKUP: {
916 Comment cmnt(masm_, "[ FunctionDeclaration");
917 __ mov(r2, Operand(variable->name()));
918 __ mov(r1, Operand(Smi::FromInt(NONE)));
920 // Push initial value for function declaration.
921 VisitForStackValue(declaration->fun());
922 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
929 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
930 Variable* variable = declaration->proxy()->var();
931 DCHECK(variable->location() == Variable::CONTEXT);
932 DCHECK(variable->interface()->IsFrozen());
934 Comment cmnt(masm_, "[ ModuleDeclaration");
935 EmitDebugCheckDeclarationContext(variable);
937 // Load instance object.
938 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
939 __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
940 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
943 __ str(r1, ContextOperand(cp, variable->index()));
944 // We know that we have written a module, which is not a smi.
945 __ RecordWriteContextSlot(cp,
946 Context::SlotOffset(variable->index()),
953 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
955 // Traverse into body.
956 Visit(declaration->module());
960 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
961 VariableProxy* proxy = declaration->proxy();
962 Variable* variable = proxy->var();
963 switch (variable->location()) {
964 case Variable::UNALLOCATED:
968 case Variable::CONTEXT: {
969 Comment cmnt(masm_, "[ ImportDeclaration");
970 EmitDebugCheckDeclarationContext(variable);
975 case Variable::PARAMETER:
976 case Variable::LOCAL:
977 case Variable::LOOKUP:
983 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
988 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
989 // Call the runtime to declare the globals.
990 // The context is the first argument.
991 __ mov(r1, Operand(pairs));
992 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
994 __ CallRuntime(Runtime::kDeclareGlobals, 3);
995 // Return value is ignored.
999 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1000 // Call the runtime to declare the modules.
1001 __ Push(descriptions);
1002 __ CallRuntime(Runtime::kDeclareModules, 1);
1003 // Return value is ignored.
1007 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1008 Comment cmnt(masm_, "[ SwitchStatement");
1009 Breakable nested_statement(this, stmt);
1010 SetStatementPosition(stmt);
1012 // Keep the switch value on the stack until a case matches.
1013 VisitForStackValue(stmt->tag());
1014 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1016 ZoneList<CaseClause*>* clauses = stmt->cases();
1017 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1019 Label next_test; // Recycled for each test.
1020 // Compile all the tests with branches to their bodies.
1021 for (int i = 0; i < clauses->length(); i++) {
1022 CaseClause* clause = clauses->at(i);
1023 clause->body_target()->Unuse();
1025 // The default is not a test, but remember it as final fall through.
1026 if (clause->is_default()) {
1027 default_clause = clause;
1031 Comment cmnt(masm_, "[ Case comparison");
1032 __ bind(&next_test);
1035 // Compile the label expression.
1036 VisitForAccumulatorValue(clause->label());
1038 // Perform the comparison as if via '==='.
1039 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
1040 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1041 JumpPatchSite patch_site(masm_);
1042 if (inline_smi_code) {
1045 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1048 __ b(ne, &next_test);
1049 __ Drop(1); // Switch value is no longer needed.
1050 __ b(clause->body_target());
1051 __ bind(&slow_case);
1054 // Record position before stub call for type feedback.
1055 SetSourcePosition(clause->position());
1056 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1057 CallIC(ic, clause->CompareId());
1058 patch_site.EmitPatchInfo();
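// (Added note: EmitPatchInfo records the cmp marker described at the top of
// this file so the IC patcher can later find and enable the inlined smi
// comparison emitted through patch_site above.)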
1062 PrepareForBailout(clause, TOS_REG);
1063 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1065 __ b(ne, &next_test);
1067 __ jmp(clause->body_target());
1070 __ cmp(r0, Operand::Zero());
1071 __ b(ne, &next_test);
1072 __ Drop(1); // Switch value is no longer needed.
1073 __ b(clause->body_target());
1076 // Discard the test value and jump to the default if present, otherwise to
1077 // the end of the statement.
1078 __ bind(&next_test);
1079 __ Drop(1); // Switch value is no longer needed.
1080 if (default_clause == NULL) {
1081 __ b(nested_statement.break_label());
1083 __ b(default_clause->body_target());
1086 // Compile all the case bodies.
1087 for (int i = 0; i < clauses->length(); i++) {
1088 Comment cmnt(masm_, "[ Case body");
1089 CaseClause* clause = clauses->at(i);
1090 __ bind(clause->body_target());
1091 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1092 VisitStatements(clause->statements());
1095 __ bind(nested_statement.break_label());
1096 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1100 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1101 Comment cmnt(masm_, "[ ForInStatement");
1102 int slot = stmt->ForInFeedbackSlot();
1103 SetStatementPosition(stmt);
1106 ForIn loop_statement(this, stmt);
1107 increment_loop_depth();
1109 // Get the object to enumerate over. If the object is null or undefined, skip
1110 // over the loop. See ECMA-262 version 5, section 12.6.4.
1111 VisitForAccumulatorValue(stmt->enumerable());
1112 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1115 Register null_value = r5;
1116 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1117 __ cmp(r0, null_value);
1120 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1122 // Convert the object to a JS object.
1123 Label convert, done_convert;
1124 __ JumpIfSmi(r0, &convert);
1125 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1126 __ b(ge, &done_convert);
1129 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1130 __ bind(&done_convert);
1133 // Check for proxies.
1135 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1136 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1137 __ b(le, &call_runtime);
1139 // Check cache validity in generated code. This is a fast case for
1140 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1141 // guarantee cache validity, call the runtime system to check cache
1142 // validity or get the property names in a fixed array.
1143 __ CheckEnumCache(null_value, &call_runtime);
1145 // The enum cache is valid. Load the map of the object being
1146 // iterated over and use the cache for the iteration.
1148 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1151 // Get the set of properties to enumerate.
1152 __ bind(&call_runtime);
1153 __ push(r0); // Duplicate the enumerable object on the stack.
1154 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1156 // If we got a map from the runtime call, we can do a fast
1157 // modification check. Otherwise, we got a fixed array, and we have
1158 // to do a slow check.
1160 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1161 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1163 __ b(ne, &fixed_array);
1165 // We got a map in register r0. Get the enumeration cache from it.
1166 Label no_descriptors;
1167 __ bind(&use_cache);
1169 __ EnumLength(r1, r0);
1170 __ cmp(r1, Operand(Smi::FromInt(0)));
1171 __ b(eq, &no_descriptors);
1173 __ LoadInstanceDescriptors(r0, r2);
1174 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1175 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1177 // Set up the four remaining stack slots.
1178 __ push(r0); // Map.
1179 __ mov(r0, Operand(Smi::FromInt(0)));
1180 // Push enumeration cache, enumeration cache length (as smi) and zero.
1181 __ Push(r2, r1, r0);
1184 __ bind(&no_descriptors);
1188 // We got a fixed array in register r0. Iterate through that.
1190 __ bind(&fixed_array);
1192 __ Move(r1, FeedbackVector());
1193 __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
1194 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
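// (Added inference, not an original comment: storing the megamorphic sentinel
// here presumably records that the slow, fixed-array path was taken, since it
// yields no useful map feedback for this slot.)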
1196 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1197 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1198 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1199 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1200 __ b(gt, &non_proxy);
1201 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1202 __ bind(&non_proxy);
1203 __ Push(r1, r0); // Smi and array
1204 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1205 __ mov(r0, Operand(Smi::FromInt(0)));
1206 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
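// (Added summary of the loop state on the stack, as read back below: sp+4
// holds the enumerable object, sp+3 the expected map or a smi flag, sp+2 the
// enum cache or fixed array, sp+1 its length, and sp+0 the current index,
// offsets in units of kPointerSize.)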
1208 // Generate code for doing the condition check.
1209 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1211 // Load the current count to r0, load the length to r1.
1212 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1213 __ cmp(r0, r1); // Compare to the array length.
1214 __ b(hs, loop_statement.break_label());
1216 // Get the current entry of the array into register r3.
1217 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1218 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1219 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
1221 // Get the expected map from the stack (or, in the permanent slow case,
1222 // a smi) into register r2.
1223 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1225 // Check if the expected map still matches that of the enumerable.
1226 // If not, we may have to filter the key.
1228 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1229 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1230 __ cmp(r4, Operand(r2));
1231 __ b(eq, &update_each);
1233 // For proxies, no filtering is done.
1234 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1235 __ cmp(r2, Operand(Smi::FromInt(0)));
1236 __ b(eq, &update_each);
1238 // Convert the entry to a string or (smi) 0 if it isn't a property
1239 // any more. If the property has been removed while iterating, we just skip it.
1241 __ push(r1); // Enumerable.
1242 __ push(r3); // Current entry.
1243 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1244 __ mov(r3, Operand(r0), SetCC);
1245 __ b(eq, loop_statement.continue_label());
1247 // Update the 'each' property or variable from the possibly filtered
1248 // entry in register r3.
1249 __ bind(&update_each);
1250 __ mov(result_register(), r3);
1251 // Perform the assignment as if via '='.
1252 { EffectContext context(this);
1253 EmitAssignment(stmt->each());
1256 // Generate code for the body of the loop.
1257 Visit(stmt->body());
1259 // Generate code for going to the next element by incrementing
1260 // the index (smi) stored on top of the stack.
1261 __ bind(loop_statement.continue_label());
1263 __ add(r0, r0, Operand(Smi::FromInt(1)));
1266 EmitBackEdgeBookkeeping(stmt, &loop);
1269 // Remove the pointers stored on the stack.
1270 __ bind(loop_statement.break_label());
1273 // Exit and decrement the loop depth.
1274 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1276 decrement_loop_depth();
1280 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1281 Comment cmnt(masm_, "[ ForOfStatement");
1282 SetStatementPosition(stmt);
1284 Iteration loop_statement(this, stmt);
1285 increment_loop_depth();
1287 // var iterator = iterable[Symbol.iterator]();
1288 VisitForEffect(stmt->assign_iterator());
1291 __ bind(loop_statement.continue_label());
1293 // result = iterator.next()
1294 VisitForEffect(stmt->next_result());
1296 // if (result.done) break;
1297 Label result_not_done;
1298 VisitForControl(stmt->result_done(),
1299 loop_statement.break_label(),
1302 __ bind(&result_not_done);
1304 // each = result.value
1305 VisitForEffect(stmt->assign_each());
1307 // Generate code for the body of the loop.
1308 Visit(stmt->body());
1310 // Check stack before looping.
1311 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1312 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1313 __ jmp(loop_statement.continue_label());
1315 // Exit and decrement the loop depth.
1316 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1317 __ bind(loop_statement.break_label());
1318 decrement_loop_depth();
1322 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1324 // Use the fast case closure allocation code that allocates in new
1325 // space for nested functions that don't need literals cloning. If
1326 // we're running with the --always-opt or the --prepare-always-opt
1327 // flag, we need to use the runtime function so that the new function
1328 // we are creating here gets a chance to have its code optimized and
1329 // doesn't just get a copy of the existing unoptimized code.
1330 if (!FLAG_always_opt &&
1331 !FLAG_prepare_always_opt &&
1333 scope()->is_function_scope() &&
1334 info->num_literals() == 0) {
1335 FastNewClosureStub stub(isolate(),
1336 info->strict_mode(),
1337 info->is_generator());
1338 __ mov(r2, Operand(info));
1341 __ mov(r0, Operand(info));
1342 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1343 : Heap::kFalseValueRootIndex);
1344 __ Push(cp, r0, r1);
1345 __ CallRuntime(Runtime::kNewClosure, 3);
1347 context()->Plug(r0);
1351 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1352 Comment cmnt(masm_, "[ VariableProxy");
1353 EmitVariableLoad(expr);
1357 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1358 TypeofState typeof_state,
1360 Register current = cp;
1366 if (s->num_heap_slots() > 0) {
1367 if (s->calls_sloppy_eval()) {
1368 // Check that extension is NULL.
1369 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1373 // Load next context in chain.
1374 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1375 // Walk the rest of the chain without clobbering cp.
1378 // If no outer scope calls eval, we do not need to check more
1379 // context extensions.
1380 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1381 s = s->outer_scope();
1384 if (s->is_eval_scope()) {
1386 if (!current.is(next)) {
1387 __ Move(next, current);
1390 // Terminate at native context.
1391 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1392 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1395 // Check that extension is NULL.
1396 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1399 // Load next context in chain.
1400 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1405 __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1406 __ mov(LoadIC::NameRegister(), Operand(proxy->var()->name()));
1407 if (FLAG_vector_ics) {
1408 __ mov(LoadIC::SlotRegister(),
1409 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1412 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1419 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1421 DCHECK(var->IsContextSlot());
1422 Register context = cp;
1426 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1427 if (s->num_heap_slots() > 0) {
1428 if (s->calls_sloppy_eval()) {
1429 // Check that extension is NULL.
1430 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1434 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1435 // Walk the rest of the chain without clobbering cp.
1439 // Check that last extension is NULL.
1440 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1444 // This function is used only for loads, not stores, so it's safe to
1445 // return a cp-based operand (the write barrier cannot be allowed to
1446 // destroy the cp register).
1447 return ContextOperand(context, var->index());
1451 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1452 TypeofState typeof_state,
1455 // Generate fast-case code for variables that might be shadowed by
1456 // eval-introduced variables. Eval is used a lot without
1457 // introducing variables. In those cases, we do not want to
1458 // perform a runtime call for all variables in the scope
1459 // containing the eval.
1460 Variable* var = proxy->var();
1461 if (var->mode() == DYNAMIC_GLOBAL) {
1462 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1464 } else if (var->mode() == DYNAMIC_LOCAL) {
1465 Variable* local = var->local_if_not_shadowed();
1466 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1467 if (local->mode() == LET || local->mode() == CONST ||
1468 local->mode() == CONST_LEGACY) {
1469 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1470 if (local->mode() == CONST_LEGACY) {
1471 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1472 } else { // LET || CONST
1474 __ mov(r0, Operand(var->name()));
1476 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1484 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1485 // Record position before possible IC call.
1486 SetSourcePosition(proxy->position());
1487 Variable* var = proxy->var();
1489 // Three cases: global variables, lookup variables, and all other types of variables.
1491 switch (var->location()) {
1492 case Variable::UNALLOCATED: {
1493 Comment cmnt(masm_, "[ Global variable");
1494 __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1495 __ mov(LoadIC::NameRegister(), Operand(var->name()));
1496 if (FLAG_vector_ics) {
1497 __ mov(LoadIC::SlotRegister(),
1498 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1500 CallLoadIC(CONTEXTUAL);
1501 context()->Plug(r0);
1505 case Variable::PARAMETER:
1506 case Variable::LOCAL:
1507 case Variable::CONTEXT: {
1508 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1509 : "[ Stack variable");
1510 if (var->binding_needs_init()) {
1511 // var->scope() may be NULL when the proxy is located in eval code and
1512 // refers to a potential outside binding. Currently those bindings are
1513 // always looked up dynamically, i.e. in that case
1514 // var->location() == LOOKUP.
1516 DCHECK(var->scope() != NULL);
1518 // Check if the binding really needs an initialization check. The check
1519 // can be skipped in the following situation: we have a LET or CONST
1520 // binding in harmony mode, both the Variable and the VariableProxy have
1521 // the same declaration scope (i.e. they are both in global code, in the
1522 // same function or in the same eval code) and the VariableProxy is in
1523 // the source physically located after the initializer of the variable.
1525 // We cannot skip any initialization checks for CONST in non-harmony
1526 // mode because const variables may be declared but never initialized:
1527 // if (false) { const x; }; var y = x;
1529 // The condition on the declaration scopes is a conservative check for
1530 // nested functions that access a binding and are called before the
1531 // binding is initialized:
1532 // function() { f(); let x = 1; function f() { x = 2; } }
1534 bool skip_init_check;
1535 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1536 skip_init_check = false;
1538 // Check that we always have valid source position.
1539 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1540 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1541 skip_init_check = var->mode() != CONST_LEGACY &&
1542 var->initializer_position() < proxy->position();
1545 if (!skip_init_check) {
1546 // Let and const need a read barrier.
1548 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1549 if (var->mode() == LET || var->mode() == CONST) {
1550 // Throw a reference error when using an uninitialized let/const
1551 // binding in harmony mode.
1554 __ mov(r0, Operand(var->name()));
1556 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1559 // Uninitialized const bindings outside of harmony mode are unholed.
1560 DCHECK(var->mode() == CONST_LEGACY);
1561 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1563 context()->Plug(r0);
1567 context()->Plug(var);
1571 case Variable::LOOKUP: {
1572 Comment cmnt(masm_, "[ Lookup variable");
1574 // Generate code for loading from variables potentially shadowed
1575 // by eval-introduced variables.
1576 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1578 __ mov(r1, Operand(var->name()));
1579 __ Push(cp, r1); // Context and name.
1580 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1582 context()->Plug(r0);
1588 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1589 Comment cmnt(masm_, "[ RegExpLiteral");
1591 // Registers will be used as follows:
1592 // r5 = materialized value (RegExp literal)
1593 // r4 = JS function, literals array
1594 // r3 = literal index
1595 // r2 = RegExp pattern
1596 // r1 = RegExp flags
1597 // r0 = RegExp literal clone
1598 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1599 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1600 int literal_offset =
1601 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1602 __ ldr(r5, FieldMemOperand(r4, literal_offset));
1603 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1605 __ b(ne, &materialized);
1607 // Create regexp literal using runtime function.
1608 // Result will be in r0.
1609 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1610 __ mov(r2, Operand(expr->pattern()));
1611 __ mov(r1, Operand(expr->flags()));
1612 __ Push(r4, r3, r2, r1);
1613 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1616 __ bind(&materialized);
1617 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1618 Label allocated, runtime_allocate;
1619 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1622 __ bind(&runtime_allocate);
1623 __ mov(r0, Operand(Smi::FromInt(size)));
1625 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1628 __ bind(&allocated);
1629 // After this, registers are used as follows:
1630 // r0: Newly allocated regexp.
1631 // r5: Materialized regexp.
1633 __ CopyFields(r0, r5, d0, size / kPointerSize);
1634 context()->Plug(r0);
1638 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1639 if (expression == NULL) {
1640 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1643 VisitForStackValue(expression);
1648 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1649 Comment cmnt(masm_, "[ ObjectLiteral");
1651 expr->BuildConstantProperties(isolate());
1652 Handle<FixedArray> constant_properties = expr->constant_properties();
1653 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1654 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1655 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1656 __ mov(r1, Operand(constant_properties));
1657 int flags = expr->fast_elements()
1658 ? ObjectLiteral::kFastElements
1659 : ObjectLiteral::kNoFlags;
1660 flags |= expr->has_function()
1661 ? ObjectLiteral::kHasFunction
1662 : ObjectLiteral::kNoFlags;
1663 __ mov(r0, Operand(Smi::FromInt(flags)));
1664 int properties_count = constant_properties->length() / 2;
1665 if (expr->may_store_doubles() || expr->depth() > 1 ||
1666 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1667 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1668 __ Push(r3, r2, r1, r0);
1669 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1671 FastCloneShallowObjectStub stub(isolate(), properties_count);
1675 // If result_saved is true the result is on top of the stack. If
1676 // result_saved is false the result is in r0.
1677 bool result_saved = false;
1679 // Mark all computed expressions that are bound to a key that
1680 // is shadowed by a later occurrence of the same key. For the
1681 // marked expressions, no store code is emitted.
1682 expr->CalculateEmitStore(zone());
1684 AccessorTable accessor_table(zone());
1685 for (int i = 0; i < expr->properties()->length(); i++) {
1686 ObjectLiteral::Property* property = expr->properties()->at(i);
1687 if (property->IsCompileTimeValue()) continue;
1689 Literal* key = property->key();
1690 Expression* value = property->value();
1691 if (!result_saved) {
1692 __ push(r0); // Save result on stack
1693 result_saved = true;
1695 switch (property->kind()) {
1696 case ObjectLiteral::Property::CONSTANT:
1698 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1699 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1701 case ObjectLiteral::Property::COMPUTED:
1702 if (key->value()->IsInternalizedString()) {
1703 if (property->emit_store()) {
1704 VisitForAccumulatorValue(value);
1705 DCHECK(StoreIC::ValueRegister().is(r0));
1706 __ mov(StoreIC::NameRegister(), Operand(key->value()));
1707 __ ldr(StoreIC::ReceiverRegister(), MemOperand(sp));
1708 CallStoreIC(key->LiteralFeedbackId());
1709 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1711 VisitForEffect(value);
1715 // Duplicate receiver on stack.
1716 __ ldr(r0, MemOperand(sp));
1718 VisitForStackValue(key);
1719 VisitForStackValue(value);
1720 if (property->emit_store()) {
1721 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
1723 __ CallRuntime(Runtime::kSetProperty, 4);
1728 case ObjectLiteral::Property::PROTOTYPE:
1729 // Duplicate receiver on stack.
1730 __ ldr(r0, MemOperand(sp));
1732 VisitForStackValue(value);
1733 if (property->emit_store()) {
1734 __ CallRuntime(Runtime::kSetPrototype, 2);
1740 case ObjectLiteral::Property::GETTER:
1741 accessor_table.lookup(key)->second->getter = value;
1743 case ObjectLiteral::Property::SETTER:
1744 accessor_table.lookup(key)->second->setter = value;
1749 // Emit code to define accessors, using only a single call to the runtime for
1750 // each pair of corresponding getters and setters.
1751 for (AccessorTable::Iterator it = accessor_table.begin();
1752 it != accessor_table.end();
1754 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1756 VisitForStackValue(it->first);
1757 EmitAccessor(it->second->getter);
1758 EmitAccessor(it->second->setter);
1759 __ mov(r0, Operand(Smi::FromInt(NONE)));
1761 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1764 if (expr->has_function()) {
1765 DCHECK(result_saved);
1766 __ ldr(r0, MemOperand(sp));
1768 __ CallRuntime(Runtime::kToFastProperties, 1);
1772 context()->PlugTOS();
1774 context()->Plug(r0);
1779 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1780 Comment cmnt(masm_, "[ ArrayLiteral");
1782 expr->BuildConstantElements(isolate());
1783 int flags = expr->depth() == 1
1784 ? ArrayLiteral::kShallowElements
1785 : ArrayLiteral::kNoFlags;
1787 ZoneList<Expression*>* subexprs = expr->values();
1788 int length = subexprs->length();
1789 Handle<FixedArray> constant_elements = expr->constant_elements();
1790 DCHECK_EQ(2, constant_elements->length());
1791 ElementsKind constant_elements_kind =
1792 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1793 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1794 Handle<FixedArrayBase> constant_elements_values(
1795 FixedArrayBase::cast(constant_elements->get(1)));
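// (Added note: constant_elements is a two-element pair, unpacked above -
// index 0 holds the ElementsKind as a smi, index 1 the FixedArrayBase with
// the constant values.)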
1797 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1798 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1799 // If the only customer of allocation sites is transitioning, then
1800 // we can turn it off if we don't have anywhere else to transition to.
1801 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1804 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1805 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1806 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1807 __ mov(r1, Operand(constant_elements));
1808 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1809 __ mov(r0, Operand(Smi::FromInt(flags)));
1810 __ Push(r3, r2, r1, r0);
1811 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1813 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1817 bool result_saved = false; // Is the result saved to the stack?
1819 // Emit code to evaluate all the non-constant subexpressions and to store
1820 // them into the newly cloned array.
1821 for (int i = 0; i < length; i++) {
1822 Expression* subexpr = subexprs->at(i);
1823 // If the subexpression is a literal or a simple materialized literal it
1824 // is already set in the cloned array.
1825 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1827 if (!result_saved) {
1829 __ Push(Smi::FromInt(expr->literal_index()));
1830 result_saved = true;
1832 VisitForAccumulatorValue(subexpr);
1834 if (IsFastObjectElementsKind(constant_elements_kind)) {
1835 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1836 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1837 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1838 __ str(result_register(), FieldMemOperand(r1, offset));
1839 // Update the write barrier for the array store.
1840 __ RecordWriteField(r1, offset, result_register(), r2,
1841 kLRHasBeenSaved, kDontSaveFPRegs,
1842 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1844 __ mov(r3, Operand(Smi::FromInt(i)));
1845 StoreArrayLiteralElementStub stub(isolate());
1849 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1853 __ pop(); // literal index
1854 context()->PlugTOS();
1856 context()->Plug(r0);
1861 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1862 DCHECK(expr->target()->IsValidReferenceExpression());
1864 Comment cmnt(masm_, "[ Assignment");
1866 // Left-hand side can only be a property, a global or a (parameter or local) slot.
1868 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1869 LhsKind assign_type = VARIABLE;
1870 Property* property = expr->target()->AsProperty();
1871 if (property != NULL) {
1872 assign_type = (property->key()->IsPropertyName())
1877 // Evaluate LHS expression.
1878 switch (assign_type) {
1880 // Nothing to do here.
1882 case NAMED_PROPERTY:
1883 if (expr->is_compound()) {
1884 // We need the receiver both on the stack and in the register.
1885 VisitForStackValue(property->obj());
        __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
1891 case KEYED_PROPERTY:
1892 if (expr->is_compound()) {
1893 VisitForStackValue(property->obj());
1894 VisitForStackValue(property->key());
        __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadIC::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

1904 // For compound assignments we need another deoptimization point after the
1905 // variable/property load.
1906 if (expr->is_compound()) {
1907 { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

1924 Token::Value op = expr->binary_op();
1925 __ push(r0); // Left operand goes on the stack.
1926 VisitForAccumulatorValue(expr->value());
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
1931 SetSourcePosition(expr->position() + 1);
1932 AccumulatorValueContext context(this);
1933 if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
1943 // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
1949 // Record source position before possible IC call.
1950 SetSourcePosition(expr->position());
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(r0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1970 void FullCodeGenerator::VisitYield(Yield* expr) {
1971 Comment cmnt(masm_, "[ Yield");
1972 // Evaluate yielded value first; the initial iterator definition depends on
1973 // this. It stays on the stack while we update the iterator.
1974 VisitForStackValue(expr->expression());
1976 switch (expr->yield_kind()) {
1977 case Yield::SUSPEND:
1978 // Pop value from top-of-stack slot; box result into result register.
1979 EmitCreateIteratorResult(false);
1980 __ push(result_register());
1982 case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
1992 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1993 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1994 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1995 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1997 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1998 kLRHasBeenSaved, kDontSaveFPRegs);
      __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ cmp(sp, r1);
      __ b(eq, &post_runtime);
2002 __ push(r0); // generator object
2003 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2004 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2005 __ bind(&post_runtime);
2006 __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
2015 VisitForAccumulatorValue(expr->generator_object());
2016 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2017 __ str(r1, FieldMemOperand(result_register(),
2018 JSGeneratorObject::kContinuationOffset));
2019 // Pop value from top-of-stack slot, box result into result register.
2020 EmitCreateIteratorResult(true);
2021 EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
2027 VisitForStackValue(expr->generator_object());
2029 // Initial stack layout is as follows:
2030 // [sp + 1 * kPointerSize] iter
2031 // [sp + 0 * kPointerSize] g
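      // Rough sketch of what the code below implements for a delegating
      // yield (hedged summary, not part of the original comments):
      //   received = undefined;
      //   loop:
      //     result = iter[f](received)   // f is 'next', or 'throw' if we
      //                                  // were resumed with an exception
      //     if (result.done) break, producing result.value;
      //     received = %yield result;    // suspend, passing result through
      //                                  // unchanged ("without re-boxing")
      //   goto loop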
2033 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2034 Label l_next, l_call, l_loop;
2035 Register load_receiver = LoadIC::ReceiverRegister();
2036 Register load_name = LoadIC::NameRegister();
2038 // Initial send value is undefined.
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
      __ b(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2045 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2046 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
      __ Push(load_name, r3, r0);                      // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(r0);                                        // result
2055 __ PushTryHandler(StackHandler::CATCH, expr->index());
2056 const int handler_size = StackHandlerConstants::kSize;
      __ push(r0);                                       // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
2062 const int generator_object_depth = kPointerSize + handler_size;
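      // The generator object g sits below the re-pushed result (one word) and
      // the try handler installed above, hence this depth.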
      __ ldr(r0, MemOperand(sp, generator_object_depth));
      __ push(r0);                                       // g
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2066 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2067 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2068 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2070 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2071 kLRHasBeenSaved, kDontSaveFPRegs);
2072 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2073 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2074 __ pop(r0); // result
2075 EmitReturnSequence();
      __ bind(&l_resume);                                // received in r0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
2083 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2084 __ Push(load_name, r3, r0); // "next", iter, received
      // result = receiver[f](arg);
      __ bind(&l_call);
      __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2089 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2090 if (FLAG_vector_ics) {
2091 __ mov(LoadIC::SlotRegister(),
               Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
      }
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(r1, r0);
      __ str(r1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2102 __ Drop(1); // The function is still on the stack; drop it.
      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ Move(load_receiver, r0);
2108 __ push(load_receiver); // save result
2109 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2110 if (FLAG_vector_ics) {
2111 __ mov(LoadIC::SlotRegister(),
               Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                        // r0=result.done
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ cmp(r0, Operand(0));
      __ b(eq, &l_try);

      // result.value
      __ pop(load_receiver);                             // result
2122 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2123 if (FLAG_vector_ics) {
2124 __ mov(LoadIC::SlotRegister(),
               Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                        // r0=result.value
      context()->DropAndPlug(2, r0);                     // drop iter and g
      break;
    }
  }
}
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
2138 // The value stays in r0, and is ultimately read by the resumed generator, as
2139 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2140 // is read to throw the value when the resumed generator is already closed.
2141 // r1 will hold the generator object until the activation has been resumed.
2142 VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(r1);

  // Check generator state.
2147 Label wrong_state, closed_state, done;
2148 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2149 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2150 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
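  // The continuation field encodes the generator state: a positive smi is the
  // suspension point of a suspended generator, kGeneratorClosed (0) marks a
  // closed generator, and negative values (kGeneratorExecuting) mark a running
  // one, hence the eq/lt branches below.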
2151 __ cmp(r3, Operand(Smi::FromInt(0)));
2152 __ b(eq, &closed_state);
2153 __ b(lt, &wrong_state);
2155 // Load suspended function and context.
2156 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2157 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2159 // Load receiver and store as the first argument.
  __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ push(r2);

  // Push holes for the rest of the arguments to the generator function.
2164 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2167 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2168 Label push_argument_holes, push_frame;
2169 __ bind(&push_argument_holes);
2170 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
  __ b(mi, &push_frame);
  __ push(r2);
  __ jmp(&push_argument_holes);
2175 // Enter a new JavaScript frame, and initialize its slots as they were when
2176 // the generator was suspended.
2178 __ bind(&push_frame);
  __ bl(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
2182 // lr = return address.
2183 // fp = caller's frame pointer.
2184 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2185 // cp = callee's context,
2186 // r4 = callee's JS function.
2187 __ PushFixedFrame(r4);
2188 // Adjust FP to point to saved FP.
2189 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2191 // Load the operand stack size.
2192 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
  __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ SmiUntag(r3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
2200 __ cmp(r3, Operand(0));
2201 __ b(ne, &slow_resume);
2202 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2204 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2205 if (FLAG_enable_ool_constant_pool) {
2206 // Load the new code object's constant pool pointer.
        __ ldr(pp,
               MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
      }
      __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r2);
      __ add(r3, r3, r2);
      __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
      __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
      __ Jump(r3);
    }
    __ bind(&slow_resume);
  }
2221 // Otherwise, we push holes for the operand stack and call the runtime to fix
2222 // up the stack and the handlers.
2223 Label push_operand_holes, call_resume;
2224 __ bind(&push_operand_holes);
2225 __ sub(r3, r3, Operand(1), SetCC);
  __ b(mi, &call_resume);
  __ push(r2);
  __ b(&push_operand_holes);
2229 __ bind(&call_resume);
2230 DCHECK(!result_register().is(r1));
2231 __ Push(r1, result_register());
2232 __ Push(Smi::FromInt(resume_mode));
2233 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2234 // Not reached: the runtime call returns elsewhere.
2235 __ stop("not-reached");
2237 // Reach here when generator is closed.
2238 __ bind(&closed_state);
2239 if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(r0);
    __ CallRuntime(Runtime::kThrow, 1);
  }
  __ jmp(&done);
2252 // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);
2271 __ bind(&gc_required);
2272 __ Push(Smi::FromInt(map->instance_size()));
2273 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2274 __ ldr(context_register(),
2275 MemOperand(fp, StandardFrameConstants::kContextOffset));
2277 __ bind(&allocated);
  __ mov(r1, Operand(map));
  __ pop(r2);
  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2281 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2282 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
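  // The iterator result object consists of exactly five pointer-sized fields,
  // stored below: map, properties, elements, the result value, and the done
  // flag.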
2283 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2284 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2285 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2,
         FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
  __ str(r3,
         FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
  // Only the value field needs a write barrier, as the other values are in the
  // root set.
2293 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2294 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2298 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2299 SetSourcePosition(prop->position());
2300 Literal* key = prop->key()->AsLiteral();
2301 __ mov(LoadIC::NameRegister(), Operand(key->value()));
2302 if (FLAG_vector_ics) {
2303 __ mov(LoadIC::SlotRegister(),
2304 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}
2312 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2313 SetSourcePosition(prop->position());
2314 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2315 if (FLAG_vector_ics) {
2316 __ mov(LoadIC::SlotRegister(),
           Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
2330 Label done, smi_case, stub_call;
2332 Register scratch1 = r2;
2333 Register scratch2 = r3;
  // Get the arguments.
  Register left = r1;
  Register right = r0;
  __ pop(left);
2340 // Perform combined smi check on both operands.
2341 __ orr(scratch1, left, Operand(right));
2342 STATIC_ASSERT(kSmiTag == 0);
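  // With a zero smi tag in the low bit, the bitwise OR of both operands has a
  // clear tag bit only if both operands are smis, so a single check covers
  // both.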
2343 JumpPatchSite patch_site(masm_);
2344 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2346 __ bind(&stub_call);
2347 BinaryOpICStub stub(isolate(), op, mode);
2348 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
2358 __ mov(right, Operand(left, ASR, scratch1));
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
2363 __ GetLeastBitsFromSmi(scratch2, right, 5);
2364 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ TrySmiTag(right, scratch1, &stub_call);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
2370 __ GetLeastBitsFromSmi(scratch2, right, 5);
2371 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
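      // The untagged logical-shift result must leave the top two bits clear so
      // that it still fits a non-negative smi once re-tagged.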
2372 __ tst(scratch1, Operand(0xc0000000));
2373 __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
2379 __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
2384 __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(ip, right);
2389 __ smull(scratch1, scratch2, left, ip);
2390 __ mov(ip, Operand(scratch1, ASR, 31));
2391 __ cmp(ip, Operand(scratch2));
2392 __ b(ne, &stub_call);
2393 __ cmp(scratch1, Operand::Zero());
      __ mov(right, Operand(scratch1), LeaveCC, ne);
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
2397 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}
2419 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2421 OverwriteMode mode) {
2423 BinaryOpICStub stub(isolate(), op, mode);
2424 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2425 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2426 patch_site.EmitPatchInfo();
2427 context()->Plug(r0);
2431 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2432 DCHECK(expr->IsValidReferenceExpression());
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
2436 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2437 LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
2447 Variable* var = expr->AsVariableProxy()->var();
2448 EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
2453 __ push(r0); // Preserve value.
2454 VisitForAccumulatorValue(prop->obj());
2455 __ Move(StoreIC::ReceiverRegister(), r0);
2456 __ pop(StoreIC::ValueRegister()); // Restore value.
2457 __ mov(StoreIC::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
2463 __ push(r0); // Preserve value.
2464 VisitForStackValue(prop->obj());
2465 VisitForAccumulatorValue(prop->key());
2466 __ Move(KeyedStoreIC::NameRegister(), r0);
2467 __ Pop(KeyedStoreIC::ValueRegister(), KeyedStoreIC::ReceiverRegister());
2468 Handle<Code> ic = strict_mode() == SLOPPY
2469 ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(r0);
}
2479 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2480 Variable* var, MemOperand location) {
2481 __ str(result_register(), location);
2482 if (var->IsContextSlot()) {
2483 // RecordWrite may destroy all its register arguments.
2484 __ mov(r3, result_register());
2485 int offset = Context::SlotOffset(var->index());
2486 __ RecordWriteContextSlot(
2487 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
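    // Stack slots are not heap objects, so only context slots need the write
    // barrier above.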
2492 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2493 if (var->IsUnallocated()) {
2494 // Global var, const, or let.
2495 __ mov(StoreIC::NameRegister(), Operand(var->name()));
    __ ldr(StoreIC::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
2500 // Const initializers need a write barrier.
2501 DCHECK(!var->IsParameter()); // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(r0);
      __ mov(r0, Operand(var->name()));
2505 __ Push(cp, r0); // Context and name.
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, r1);
      __ ldr(r2, location);
      __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
      __ b(ne, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }
2518 } else if (var->mode() == LET && op != Token::INIT_LET) {
2519 // Non-initializing assignment to let variable needs a write barrier.
2520 DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, r1);
    __ ldr(r3, location);
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    __ b(ne, &assign);
    __ mov(r3, Operand(var->name()));
    __ push(r3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
2534 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2535 if (var->IsLookupSlot()) {
2536 // Assignment to var.
2537 __ push(r0); // Value.
2538 __ mov(r1, Operand(var->name()));
2539 __ mov(r0, Operand(Smi::FromInt(strict_mode())));
2540 __ Push(cp, r1, r0); // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
2545 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2546 MemOperand location = VarOperand(var, r1);
2547 if (generate_debug_code_ && op == Token::INIT_LET) {
2548 // Check for an uninitialized let binding.
2549 __ ldr(r2, location);
2550 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
2560 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2561 // Assignment to a property, using a named store IC.
2562 Property* prop = expr->target()->AsProperty();
2563 DCHECK(prop != NULL);
2564 DCHECK(prop->key()->IsLiteral());
2566 // Record source code position before IC call.
2567 SetSourcePosition(expr->position());
2568 __ mov(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value()));
2569 __ pop(StoreIC::ReceiverRegister());
2570 CallStoreIC(expr->AssignmentFeedbackId());
2572 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2573 context()->Plug(r0);
2577 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2578 // Assignment to a property, using a keyed store IC.
2580 // Record source code position before IC call.
2581 SetSourcePosition(expr->position());
2582 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
2583 DCHECK(KeyedStoreIC::ValueRegister().is(r0));
2585 Handle<Code> ic = strict_mode() == SLOPPY
2586 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2587 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2588 CallIC(ic, expr->AssignmentFeedbackId());
2590 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2591 context()->Plug(r0);
2595 void FullCodeGenerator::VisitProperty(Property* expr) {
2596 Comment cmnt(masm_, "[ Property");
2597 Expression* key = expr->key();
2599 if (key->IsPropertyName()) {
2600 VisitForAccumulatorValue(expr->obj());
2601 __ Move(LoadIC::ReceiverRegister(), r0);
2602 EmitNamedPropertyLoad(expr);
2603 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(r0);
  } else {
    VisitForStackValue(expr->obj());
2607 VisitForAccumulatorValue(expr->key());
2608 __ Move(LoadIC::NameRegister(), r0);
2609 __ pop(LoadIC::ReceiverRegister());
2610 EmitKeyedPropertyLoad(expr);
2611 context()->Plug(r0);
2616 void FullCodeGenerator::CallIC(Handle<Code> code,
2617 TypeFeedbackId ast_id) {
2619 // All calls must have a predictable size in full-codegen code to ensure that
2620 // the debugger can patch them correctly.
2621 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2622 NEVER_INLINE_TARGET_ADDRESS);
2626 // Code common for calls using the IC.
2627 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2628 Expression* callee = expr->expression();
  CallIC::CallType call_type = callee->IsVariableProxy()
      ? CallIC::FUNCTION
      : CallIC::METHOD;
2634 // Get the target function.
2635 if (call_type == CallIC::FUNCTION) {
2636 { StackValueContext context(this);
2637 EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
2645 DCHECK(callee->IsProperty());
2646 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2647 EmitNamedPropertyLoad(callee->AsProperty());
2648 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2649 // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}
2659 // Code common for calls using the IC.
2660 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2663 VisitForAccumulatorValue(key);
2665 Expression* callee = expr->expression();
2667 // Load the function from the receiver.
2668 DCHECK(callee->IsProperty());
2669 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2670 __ Move(LoadIC::NameRegister(), r0);
2671 EmitKeyedPropertyLoad(callee->AsProperty());
2672 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2674 // Push the target function under the receiver.
  __ ldr(ip, MemOperand(sp, 0));
  __ push(ip);
  __ str(r0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallIC::METHOD);
}
2683 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2684 // Load the arguments.
2685 ZoneList<Expression*>* args = expr->arguments();
2686 int arg_count = args->length();
2687 { PreservePositionScope scope(masm()->positions_recorder());
2688 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
2694 SetSourcePosition(expr->position());
2695 Handle<Code> ic = CallIC::initialize_stub(
2696 isolate(), arg_count, call_type);
2697 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2698 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  RecordJSReturnSite(expr);
2704 // Restore context register.
2705 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2706 context()->DropAndPlug(1, r0);
2710 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2711 // r4: copy of the first argument or undefined if it doesn't exist.
2712 if (arg_count > 0) {
    __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  }
2718 // r3: the receiver of the enclosing function.
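  // The receiver was pushed before the parameters, so it sits just past the
  // parameter slots, which start two words above fp (past the caller fp and
  // the return address).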
2719 int receiver_offset = 2 + info_->scope()->num_parameters();
2720 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2723 __ mov(r2, Operand(Smi::FromInt(strict_mode())));
  // r1: the start position of the scope the call resides in.
2726 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2728 // Do the runtime call.
2729 __ Push(r4, r3, r2, r1);
2730 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2734 void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
2741 Comment cmnt(masm_, "[ Call");
2742 Expression* callee = expr->expression();
2743 Call::CallType call_type = expr->GetCallType(isolate());
2745 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2746 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2747 // to resolve the function we need to call and the receiver of the
  // call. Then we call the resolved function using the given arguments.
2750 ZoneList<Expression*>* args = expr->arguments();
2751 int arg_count = args->length();
2753 { PreservePositionScope pos_scope(masm()->positions_recorder());
2754 VisitForStackValue(callee);
2755 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2756 __ push(r2); // Reserved receiver slot.
2758 // Push the arguments.
2759 for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(r1);
2767 EmitResolvePossiblyDirectEval(arg_count);
2769 // The runtime call returns a pair of values in r0 (function) and
2770 // r1 (receiver). Touch up the stack with the right values.
2771 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
    }

    // Record source position for debugger.
2776 SetSourcePosition(expr->position());
2777 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
2781 // Restore context register.
2782 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2783 context()->DropAndPlug(1, r0);
2784 } else if (call_type == Call::GLOBAL_CALL) {
2785 EmitCallWithLoadIC(expr);
2787 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2788 // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
2793 // Generate code for loading from variables potentially shadowed
2794 // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
2801 DCHECK(!context_register().is(r2));
2802 __ mov(r2, Operand(proxy->name()));
2803 __ Push(context_register(), r2);
2804 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2805 __ Push(r0, r1); // Function, receiver.
2807 // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
2810 if (done.is_linked()) {
2816 // The receiver is implicitly the global receiver. Indicate this
2817 // by passing the hole to the call function stub.
2818 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2823 // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
2827 Property* property = callee->AsProperty();
2828 { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithLoadIC(expr);
    } else {
      EmitKeyedCallWithLoadIC(expr, property->key());
    }
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ push(r1);
    // Emit function call.
    EmitCall(expr);
  }
#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
2855 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2856 Comment cmnt(masm_, "[ CallNew");
2857 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the arguments.
2861 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // used as the receiver.
2864 VisitForStackValue(expr->expression());
2866 // Push the arguments ("left-to-right") on the stack.
2867 ZoneList<Expression*>* args = expr->arguments();
2868 int arg_count = args->length();
2869 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
2874 // constructor invocation.
2875 SetSourcePosition(expr->position());
2877 // Load function and argument count into r1 and r0.
2878 __ mov(r0, Operand(arg_count));
2879 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2881 // Record call targets in unoptimized code.
2882 if (FLAG_pretenuring_call_new) {
2883 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2884 DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ Move(r2, FeedbackVector());
2889 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2891 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2892 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2893 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2894 context()->Plug(r0);
2898 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2899 ZoneList<Expression*>* args = expr->arguments();
2900 DCHECK(args->length() == 1);
2902 VisitForAccumulatorValue(args->at(0));
2904 Label materialize_true, materialize_false;
2905 Label* if_true = NULL;
2906 Label* if_false = NULL;
2907 Label* fall_through = NULL;
2908 context()->PrepareTest(&materialize_true, &materialize_false,
2909 &if_true, &if_false, &fall_through);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
  Split(eq, if_true, if_false, fall_through);
2915 context()->Plug(if_true, if_false);
2919 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2920 ZoneList<Expression*>* args = expr->arguments();
2921 DCHECK(args->length() == 1);
2923 VisitForAccumulatorValue(args->at(0));
2925 Label materialize_true, materialize_false;
2926 Label* if_true = NULL;
2927 Label* if_false = NULL;
2928 Label* fall_through = NULL;
2929 context()->PrepareTest(&materialize_true, &materialize_false,
2930 &if_true, &if_false, &fall_through);
2932 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2933 __ NonNegativeSmiTst(r0);
2934 Split(eq, if_true, if_false, fall_through);
2936 context()->Plug(if_true, if_false);
2940 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2941 ZoneList<Expression*>* args = expr->arguments();
2942 DCHECK(args->length() == 1);
2944 VisitForAccumulatorValue(args->at(0));
2946 Label materialize_true, materialize_false;
2947 Label* if_true = NULL;
2948 Label* if_false = NULL;
2949 Label* fall_through = NULL;
2950 context()->PrepareTest(&materialize_true, &materialize_false,
2951 &if_true, &if_false, &fall_through);
2953 __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, if_true);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2958 // Undetectable objects behave like undefined when tested with typeof.
2959 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  __ b(ne, if_false);
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, if_false);
  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2966 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2967 Split(le, if_true, if_false, fall_through);
2969 context()->Plug(if_true, if_false);
2973 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2974 ZoneList<Expression*>* args = expr->arguments();
2975 DCHECK(args->length() == 1);
2977 VisitForAccumulatorValue(args->at(0));
2979 Label materialize_true, materialize_false;
2980 Label* if_true = NULL;
2981 Label* if_false = NULL;
2982 Label* fall_through = NULL;
2983 context()->PrepareTest(&materialize_true, &materialize_false,
2984 &if_true, &if_false, &fall_through);
2986 __ JumpIfSmi(r0, if_false);
2987 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2988 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2989 Split(ge, if_true, if_false, fall_through);
2991 context()->Plug(if_true, if_false);
2995 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2996 ZoneList<Expression*>* args = expr->arguments();
2997 DCHECK(args->length() == 1);
2999 VisitForAccumulatorValue(args->at(0));
3001 Label materialize_true, materialize_false;
3002 Label* if_true = NULL;
3003 Label* if_false = NULL;
3004 Label* fall_through = NULL;
3005 context()->PrepareTest(&materialize_true, &materialize_false,
3006 &if_true, &if_false, &fall_through);
3008 __ JumpIfSmi(r0, if_false);
3009 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3010 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3011 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(ne, if_true, if_false, fall_through);
3015 context()->Plug(if_true, if_false);
3019 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3020 CallRuntime* expr) {
3021 ZoneList<Expression*>* args = expr->arguments();
3022 DCHECK(args->length() == 1);
3024 VisitForAccumulatorValue(args->at(0));
3026 Label materialize_true, materialize_false, skip_lookup;
3027 Label* if_true = NULL;
3028 Label* if_false = NULL;
3029 Label* fall_through = NULL;
3030 context()->PrepareTest(&materialize_true, &materialize_false,
3031 &if_true, &if_false, &fall_through);
3033 __ AssertNotSmi(r0);
3035 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3036 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3037 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3038 __ b(ne, &skip_lookup);
3040 // Check for fast case object. Generate false result for slow case object.
3041 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3042 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r2, ip);
  __ b(eq, if_false);

  // Look for valueOf name in the descriptor array, and indicate false if
3048 // found. Since we omit an enumeration index check, if it is added via a
3049 // transition that shares its descriptor array, this is a false positive.
3050 Label entry, loop, done;
3052 // Skip loop if no descriptors are valid.
3053 __ NumberOfOwnDescriptors(r3, r1);
  __ cmp(r3, Operand::Zero());
  __ b(eq, &done);

  __ LoadInstanceDescriptors(r1, r4);
3058 // r4: descriptor array.
3059 // r3: valid entries in the descriptor array.
  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
  __ mul(r3, r3, ip);
  // Calculate location of the first key name.
  __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(r2, r4);
  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3068 // Loop through all the keys in the descriptor array. If one of these is the
3069 // string "valueOf" the result is false.
3070 // The use of ip to store the valueOf string assumes that it is not otherwise
3071 // used in the loop below.
  __ mov(ip, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r3, MemOperand(r4, 0));
  __ cmp(r3, ip);
  __ b(eq, if_false);
  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(r4, Operand(r2));
  __ b(ne, &loop);

  __ bind(&done);
3085 // Set the bit in the map to indicate that there is no local valueOf field.
3086 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3087 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3088 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3090 __ bind(&skip_lookup);
3092 // If a valueOf property is not found on the object check that its
3093 // prototype is the un-modified String prototype. If not result is false.
3094 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3095 __ JumpIfSmi(r2, if_false);
3096 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3097 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3098 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
  __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ cmp(r2, r3);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3102 Split(eq, if_true, if_false, fall_through);
3104 context()->Plug(if_true, if_false);
3108 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3109 ZoneList<Expression*>* args = expr->arguments();
3110 DCHECK(args->length() == 1);
3112 VisitForAccumulatorValue(args->at(0));
3114 Label materialize_true, materialize_false;
3115 Label* if_true = NULL;
3116 Label* if_false = NULL;
3117 Label* fall_through = NULL;
3118 context()->PrepareTest(&materialize_true, &materialize_false,
3119 &if_true, &if_false, &fall_through);
3121 __ JumpIfSmi(r0, if_false);
3122 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3123 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124 Split(eq, if_true, if_false, fall_through);
3126 context()->Plug(if_true, if_false);
3130 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3131 ZoneList<Expression*>* args = expr->arguments();
3132 DCHECK(args->length() == 1);
3134 VisitForAccumulatorValue(args->at(0));
3136 Label materialize_true, materialize_false;
3137 Label* if_true = NULL;
3138 Label* if_false = NULL;
3139 Label* fall_through = NULL;
3140 context()->PrepareTest(&materialize_true, &materialize_false,
3141 &if_true, &if_false, &fall_through);
3143 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3144 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3145 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3146 __ cmp(r2, Operand(0x80000000));
3147 __ cmp(r1, Operand(0x00000000), eq);
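  // -0.0 is the only heap number whose upper (sign/exponent) word is
  // 0x80000000 with a zero lower (mantissa) word; the second cmp is
  // conditional on the first matching, so eq below means both words matched.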
3149 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3150 Split(eq, if_true, if_false, fall_through);
3152 context()->Plug(if_true, if_false);
3156 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3157 ZoneList<Expression*>* args = expr->arguments();
3158 DCHECK(args->length() == 1);
3160 VisitForAccumulatorValue(args->at(0));
3162 Label materialize_true, materialize_false;
3163 Label* if_true = NULL;
3164 Label* if_false = NULL;
3165 Label* fall_through = NULL;
3166 context()->PrepareTest(&materialize_true, &materialize_false,
3167 &if_true, &if_false, &fall_through);
3169 __ JumpIfSmi(r0, if_false);
3170 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3171 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3172 Split(eq, if_true, if_false, fall_through);
3174 context()->Plug(if_true, if_false);
3178 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3179 ZoneList<Expression*>* args = expr->arguments();
3180 DCHECK(args->length() == 1);
3182 VisitForAccumulatorValue(args->at(0));
3184 Label materialize_true, materialize_false;
3185 Label* if_true = NULL;
3186 Label* if_false = NULL;
3187 Label* fall_through = NULL;
3188 context()->PrepareTest(&materialize_true, &materialize_false,
3189 &if_true, &if_false, &fall_through);
3191 __ JumpIfSmi(r0, if_false);
3192 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3194 Split(eq, if_true, if_false, fall_through);
3196 context()->Plug(if_true, if_false);
3201 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3202 DCHECK(expr->arguments()->length() == 0);
3204 Label materialize_true, materialize_false;
3205 Label* if_true = NULL;
3206 Label* if_false = NULL;
3207 Label* fall_through = NULL;
3208 context()->PrepareTest(&materialize_true, &materialize_false,
3209 &if_true, &if_false, &fall_through);
3211 // Get the frame pointer for the calling frame.
3212 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3214 // Skip the arguments adaptor frame if it exists.
3215 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3216 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3217 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3219 // Check the marker in the calling frame.
3220 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3221 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3222 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3223 Split(eq, if_true, if_false, fall_through);
3225 context()->Plug(if_true, if_false);
3229 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3230 ZoneList<Expression*>* args = expr->arguments();
3231 DCHECK(args->length() == 2);
3233 // Load the two objects into registers and perform the comparison.
3234 VisitForStackValue(args->at(0));
3235 VisitForAccumulatorValue(args->at(1));
3237 Label materialize_true, materialize_false;
3238 Label* if_true = NULL;
3239 Label* if_false = NULL;
3240 Label* fall_through = NULL;
3241 context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(r1);
  __ cmp(r0, r1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);
3249 context()->Plug(if_true, if_false);
3253 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3254 ZoneList<Expression*>* args = expr->arguments();
3255 DCHECK(args->length() == 1);
  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(r0);
3268 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3269 DCHECK(expr->arguments()->length() == 0);
3271 // Get the number of formal parameters.
3272 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3274 // Check if the calling frame is an arguments adaptor frame.
3275 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3276 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3277 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3283 context()->Plug(r0);
3287 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3288 ZoneList<Expression*>* args = expr->arguments();
3289 DCHECK(args->length() == 1);
3290 Label done, null, function, non_function_constructor;
3292 VisitForAccumulatorValue(args->at(0));
3294 // If the object is a smi, we return null.
3295 __ JumpIfSmi(r0, &null);
3297 // Check that the object is a JS object but take special care of JS
3298 // functions to make sure they have 'Function' as their class.
3299 // Assume that there are only two callable types, and one of them is at
3300 // either end of the type range for JS object types. Saves extra comparisons.
3301 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3302 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3306 FIRST_SPEC_OBJECT_TYPE + 1);
3307 __ b(eq, &function);
3309 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3310 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3311 LAST_SPEC_OBJECT_TYPE - 1);
3312 __ b(eq, &function);
3313 // Assume that there is no larger type.
3314 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3316 // Check if the constructor in the map is a JS function.
3317 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3318 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3319 __ b(ne, &non_function_constructor);
3321 // r0 now contains the constructor function. Grab the
3322 // instance class name from there.
3323 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);
3344 context()->Plug(r0);
3348 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3349 // Load the arguments on the stack and call the stub.
3350 SubStringStub stub(isolate());
3351 ZoneList<Expression*>* args = expr->arguments();
3352 DCHECK(args->length() == 3);
3353 VisitForStackValue(args->at(0));
3354 VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}
3361 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3362 // Load the arguments on the stack and call the stub.
3363 RegExpExecStub stub(isolate());
3364 ZoneList<Expression*>* args = expr->arguments();
3365 DCHECK(args->length() == 4);
3366 VisitForStackValue(args->at(0));
3367 VisitForStackValue(args->at(1));
3368 VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(r0);
}
3375 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3376 ZoneList<Expression*>* args = expr->arguments();
3377 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
3383 // If the object is not a value type, return the object.
3384 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}
3392 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3393 ZoneList<Expression*>* args = expr->arguments();
3394 DCHECK(args->length() == 2);
3395 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3396 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3398 VisitForAccumulatorValue(args->at(0)); // Load the object.
3400 Label runtime, done, not_date_object;
3401 Register object = r0;
3402 Register result = r0;
3403 Register scratch0 = r9;
3404 Register scratch1 = r1;
  __ JumpIfSmi(object, &not_date_object);
3407 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ b(ne, &not_date_object);
3410 if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
3415 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3416 __ mov(scratch1, Operand(stamp));
3417 __ ldr(scratch1, MemOperand(scratch1));
3418 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
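      // JSDate caches computed fields; the isolate-wide date cache stamp is
      // bumped whenever the cache is invalidated, so a matching stamp on the
      // object means the cached field can be read directly. Otherwise we fall
      // back to the C helper below.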
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
3426 __ PrepareCallCFunction(2, scratch1);
3427 __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(r0);
}
3439 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3440 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK_EQ(3, args->length());
3443 Register string = r0;
3444 Register index = r1;
3445 Register value = r2;
3447 VisitForStackValue(args->at(1)); // index
3448 VisitForStackValue(args->at(2)); // value
3449 VisitForAccumulatorValue(args->at(0)); // string
3450 __ Pop(index, value);
  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
3458 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3459 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3468 context()->Plug(string);
3472 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3473 ZoneList<Expression*>* args = expr->arguments();
3474 DCHECK_EQ(3, args->length());
3476 Register string = r0;
3477 Register index = r1;
3478 Register value = r2;
3480 VisitForStackValue(args->at(1)); // index
3481 VisitForStackValue(args->at(2)); // value
3482 VisitForAccumulatorValue(args->at(0)); // string
3483 __ Pop(index, value);
  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
3491 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3492 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
3502 context()->Plug(string);
3507 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3508 // Load the arguments on the stack and call the runtime function.
3509 ZoneList<Expression*>* args = expr->arguments();
3510 DCHECK(args->length() == 2);
3511 VisitForStackValue(args->at(0));
3512 VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(r0);
}
3519 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3520 ZoneList<Expression*>* args = expr->arguments();
3521 DCHECK(args->length() == 2);
3522 VisitForStackValue(args->at(0)); // Load the object.
3523 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);
3530 // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3536 // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}
3547 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3548 ZoneList<Expression*>* args = expr->arguments();
3549 DCHECK_EQ(args->length(), 1);
3550 // Load the argument into r0 and call the stub.
3551 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}
3559 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3560 ZoneList<Expression*>* args = expr->arguments();
3561 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}
3577 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3578 ZoneList<Expression*>* args = expr->arguments();
3579 DCHECK(args->length() == 2);
3580 VisitForStackValue(args->at(0));
3581 VisitForAccumulatorValue(args->at(1));
3583 Register object = r1;
3584 Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
3609 // Load the undefined value into the result register, which will
3610 // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3622 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3623 ZoneList<Expression*>* args = expr->arguments();
3624 DCHECK(args->length() == 2);
3625 VisitForStackValue(args->at(0));
3626 VisitForAccumulatorValue(args->at(1));
3628 Register object = r1;
3629 Register index = r0;
3630 Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
3650 // When the index is out of range, the spec requires us to return
3651 // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3669 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3670 ZoneList<Expression*>* args = expr->arguments();
3671 DCHECK_EQ(2, args->length());
3672 VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}
3682 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3683 ZoneList<Expression*>* args = expr->arguments();
3684 DCHECK_EQ(2, args->length());
3685 VisitForStackValue(args->at(0));
3686 VisitForStackValue(args->at(1));
  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}
3694 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3695 ZoneList<Expression*>* args = expr->arguments();
3696 DCHECK(args->length() >= 2);
3698 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3699 for (int i = 0; i < arg_count + 1; i++) {
3700 VisitForStackValue(args->at(i));
3702 VisitForAccumulatorValue(args->last()); // Function.
3704 Label runtime, done;
3705 // Check for non-function argument (including proxy).
3706 __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
3711 __ mov(r1, result_register());
3712 ParameterCount count(arg_count);
3713 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}
3726 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3727 RegExpConstructResultStub stub(isolate());
3728 ZoneList<Expression*>* args = expr->arguments();
3729 DCHECK(args->length() == 3);
3730 VisitForStackValue(args->at(0));
3731 VisitForStackValue(args->at(1));
3732 VisitForAccumulatorValue(args->at(2));
3733 __ pop(r1);
3734 __ pop(r2);
3735 __ CallStub(&stub);
3736 context()->Plug(r0);
3737 }
3740 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3741 ZoneList<Expression*>* args = expr->arguments();
3742 DCHECK_EQ(2, args->length());
3743 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3744 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3746 Handle<FixedArray> jsfunction_result_caches(
3747 isolate()->native_context()->jsfunction_result_caches());
3748 if (jsfunction_result_caches->length() <= cache_id) {
3749 __ Abort(kAttemptToUseUndefinedCache);
3750 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3751 context()->Plug(r0);
3752 return;
3753 }
3755 VisitForAccumulatorValue(args->at(1));
3757 Register key = r0;
3758 Register cache = r1;
3759 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3760 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3761 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3762 __ ldr(cache,
3763 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3766 Label done, not_found;
3767 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3768 // r2 now holds finger offset as a smi.
3769 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3770 // r3 now points to the start of fixed array elements.
3771 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3772 // Note side effect of PreIndex: r3 now points to the key of the pair.
3773 __ cmp(key, r2);
3774 __ b(ne, &not_found);
3776 __ ldr(r0, MemOperand(r3, kPointerSize));
3777 __ b(&done);
3779 __ bind(&not_found);
3780 // Call runtime to perform the lookup.
3781 __ Push(cache, key);
3782 __ CallRuntime(Runtime::kGetFromCache, 2);
3784 __ bind(&done);
3785 context()->Plug(r0);
3786 }
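// The fast path above assumes the JSFunctionResultCache layout: a FixedArray
// holding (key, value) pairs, with a "finger" field that records the offset
// of the most recently used pair. Only the pair under the finger is probed
// inline; on a miss, Runtime::kGetFromCache performs the full lookup (and,
// presumably, advances the finger).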
3789 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3790 ZoneList<Expression*>* args = expr->arguments();
3791 VisitForAccumulatorValue(args->at(0));
3793 Label materialize_true, materialize_false;
3794 Label* if_true = NULL;
3795 Label* if_false = NULL;
3796 Label* fall_through = NULL;
3797 context()->PrepareTest(&materialize_true, &materialize_false,
3798 &if_true, &if_false, &fall_through);
3800 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3801 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3802 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3803 Split(eq, if_true, if_false, fall_through);
3805 context()->Plug(if_true, if_false);
3809 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3810 ZoneList<Expression*>* args = expr->arguments();
3811 DCHECK(args->length() == 1);
3812 VisitForAccumulatorValue(args->at(0));
3814 __ AssertString(r0);
3816 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3817 __ IndexFromHash(r0, r0);
3819 context()->Plug(r0);
3823 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3824 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3825 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3826 one_char_separator_loop_entry, long_separator_loop;
3827 ZoneList<Expression*>* args = expr->arguments();
3828 DCHECK(args->length() == 2);
3829 VisitForStackValue(args->at(1));
3830 VisitForAccumulatorValue(args->at(0));
3832 // All aliases of the same register have disjoint lifetimes.
3833 Register array = r0;
3834 Register elements = no_reg; // Will be r0.
3835 Register result = no_reg; // Will be r0.
3836 Register separator = r1;
3837 Register array_length = r2;
3838 Register result_pos = no_reg; // Will be r2
3839 Register string_length = r3;
3840 Register string = r4;
3841 Register element = r5;
3842 Register elements_end = r6;
3843 Register scratch = r9;
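// Overall shape of the fast join: validate that the receiver is a JSArray
// with fast elements and that every element (and the separator) is a flat
// sequential ASCII string, sum the element lengths, allocate one flat ASCII
// result string, and then copy the pieces with one of three loops chosen by
// separator length (empty, single character, or longer). Anything
// unexpected branches to the bailout label at the end.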
3845 // Separator operand is on the stack.
3846 __ ldr(separator, MemOperand(sp, 0));
3848 // Check that the array is a JSArray.
3849 __ JumpIfSmi(array, &bailout);
3850 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
3851 __ b(ne, &bailout);
3853 // Check that the array has fast elements.
3854 __ CheckFastElements(scratch, array_length, &bailout);
3856 // If the array has length zero, return the empty string.
3857 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3858 __ SmiUntag(array_length, SetCC);
3859 __ b(ne, &non_trivial_array);
3860 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
3861 __ b(&done);
3863 __ bind(&non_trivial_array);
3865 // Get the FixedArray containing array's elements.
3866 elements = array;
3867 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3868 array = no_reg; // End of array's live range.
3870 // Check that all array elements are sequential ASCII strings, and
3871 // accumulate the sum of their lengths, as a smi-encoded value.
3872 __ mov(string_length, Operand::Zero());
3873 __ add(element,
3874 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3875 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3876 // Loop condition: while (element < elements_end).
3877 // Live values in registers:
3878 // elements: Fixed array of strings.
3879 // array_length: Length of the fixed array of strings (not smi)
3880 // separator: Separator string
3881 // string_length: Accumulated sum of string lengths (smi).
3882 // element: Current array element.
3883 // elements_end: Array end.
3884 if (generate_debug_code_) {
3885 __ cmp(array_length, Operand::Zero());
3886 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3887 }
3888 __ bind(&loop);
3889 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3890 __ JumpIfSmi(string, &bailout);
3891 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
3892 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3893 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
3894 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3895 __ add(string_length, string_length, Operand(scratch), SetCC);
3896 __ b(vs, &bailout);
3897 __ cmp(element, elements_end);
3898 __ b(lt, &loop);
3900 // If array_length is 1, return elements[0], a string.
3901 __ cmp(array_length, Operand(1));
3902 __ b(ne, &not_size_one_array);
3903 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3904 __ b(&done);
3906 __ bind(&not_size_one_array);
3908 // Live values in registers:
3909 // separator: Separator string
3910 // array_length: Length of the array.
3911 // string_length: Sum of string lengths (smi).
3912 // elements: FixedArray of strings.
3914 // Check that the separator is a flat ASCII string.
3915 __ JumpIfSmi(separator, &bailout);
3916 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
3917 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3918 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
3920 // Add (separator length times array_length) - separator length to the
3921 // string_length to get the length of the result string. array_length is not
3922 // smi but the other values are, so the result is a smi
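// Equivalently, the result length computed below is
//   string_length + separator_length * (array_length - 1)
// evaluated as (string_length - separator_length) + separator_length * array_length.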
3923 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3924 __ sub(string_length, string_length, Operand(scratch));
3925 __ smull(scratch, ip, array_length, scratch);
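// smull leaves the 64-bit product separator_length * array_length in the
// register pair (ip:scratch); it fits in a non-negative smi only if the high
// word is zero and bit 31 of the low word is clear, which is what the
// following checks verify before the final add.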
3926 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3927 // zero.
3928 __ cmp(ip, Operand::Zero());
3929 __ b(ne, &bailout);
3930 __ tst(scratch, Operand(0x80000000));
3931 __ b(ne, &bailout);
3932 __ add(string_length, string_length, Operand(scratch), SetCC);
3933 __ b(vs, &bailout);
3934 __ SmiUntag(string_length);
3936 // Get first element in the array to free up the elements register to be used
3937 // for the result.
3938 __ add(element,
3939 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3940 result = elements; // End of live range for elements.
3941 elements = no_reg;
3942 // Live values in registers:
3943 // element: First array element
3944 // separator: Separator string
3945 // string_length: Length of result string (not smi)
3946 // array_length: Length of the array.
3947 __ AllocateAsciiString(result,
3948 string_length,
3949 scratch,
3950 string, // used as scratch
3951 elements_end, // used as scratch
3952 &bailout);
3953 // Prepare for looping. Set up elements_end to end of the array. Set
3954 // result_pos to the position of the result where to write the first
3955 // character.
3956 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3957 result_pos = array_length; // End of live range for array_length.
3958 array_length = no_reg;
3960 __ add(result_pos, result,
3961 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3963 // Check the length of the separator.
3964 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3965 __ cmp(scratch, Operand(Smi::FromInt(1)));
3966 __ b(eq, &one_char_separator);
3967 __ b(gt, &long_separator);
3969 // Empty separator case
3970 __ bind(&empty_separator_loop);
3971 // Live values in registers:
3972 // result_pos: the position to which we are currently copying characters.
3973 // element: Current array element.
3974 // elements_end: Array end.
3976 // Copy next array element to the result.
3977 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3978 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3979 __ SmiUntag(string_length);
3980 __ add(string,
3981 string,
3982 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3983 __ CopyBytes(string, result_pos, string_length, scratch);
3984 __ cmp(element, elements_end);
3985 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3986 DCHECK(result.is(r0));
3987 __ b(&done);
3989 // One-character separator case
3990 __ bind(&one_char_separator);
3991 // Replace separator with its ASCII character value.
3992 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3993 // Jump into the loop after the code that copies the separator, so the first
3994 // element is not preceded by a separator
3995 __ jmp(&one_char_separator_loop_entry);
3997 __ bind(&one_char_separator_loop);
3998 // Live values in registers:
3999 // result_pos: the position to which we are currently copying characters.
4000 // element: Current array element.
4001 // elements_end: Array end.
4002 // separator: Single separator ASCII char (in lower byte).
4004 // Copy the separator character to the result.
4005 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4007 // Copy next array element to the result.
4008 __ bind(&one_char_separator_loop_entry);
4009 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4010 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4011 __ SmiUntag(string_length);
4012 __ add(string,
4013 string,
4014 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4015 __ CopyBytes(string, result_pos, string_length, scratch);
4016 __ cmp(element, elements_end);
4017 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4018 DCHECK(result.is(r0));
4019 __ b(&done);
4021 // Long separator case (separator is more than one character). Entry is at the
4022 // label long_separator below.
4023 __ bind(&long_separator_loop);
4024 // Live values in registers:
4025 // result_pos: the position to which we are currently copying characters.
4026 // element: Current array element.
4027 // elements_end: Array end.
4028 // separator: Separator string.
4030 // Copy the separator to the result.
4031 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4032 __ SmiUntag(string_length);
4033 __ add(string,
4034 separator,
4035 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4036 __ CopyBytes(string, result_pos, string_length, scratch);
4038 __ bind(&long_separator);
4039 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4040 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4041 __ SmiUntag(string_length);
4042 __ add(string,
4043 string,
4044 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4045 __ CopyBytes(string, result_pos, string_length, scratch);
4046 __ cmp(element, elements_end);
4047 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4048 DCHECK(result.is(r0));
4049 __ b(&done);
4051 __ bind(&bailout);
4052 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4053 __ bind(&done);
4054 context()->Plug(r0);
4055 }
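// On any bailout the intrinsic returns undefined rather than a string;
// presumably the JS caller (Array.prototype.join) treats that as "fast path
// unavailable" and falls back to the generic join implementation.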
4058 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4059 DCHECK(expr->arguments()->length() == 0);
4060 ExternalReference debug_is_active =
4061 ExternalReference::debug_is_active_address(isolate());
4062 __ mov(ip, Operand(debug_is_active));
4063 __ ldrb(r0, MemOperand(ip));
4064 __ SmiTag(r0);
4065 context()->Plug(r0);
4066 }
4069 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4070 if (expr->function() != NULL &&
4071 expr->function()->intrinsic_type == Runtime::INLINE) {
4072 Comment cmnt(masm_, "[ InlineRuntimeCall");
4073 EmitInlineRuntimeCall(expr);
4074 return;
4075 }
4077 Comment cmnt(masm_, "[ CallRuntime");
4078 ZoneList<Expression*>* args = expr->arguments();
4079 int arg_count = args->length();
4081 if (expr->is_jsruntime()) {
4082 // Push the builtins object as the receiver.
4083 Register receiver = LoadIC::ReceiverRegister();
4084 __ ldr(receiver, GlobalObjectOperand());
4085 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4086 __ push(receiver);
4088 // Load the function from the receiver.
4089 __ mov(LoadIC::NameRegister(), Operand(expr->name()));
4090 if (FLAG_vector_ics) {
4091 __ mov(LoadIC::SlotRegister(),
4092 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4093 CallLoadIC(NOT_CONTEXTUAL);
4094 } else {
4095 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4096 }
4098 // Push the target function under the receiver.
4099 __ ldr(ip, MemOperand(sp, 0));
4100 __ push(ip);
4101 __ str(r0, MemOperand(sp, kPointerSize));
4103 // Push the arguments ("left-to-right").
4104 int arg_count = args->length();
4105 for (int i = 0; i < arg_count; i++) {
4106 VisitForStackValue(args->at(i));
4107 }
4109 // Record source position of the IC call.
4110 SetSourcePosition(expr->position());
4111 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4112 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4113 __ CallStub(&stub);
4115 // Restore context register.
4116 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4118 context()->DropAndPlug(1, r0);
4119 } else {
4120 // Push the arguments ("left-to-right").
4121 for (int i = 0; i < arg_count; i++) {
4122 VisitForStackValue(args->at(i));
4123 }
4125 // Call the C runtime function.
4126 __ CallRuntime(expr->function(), arg_count);
4127 context()->Plug(r0);
4128 }
4129 }
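// Two flavours of runtime call are handled above: %foo() calls into the C++
// runtime directly via CallRuntime, while calls that resolve to JS builtins
// load the function off the builtins object, push it under the receiver and
// go through CallFunctionStub like an ordinary call.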
4132 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4133 switch (expr->op()) {
4134 case Token::DELETE: {
4135 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4136 Property* property = expr->expression()->AsProperty();
4137 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4139 if (property != NULL) {
4140 VisitForStackValue(property->obj());
4141 VisitForStackValue(property->key());
4142 __ mov(r1, Operand(Smi::FromInt(strict_mode())));
4143 __ push(r1);
4144 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4145 context()->Plug(r0);
4146 } else if (proxy != NULL) {
4147 Variable* var = proxy->var();
4148 // Delete of an unqualified identifier is disallowed in strict mode
4149 // but "delete this" is allowed.
4150 DCHECK(strict_mode() == SLOPPY || var->is_this());
4151 if (var->IsUnallocated()) {
4152 __ ldr(r2, GlobalObjectOperand());
4153 __ mov(r1, Operand(var->name()));
4154 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4155 __ Push(r2, r1, r0);
4156 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4157 context()->Plug(r0);
4158 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4159 // Result of deleting non-global, non-dynamic variables is false.
4160 // The subexpression does not have side effects.
4161 context()->Plug(var->is_this());
4162 } else {
4163 // Non-global variable. Call the runtime to try to delete from the
4164 // context where the variable was introduced.
4165 DCHECK(!context_register().is(r2));
4166 __ mov(r2, Operand(var->name()));
4167 __ Push(context_register(), r2);
4168 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4169 context()->Plug(r0);
4170 }
4171 } else {
4172 // Result of deleting non-property, non-variable reference is true.
4173 // The subexpression may have side effects.
4174 VisitForEffect(expr->expression());
4175 context()->Plug(true);
4176 }
4177 break;
4178 }
4180 case Token::VOID: {
4181 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4182 VisitForEffect(expr->expression());
4183 context()->Plug(Heap::kUndefinedValueRootIndex);
4184 break;
4185 }
4187 case Token::NOT: {
4188 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4189 if (context()->IsEffect()) {
4190 // Unary NOT has no side effects so it's only necessary to visit the
4191 // subexpression. Match the optimizing compiler by not branching.
4192 VisitForEffect(expr->expression());
4193 } else if (context()->IsTest()) {
4194 const TestContext* test = TestContext::cast(context());
4195 // The labels are swapped for the recursive call.
4196 VisitForControl(expr->expression(),
4197 test->false_label(),
4198 test->true_label(),
4199 test->fall_through());
4200 context()->Plug(test->true_label(), test->false_label());
4202 // We handle value contexts explicitly rather than simply visiting
4203 // for control and plugging the control flow into the context,
4204 // because we need to prepare a pair of extra administrative AST ids
4205 // for the optimizing compiler.
4206 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4207 Label materialize_true, materialize_false, done;
4208 VisitForControl(expr->expression(),
4209 &materialize_false,
4210 &materialize_true,
4211 &materialize_true);
4212 __ bind(&materialize_true);
4213 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4214 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4215 if (context()->IsStackValue()) __ push(r0);
4216 __ jmp(&done);
4217 __ bind(&materialize_false);
4218 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4219 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4220 if (context()->IsStackValue()) __ push(r0);
4221 __ bind(&done);
4222 }
4223 break;
4224 }
4226 case Token::TYPEOF: {
4227 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4228 { StackValueContext context(this);
4229 VisitForTypeofValue(expr->expression());
4230 }
4231 __ CallRuntime(Runtime::kTypeof, 1);
4232 context()->Plug(r0);
4233 break;
4234 }
4236 default:
4237 UNREACHABLE();
4238 }
4239 }
4242 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4243 DCHECK(expr->expression()->IsValidReferenceExpression());
4245 Comment cmnt(masm_, "[ CountOperation");
4246 SetSourcePosition(expr->position());
4248 // Expression can only be a property, a global or a (parameter or local)
4249 // slot variable.
4250 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4251 LhsKind assign_type = VARIABLE;
4252 Property* prop = expr->expression()->AsProperty();
4253 // In case of a property we use the uninitialized expression context
4254 // of the key to detect a named property.
4255 if (prop != NULL) {
4256 assign_type =
4257 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4258 }
4260 // Evaluate expression and get value.
4261 if (assign_type == VARIABLE) {
4262 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4263 AccumulatorValueContext context(this);
4264 EmitVariableLoad(expr->expression()->AsVariableProxy());
4265 } else {
4266 // Reserve space for result of postfix operation.
4267 if (expr->is_postfix() && !context()->IsEffect()) {
4268 __ mov(ip, Operand(Smi::FromInt(0)));
4269 __ push(ip);
4270 }
4271 if (assign_type == NAMED_PROPERTY) {
4272 // Put the object both on the stack and in the register.
4273 VisitForStackValue(prop->obj());
4274 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
4275 EmitNamedPropertyLoad(prop);
4276 } else {
4277 VisitForStackValue(prop->obj());
4278 VisitForStackValue(prop->key());
4279 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
4280 __ ldr(LoadIC::NameRegister(), MemOperand(sp, 0));
4281 EmitKeyedPropertyLoad(prop);
4282 }
4283 }
4285 // We need a second deoptimization point after loading the value
4286 // in case evaluating the property load may have a side effect.
4287 if (assign_type == VARIABLE) {
4288 PrepareForBailout(expr->expression(), TOS_REG);
4289 } else {
4290 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4291 }
4293 // Inline smi case if we are in a loop.
4294 Label stub_call, done;
4295 JumpPatchSite patch_site(masm_);
4297 int count_value = expr->op() == Token::INC ? 1 : -1;
4298 if (ShouldInlineSmiCase(expr->op())) {
4299 Label slow;
4300 patch_site.EmitJumpIfNotSmi(r0, &slow);
4302 // Save result for postfix expressions.
4303 if (expr->is_postfix()) {
4304 if (!context()->IsEffect()) {
4305 // Save the result on the stack. If we have a named or keyed property
4306 // we store the result under the receiver that is currently on top
4307 // of the stack.
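// The store offsets below mirror what is already on the stack: for a named
// property the receiver sits on top, so the saved value goes one slot down;
// for a keyed property both receiver and key are on top, so it goes two
// slots down.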
4308 switch (assign_type) {
4309 case VARIABLE:
4310 __ push(r0);
4311 break;
4312 case NAMED_PROPERTY:
4313 __ str(r0, MemOperand(sp, kPointerSize));
4314 break;
4315 case KEYED_PROPERTY:
4316 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4317 break;
4318 }
4319 }
4320 }
4322 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4323 __ b(vc, &done);
4324 // Call stub. Undo operation first.
4325 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4326 __ jmp(&stub_call);
4327 __ bind(&slow);
4328 }
4329 ToNumberStub convert_stub(isolate());
4330 __ CallStub(&convert_stub);
4332 // Save result for postfix expressions.
4333 if (expr->is_postfix()) {
4334 if (!context()->IsEffect()) {
4335 // Save the result on the stack. If we have a named or keyed property
4336 // we store the result under the receiver that is currently on top
4337 // of the stack.
4338 switch (assign_type) {
4339 case VARIABLE:
4340 __ push(r0);
4341 break;
4342 case NAMED_PROPERTY:
4343 __ str(r0, MemOperand(sp, kPointerSize));
4344 break;
4345 case KEYED_PROPERTY:
4346 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4347 break;
4348 }
4349 }
4350 }
4353 __ bind(&stub_call);
4354 __ mov(r1, r0);
4355 __ mov(r0, Operand(Smi::FromInt(count_value)));
4357 // Record position before stub call.
4358 SetSourcePosition(expr->position());
4360 BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
4361 CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
4362 patch_site.EmitPatchInfo();
4363 __ bind(&done);
4365 // Store the value returned in r0.
4366 switch (assign_type) {
4367 case VARIABLE:
4368 if (expr->is_postfix()) {
4369 { EffectContext context(this);
4370 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4371 Token::ASSIGN);
4372 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4373 context.Plug(r0);
4374 }
4375 // For all contexts except EffectContext we have the result on
4376 // top of the stack.
4377 if (!context()->IsEffect()) {
4378 context()->PlugTOS();
4379 }
4380 } else {
4381 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4382 Token::ASSIGN);
4383 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4384 context()->Plug(r0);
4385 }
4386 break;
4387 case NAMED_PROPERTY: {
4388 __ mov(StoreIC::NameRegister(),
4389 Operand(prop->key()->AsLiteral()->value()));
4390 __ pop(StoreIC::ReceiverRegister());
4391 CallStoreIC(expr->CountStoreFeedbackId());
4392 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4393 if (expr->is_postfix()) {
4394 if (!context()->IsEffect()) {
4395 context()->PlugTOS();
4396 }
4397 } else {
4398 context()->Plug(r0);
4399 }
4400 break;
4401 }
4402 case KEYED_PROPERTY: {
4403 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
4404 Handle<Code> ic = strict_mode() == SLOPPY
4405 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4406 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4407 CallIC(ic, expr->CountStoreFeedbackId());
4408 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4409 if (expr->is_postfix()) {
4410 if (!context()->IsEffect()) {
4411 context()->PlugTOS();
4412 }
4413 } else {
4414 context()->Plug(r0);
4415 }
4416 break;
4417 }
4418 }
4419 }
4422 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4423 DCHECK(!context()->IsEffect());
4424 DCHECK(!context()->IsTest());
4425 VariableProxy* proxy = expr->AsVariableProxy();
4426 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4427 Comment cmnt(masm_, "[ Global variable");
4428 __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
4429 __ mov(LoadIC::NameRegister(), Operand(proxy->name()));
4430 if (FLAG_vector_ics) {
4431 __ mov(LoadIC::SlotRegister(),
4432 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4433 }
4434 // Use a regular load, not a contextual load, to avoid a reference
4435 // error.
4436 CallLoadIC(NOT_CONTEXTUAL);
4437 PrepareForBailout(expr, TOS_REG);
4438 context()->Plug(r0);
4439 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4440 Comment cmnt(masm_, "[ Lookup slot");
4441 Label done, slow;
4443 // Generate code for loading from variables potentially shadowed
4444 // by eval-introduced variables.
4445 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4447 __ bind(&slow);
4448 __ mov(r0, Operand(proxy->name()));
4449 __ Push(cp, r0);
4450 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4451 PrepareForBailout(expr, TOS_REG);
4452 __ bind(&done);
4454 context()->Plug(r0);
4455 } else {
4456 // This expression cannot throw a reference error at the top level.
4457 VisitInDuplicateContext(expr);
4458 }
4459 }
4462 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4463 Expression* sub_expr,
4464 Handle<String> check) {
4465 Label materialize_true, materialize_false;
4466 Label* if_true = NULL;
4467 Label* if_false = NULL;
4468 Label* fall_through = NULL;
4469 context()->PrepareTest(&materialize_true, &materialize_false,
4470 &if_true, &if_false, &fall_through);
4472 { AccumulatorValueContext context(this);
4473 VisitForTypeofValue(sub_expr);
4474 }
4475 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4477 Factory* factory = isolate()->factory();
4478 if (String::Equals(check, factory->number_string())) {
4479 __ JumpIfSmi(r0, if_true);
4480 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4481 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4482 __ cmp(r0, ip);
4483 Split(eq, if_true, if_false, fall_through);
4484 } else if (String::Equals(check, factory->string_string())) {
4485 __ JumpIfSmi(r0, if_false);
4486 // Check for undetectable objects => false.
4487 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4488 __ b(ge, if_false);
4489 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4490 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4491 Split(eq, if_true, if_false, fall_through);
4492 } else if (String::Equals(check, factory->symbol_string())) {
4493 __ JumpIfSmi(r0, if_false);
4494 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4495 Split(eq, if_true, if_false, fall_through);
4496 } else if (String::Equals(check, factory->boolean_string())) {
4497 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4498 __ b(eq, if_true);
4499 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4500 Split(eq, if_true, if_false, fall_through);
4501 } else if (String::Equals(check, factory->undefined_string())) {
4502 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4503 __ b(eq, if_true);
4504 __ JumpIfSmi(r0, if_false);
4505 // Check for undetectable objects => true.
4506 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4507 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4508 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4509 Split(ne, if_true, if_false, fall_through);
4511 } else if (String::Equals(check, factory->function_string())) {
4512 __ JumpIfSmi(r0, if_false);
4513 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4514 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4515 __ b(eq, if_true);
4516 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4517 Split(eq, if_true, if_false, fall_through);
4518 } else if (String::Equals(check, factory->object_string())) {
4519 __ JumpIfSmi(r0, if_false);
4520 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4521 __ b(eq, if_true);
4522 // Check for JS objects => true.
4523 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4524 __ b(lt, if_false);
4525 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4526 __ b(gt, if_false);
4527 // Check for undetectable objects => false.
4528 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4529 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4530 Split(eq, if_true, if_false, fall_through);
4531 } else {
4532 if (if_false != fall_through) __ jmp(if_false);
4533 }
4534 context()->Plug(if_true, if_false);
4535 }
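// Each branch above implements one possible result of typeof: smis and heap
// numbers are "number", undetectable objects deliberately report
// "undefined", and only JS_FUNCTION/JS_FUNCTION_PROXY map to "function";
// any unrecognised literal falls through to if_false.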
4538 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4539 Comment cmnt(masm_, "[ CompareOperation");
4540 SetSourcePosition(expr->position());
4542 // First we try a fast inlined version of the compare when one of
4543 // the operands is a literal.
4544 if (TryLiteralCompare(expr)) return;
4546 // Always perform the comparison for its control flow. Pack the result
4547 // into the expression's context after the comparison is performed.
4548 Label materialize_true, materialize_false;
4549 Label* if_true = NULL;
4550 Label* if_false = NULL;
4551 Label* fall_through = NULL;
4552 context()->PrepareTest(&materialize_true, &materialize_false,
4553 &if_true, &if_false, &fall_through);
4555 Token::Value op = expr->op();
4556 VisitForStackValue(expr->left());
4557 switch (op) {
4558 case Token::IN:
4559 VisitForStackValue(expr->right());
4560 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4561 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4562 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4563 __ cmp(r0, ip);
4564 Split(eq, if_true, if_false, fall_through);
4565 break;
4567 case Token::INSTANCEOF: {
4568 VisitForStackValue(expr->right());
4569 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4570 __ CallStub(&stub);
4571 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4572 // The stub returns 0 for true.
4573 __ tst(r0, r0);
4574 Split(eq, if_true, if_false, fall_through);
4575 break;
4576 }
4578 default: {
4579 VisitForAccumulatorValue(expr->right());
4580 Condition cond = CompareIC::ComputeCondition(op);
4581 __ pop(r1);
4583 bool inline_smi_code = ShouldInlineSmiCase(op);
4584 JumpPatchSite patch_site(masm_);
4585 if (inline_smi_code) {
4586 Label slow_case;
4587 __ orr(r2, r0, Operand(r1));
4588 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4589 __ cmp(r1, r0);
4590 Split(cond, if_true, if_false, NULL);
4591 __ bind(&slow_case);
4592 }
4594 // Record position and call the compare IC.
4595 SetSourcePosition(expr->position());
4596 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4597 CallIC(ic, expr->CompareOperationFeedbackId());
4598 patch_site.EmitPatchInfo();
4599 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4600 __ cmp(r0, Operand::Zero());
4601 Split(cond, if_true, if_false, fall_through);
4602 }
4603 }
4605 // Convert the result of the comparison into one expected for this
4606 // expression's context.
4607 context()->Plug(if_true, if_false);
4611 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4612 Expression* sub_expr,
4613 NilValue nil) {
4614 Label materialize_true, materialize_false;
4615 Label* if_true = NULL;
4616 Label* if_false = NULL;
4617 Label* fall_through = NULL;
4618 context()->PrepareTest(&materialize_true, &materialize_false,
4619 &if_true, &if_false, &fall_through);
4621 VisitForAccumulatorValue(sub_expr);
4622 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4623 if (expr->op() == Token::EQ_STRICT) {
4624 Heap::RootListIndex nil_value = nil == kNullValue ?
4625 Heap::kNullValueRootIndex :
4626 Heap::kUndefinedValueRootIndex;
4627 __ LoadRoot(r1, nil_value);
4628 __ cmp(r0, r1);
4629 Split(eq, if_true, if_false, fall_through);
4630 } else {
4631 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4632 CallIC(ic, expr->CompareOperationFeedbackId());
4633 __ cmp(r0, Operand(0));
4634 Split(ne, if_true, if_false, fall_through);
4635 }
4636 context()->Plug(if_true, if_false);
4637 }
4640 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4641 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4642 context()->Plug(r0);
4646 Register FullCodeGenerator::result_register() {
4647 return r0;
4648 }
4651 Register FullCodeGenerator::context_register() {
4652 return cp;
4653 }
4656 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4657 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4658 __ str(value, MemOperand(fp, frame_offset));
4662 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4663 __ ldr(dst, ContextOperand(cp, context_index));
4667 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4668 Scope* declaration_scope = scope()->DeclarationScope();
4669 if (declaration_scope->is_global_scope() ||
4670 declaration_scope->is_module_scope()) {
4671 // Contexts nested in the native context have a canonical empty function
4672 // as their closure, not the anonymous closure containing the global
4673 // code. Pass a smi sentinel and let the runtime look up the empty
4674 // function.
4675 __ mov(ip, Operand(Smi::FromInt(0)));
4676 } else if (declaration_scope->is_eval_scope()) {
4677 // Contexts created by a call to eval have the same closure as the
4678 // context calling eval, not the anonymous closure containing the eval
4679 // code. Fetch it from the context.
4680 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4681 } else {
4682 DCHECK(declaration_scope->is_function_scope());
4683 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4684 }
4685 __ push(ip);
4686 }
4689 // ----------------------------------------------------------------------------
4690 // Non-local control flow support.
4692 void FullCodeGenerator::EnterFinallyBlock() {
4693 DCHECK(!result_register().is(r1));
4694 // Store result register while executing finally block.
4695 __ push(result_register());
4696 // Cook return address in link register to stack (smi encoded Code* delta)
4697 __ sub(r1, lr, Operand(masm_->CodeObject()));
4698 __ SmiTag(r1);
4700 // Store the cooked return address while executing the finally block.
4701 __ push(r1);
4703 // Store pending message while executing finally block.
4704 ExternalReference pending_message_obj =
4705 ExternalReference::address_of_pending_message_obj(isolate());
4706 __ mov(ip, Operand(pending_message_obj));
4707 __ ldr(r1, MemOperand(ip));
4708 __ push(r1);
4710 ExternalReference has_pending_message =
4711 ExternalReference::address_of_has_pending_message(isolate());
4712 __ mov(ip, Operand(has_pending_message));
4713 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4714 __ ldrb(r1, MemOperand(ip));
4715 __ SmiTag(r1);
4716 __ push(r1);
4718 ExternalReference pending_message_script =
4719 ExternalReference::address_of_pending_message_script(isolate());
4720 __ mov(ip, Operand(pending_message_script));
4721 __ ldr(r1, MemOperand(ip));
4722 __ push(r1);
4723 }
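// EnterFinallyBlock saves, in push order: the result register, the cooked
// (smi-encoded, Code*-relative) return address, and the three pending-message
// values. ExitFinallyBlock below pops the same values in reverse order and
// finally jumps back via the uncooked return address.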
4726 void FullCodeGenerator::ExitFinallyBlock() {
4727 DCHECK(!result_register().is(r1));
4728 // Restore pending message from stack.
4729 __ pop(r1);
4730 ExternalReference pending_message_script =
4731 ExternalReference::address_of_pending_message_script(isolate());
4732 __ mov(ip, Operand(pending_message_script));
4733 __ str(r1, MemOperand(ip));
4735 __ pop(r1);
4736 __ SmiUntag(r1);
4737 ExternalReference has_pending_message =
4738 ExternalReference::address_of_has_pending_message(isolate());
4739 __ mov(ip, Operand(has_pending_message));
4740 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4741 __ strb(r1, MemOperand(ip));
4743 __ pop(r1);
4744 ExternalReference pending_message_obj =
4745 ExternalReference::address_of_pending_message_obj(isolate());
4746 __ mov(ip, Operand(pending_message_obj));
4747 __ str(r1, MemOperand(ip));
4749 // Restore the cooked return address from the stack.
4750 __ pop(r1);
4752 // Restore the result register, then uncook the return address and return.
4753 __ pop(result_register());
4754 __ SmiUntag(r1);
4755 __ add(pc, r1, Operand(masm_->CodeObject()));
4756 }
4758 #undef __
4761 #define __ ACCESS_MASM(masm())
4763 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4764 int* stack_depth,
4765 int* context_length) {
4766 // The macros used here must preserve the result register.
4768 // Because the handler block contains the context of the finally
4769 // code, we can restore it directly from there for the finally code
4770 // rather than iteratively unwinding contexts via their previous
4771 // links.
4772 __ Drop(*stack_depth); // Down to the handler block.
4773 if (*context_length > 0) {
4774 // Restore the context to its dedicated register and the stack.
4775 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4776 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4777 }
4778 __ PopTryHandler();
4779 __ bl(finally_entry_);
4781 *stack_depth = 0;
4782 *context_length = 0;
4783 return previous_;
4784 }
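// Back-edge patching support. GetInterruptImmediateLoadAddress locates the
// instruction(s) that materialise the interrupt/OSR stub address for a back
// edge, so that PatchAt can retarget that load and flip the preceding
// conditional branch to a nop (or back) when switching between the plain
// interrupt check and on-stack replacement.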
4790 static Address GetInterruptImmediateLoadAddress(Address pc) {
4791 Address load_address = pc - 2 * Assembler::kInstrSize;
4792 if (!FLAG_enable_ool_constant_pool) {
4793 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4794 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4795 // This is an extended constant pool lookup.
4796 if (CpuFeatures::IsSupported(ARMv7)) {
4797 load_address -= 2 * Assembler::kInstrSize;
4798 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4799 DCHECK(Assembler::IsMovT(
4800 Memory::int32_at(load_address + Assembler::kInstrSize)));
4801 } else {
4802 load_address -= 4 * Assembler::kInstrSize;
4803 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4804 DCHECK(Assembler::IsOrrImmed(
4805 Memory::int32_at(load_address + Assembler::kInstrSize)));
4806 DCHECK(Assembler::IsOrrImmed(
4807 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4808 DCHECK(Assembler::IsOrrImmed(
4809 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4810 }
4811 } else if (CpuFeatures::IsSupported(ARMv7) &&
4812 Assembler::IsMovT(Memory::int32_at(load_address))) {
4813 // This is a movw / movt immediate load.
4814 load_address -= Assembler::kInstrSize;
4815 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4816 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4817 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4818 // This is a mov / orr immediate load.
4819 load_address -= 3 * Assembler::kInstrSize;
4820 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4821 DCHECK(Assembler::IsOrrImmed(
4822 Memory::int32_at(load_address + Assembler::kInstrSize)));
4823 DCHECK(Assembler::IsOrrImmed(
4824 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4825 } else {
4826 // This is a small constant pool lookup.
4827 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4828 }
4829 return load_address;
4830 }
4833 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4834 Address pc,
4835 BackEdgeState target_state,
4836 Code* replacement_code) {
4837 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4838 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4839 CodePatcher patcher(branch_address, 1);
4840 switch (target_state) {
4841 case INTERRUPT:
4842 {
4843 // <decrement profiling counter>
4844 // bpl ok
4845 // ; load interrupt stub address into ip - either of (for ARMv7):
4846 // ; <small cp load> | <extended cp load> | <immediate load>
4847 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4848 // | movt ip, #imm | movt ip, #imm
4849 // | ldr ip, [pp, ip]
4850 // ; or (for ARMv6):
4851 // ; <small cp load> | <extended cp load> | <immediate load>
4852 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4853 // | orr ip, ip, #imm | orr ip, ip, #imm
4854 // | orr ip, ip, #imm | orr ip, ip, #imm
4855 // | orr ip, ip, #imm | orr ip, ip, #imm
4856 // blx ip
4857 // <reset profiling counter>
4858 // ok-label
4860 // Calculate branch offset to the ok-label - this is the difference
4861 // between the branch address and |pc| (which points at <blx ip>) plus
4862 // kProfileCounterResetSequence instructions
4863 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4864 kProfileCounterResetSequenceLength;
4865 patcher.masm()->b(branch_offset, pl);
4866 break;
4867 }
4868 case ON_STACK_REPLACEMENT:
4869 case OSR_AFTER_STACK_CHECK:
4870 // <decrement profiling counter>
4872 // ; load on-stack replacement address into ip - either of (for ARMv7):
4873 // ; <small cp load> | <extended cp load> | <immediate load>
4874 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4875 // | movt ip, #imm | movt ip, #imm
4876 // | ldr ip, [pp, ip]
4877 // ; or (for ARMv6):
4878 // ; <small cp load> | <extended cp load> | <immediate load>
4879 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4880 // | orr ip, ip, #imm | orr ip, ip, #imm
4881 // | orr ip, ip, #imm | orr ip, ip, #imm
4882 // | orr ip, ip, #imm | orr ip, ip, #imm
4883 // blx ip
4884 // <reset profiling counter>
4885 // ok-label
4886 patcher.masm()->nop();
4887 break;
4888 }
4890 // Replace the call address.
4891 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
4892 replacement_code->entry());
4894 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4895 unoptimized_code, pc_immediate_load_address, replacement_code);
4899 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4900 Isolate* isolate,
4901 Code* unoptimized_code,
4902 Address pc) {
4903 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
4905 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4906 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4907 Address interrupt_address = Assembler::target_address_at(
4908 pc_immediate_load_address, unoptimized_code);
4910 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4911 DCHECK(interrupt_address ==
4912 isolate->builtins()->InterruptCheck()->entry());
4913 return INTERRUPT;
4914 }
4916 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
4918 if (interrupt_address ==
4919 isolate->builtins()->OnStackReplacement()->entry()) {
4920 return ON_STACK_REPLACEMENT;
4921 }
4923 DCHECK(interrupt_address ==
4924 isolate->builtins()->OsrAfterStackCheck()->entry());
4925 return OSR_AFTER_STACK_CHECK;
4926 }
4929 } } // namespace v8::internal
4931 #endif // V8_TARGET_ARCH_ARM