1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
20 #include "src/arm/code-stubs-arm.h"
21 #include "src/arm/macro-assembler-arm.h"
26 #define __ ACCESS_MASM(masm_)
// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. This marker is a
// cmp rx, #yyy instruction, and x * 0x00000fff + yyy (where yyy is the raw
// 12 bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
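//
// Worked example (illustrative values, not generated code): with
// kOff12Mask == 0xfff, a delta of 4660 (as returned by
// InstructionsGeneratedSince) is recorded by EmitPatchInfo as
//   cmp r1, #0x235   @ 4660 / 0xfff == 1 (register code), 4660 % 0xfff == 0x235
// and recovered by the patcher as 1 * 0xfff + 0x235 == 4660.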
35 class JumpPatchSite BASE_EMBEDDED {
37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
39 info_emitted_ = false;
44 DCHECK(patch_site_.is_bound() == info_emitted_);
  // When this code is initially emitted, ensure that a jump is always
  // generated to skip the inlined smi code.
49 void EmitJumpIfNotSmi(Register reg, Label* target) {
50 DCHECK(!patch_site_.is_bound() && !info_emitted_);
51 Assembler::BlockConstPoolScope block_const_pool(masm_);
52 __ bind(&patch_site_);
53 __ cmp(reg, Operand(reg));
54 __ b(eq, target); // Always taken before patched.
  // When this code is initially emitted, ensure that a jump is never
  // generated to skip the inlined smi code.
59 void EmitJumpIfSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockConstPoolScope block_const_pool(masm_);
62 __ bind(&patch_site_);
63 __ cmp(reg, Operand(reg));
64 __ b(ne, target); // Never taken before patched.
67 void EmitPatchInfo() {
68 // Block literal pool emission whilst recording patch site information.
69 Assembler::BlockConstPoolScope block_const_pool(masm_);
70 if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
      __ nop();  // Signals no inlined smi code.
84 MacroAssembler* masm_;
92 // Generate code for a JS function. On entry to the function the receiver
93 // and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
97 // The live registers are:
98 // o r1: the JS function object being called (i.e., ourselves)
100 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
101 // o fp: our caller's frame pointer
102 // o sp: stack pointer
103 // o lr: return address
105 // The function builds a JS frame. Please see JavaScriptFrameConstants in
106 // frames-arm.h for its layout.
107 void FullCodeGenerator::Generate() {
108 CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
112 profiling_counter_ = isolate()->factory()->NewCell(
113 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
114 SetFunctionPosition(function());
115 Comment cmnt(masm_, "[ function compiled by full code generator");
117 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
120 if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
126 // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
129 if (info->strict_mode() == SLOPPY && !info->is_native()) {
131 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
132 __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    Label ok;
    __ b(ne, &ok);
    __ ldr(r2, GlobalObjectOperand());
137 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
144 // Open a frame scope to indicate that there is a frame on the stack. The
145 // MANUAL indicates that the scope shouldn't actually generate code to set up
146 // the frame (that is done below).
147 FrameScope frame_scope(masm_, StackFrame::MANUAL);
149 info->set_prologue_offset(masm_->pc_offset());
150 __ Prologue(info->IsCodePreAgingActive());
151 info->AddNoFrameRange(0, masm_->pc_offset());
153 { Comment cmnt(masm_, "[ Allocate locals");
154 int locals_count = info->scope()->num_stack_slots();
155 // Generators allocate locals, if any, in context slots.
156 DCHECK(!info->function()->is_generator() || locals_count == 0);
157 if (locals_count > 0) {
158 if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
167 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
169 if (locals_count >= kMaxPushes) {
170 int loop_iterations = locals_count / kMaxPushes;
171 __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
178 // Continue loop if not done.
179 __ sub(r2, r2, Operand(1), SetCC);
180 __ b(&loop_header, ne);
182 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
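      // Worked example (illustrative): with locals_count == 70 and
      // kMaxPushes == 32, the loop above runs 70 / 32 == 2 iterations of
      // 32 pushes each, and the remaining 70 % 32 == 6 slots are filled by
      // these unrolled pushes.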
190 bool function_in_register = true;
192 // Possibly allocate a local context.
193 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194 if (heap_slots > 0) {
195 // Argument to NewContext is the function, which is still in r1.
196 Comment cmnt(masm_, "[ Allocate context");
197 bool need_write_barrier = true;
198 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
200 __ Push(info->scope()->GetScopeInfo());
201 __ CallRuntime(Runtime::kNewGlobalContext, 2);
202 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
205 // Result of FastNewContextStub is always in new space.
206 need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
211 function_in_register = false;
212 // Context is returned in r0. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters();
218 for (int i = 0; i < num_parameters; i++) {
219 Variable* var = scope()->parameter(i);
220 if (var->IsContextSlot()) {
221 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
222 (num_parameters - 1 - i) * kPointerSize;
223 // Load parameter from stack.
224 __ ldr(r0, MemOperand(fp, parameter_offset));
225 // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);
229 // Update the write barrier.
230 if (need_write_barrier) {
231 __ RecordWriteContextSlot(
232 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
233 } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
243 Variable* arguments = scope()->arguments();
244 if (arguments != NULL) {
245 // Function uses arguments object.
246 Comment cmnt(masm_, "[ Allocate arguments object");
247 if (!function_in_register) {
248 // Load this again, if it's used by the local context below.
249 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
253 // Receiver is just before the parameters on the caller's stack.
254 int num_parameters = info->scope()->num_parameters();
255 int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
258 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
261 // Arguments to ArgumentsAccessStub:
262 // function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
265 ArgumentsAccessStub::Type type;
266 if (strict_mode() == STRICT) {
267 type = ArgumentsAccessStub::NEW_STRICT;
268 } else if (function()->has_duplicate_parameters()) {
269 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
271 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
273 ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
285 if (scope()->HasIllegalRedeclaration()) {
286 Comment cmnt(masm_, "[ Declarations");
287 scope()->VisitIllegalRedeclaration(this);
290 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
291 { Comment cmnt(masm_, "[ Declarations");
    // For named function expressions, declare the function name as a
    // constant.
294 if (scope()->is_function_scope() && scope()->function() != NULL) {
295 VariableDeclaration* function = scope()->function();
296 DCHECK(function->proxy()->var()->mode() == CONST ||
297 function->proxy()->var()->mode() == CONST_LEGACY);
298 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
299 VisitVariableDeclaration(function);
301 VisitDeclarations(scope()->declarations());
304 { Comment cmnt(masm_, "[ Stack check");
305 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(masm_,
        masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
317 { Comment cmnt(masm_, "[ Body");
318 DCHECK(loop_depth() == 0);
319 VisitStatements(function()->body());
320 DCHECK(loop_depth() == 0);
  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
326 { Comment cmnt(masm_, "[ return <undefined>;");
327 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
329 EmitReturnSequence();
331 // Force emit the constant pool, so it doesn't get emitted in the middle
332 // of the back edge table.
333 masm()->CheckConstPool(true, false);
337 void FullCodeGenerator::ClearAccumulator() {
338 __ mov(r0, Operand(Smi::FromInt(0)));
342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
343 __ mov(r2, Operand(profiling_counter_));
344 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
345 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
346 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
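  // Together with EmitProfilingCounterReset below, the generated code in
  // effect implements the following pseudo-logic (the pl branch on the SetCC
  // result is emitted at each use site):
  //   counter -= delta;
  //   if (counter < 0) {
  //     InterruptCheck();        // back edge / return interrupt
  //     counter = reset_value;   // EmitProfilingCounterReset
  //   }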
350 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
351 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
357 void FullCodeGenerator::EmitProfilingCounterReset() {
358 Assembler::BlockConstPoolScope block_const_pool(masm_);
359 PredictableCodeSizeScope predictable_code_size_scope(
360 masm_, kProfileCounterResetSequenceLength);
363 int reset_value = FLAG_interrupt_budget;
364 if (info_->is_debug()) {
365 // Detect debug break requests as soon as possible.
366 reset_value = FLAG_interrupt_budget >> 4;
368 __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 instructions (on ARMv7) or
  // 1 to 5 instructions (on ARMv6), depending on whether the constant is
  // placed in an extended constant pool - insert nops to compensate.
372 int expected_instr_count =
373 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
374 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
378 __ mov(r3, Operand(Smi::FromInt(reset_value)));
379 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
383 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
384 Label* back_edge_target) {
385 Comment cmnt(masm_, "[ Back edge bookkeeping");
386 // Block literal pools whilst emitting back edge code.
387 Assembler::BlockConstPoolScope block_const_pool(masm_);
390 DCHECK(back_edge_target->is_bound());
391 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
392 int weight = Min(kMaxBackEdgeWeight,
393 Max(1, distance / kCodeSizeMultiplier));
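  // Illustrative numbers only: a back edge spanning 600 bytes of code with a
  // kCodeSizeMultiplier of, say, 150 yields weight Max(1, 600 / 150) == 4, so
  // larger loop bodies drain the interrupt budget faster, capped at
  // kMaxBackEdgeWeight.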
394 EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
398 // Record a mapping of this PC offset to the OSR id. This is used to find
399 // the AST id from the unoptimized code in order to use it as a key into
400 // the deoptimization input data found in the optimized code.
401 RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();
  __ bind(&ok);
406 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
407 // Record a mapping of the OSR id to this PC. This is used if the OSR
408 // entry becomes the target of a bailout. We don't expect it to be, but
409 // we want it to work if it is.
410 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
414 void FullCodeGenerator::EmitReturnSequence() {
415 Comment cmnt(masm_, "[ Return sequence");
416 if (return_label_.is_bound()) {
417 __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
426 // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
432 weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
435 EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);
446 // Add a label for checking the size of the code used for returning.
447 Label check_exit_codesize;
448 __ bind(&check_exit_codesize);
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
452 { Assembler::BlockConstPoolScope block_const_pool(masm_);
453 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
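    // E.g. for a function declared with two parameters, sp_delta is
    // (2 + 1) * kPointerSize == 12 bytes: both parameters plus the receiver.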
454 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
455 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
456 PredictableCodeSizeScope predictable(masm_, -1);
458 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
459 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
460 __ add(sp, sp, Operand(sp_delta));
462 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
467 // Check that the size of the code used for returning is large enough
468 // for the debugger's requirements.
469 DCHECK(Assembler::kJSReturnSequenceInstructions <=
470 masm_->InstructionsGeneratedSince(&check_exit_codesize));
476 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
477 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
481 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
482 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
483 codegen()->GetVar(result_register(), var);
487 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
488 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
489 codegen()->GetVar(result_register(), var);
490 __ push(result_register());
494 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
496 // For simplicity we always test the accumulator register.
497 codegen()->GetVar(result_register(), var);
498 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
499 codegen()->DoTest(this);
503 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
507 void FullCodeGenerator::AccumulatorValueContext::Plug(
508 Heap::RootListIndex index) const {
509 __ LoadRoot(result_register(), index);
513 void FullCodeGenerator::StackValueContext::Plug(
514 Heap::RootListIndex index) const {
515 __ LoadRoot(result_register(), index);
516 __ push(result_register());
520 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
525 if (index == Heap::kUndefinedValueRootIndex ||
526 index == Heap::kNullValueRootIndex ||
527 index == Heap::kFalseValueRootIndex) {
528 if (false_label_ != fall_through_) __ b(false_label_);
529 } else if (index == Heap::kTrueValueRootIndex) {
530 if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
533 codegen()->DoTest(this);
538 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
542 void FullCodeGenerator::AccumulatorValueContext::Plug(
543 Handle<Object> lit) const {
544 __ mov(result_register(), Operand(lit));
548 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
549 // Immediates cannot be pushed directly.
550 __ mov(result_register(), Operand(lit));
551 __ push(result_register());
555 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
560 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
561 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
562 if (false_label_ != fall_through_) __ b(false_label_);
563 } else if (lit->IsTrue() || lit->IsJSObject()) {
564 if (true_label_ != fall_through_) __ b(true_label_);
565 } else if (lit->IsString()) {
566 if (String::cast(*lit)->length() == 0) {
567 if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
571 } else if (lit->IsSmi()) {
572 if (Smi::cast(*lit)->value() == 0) {
573 if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
579 __ mov(result_register(), Operand(lit));
580 codegen()->DoTest(this);
585 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
592 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
601 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
602 Register reg) const {
604 if (count > 1) __ Drop(count - 1);
605 __ str(reg, MemOperand(sp, 0));
609 void FullCodeGenerator::TestContext::DropAndPlug(int count,
610 Register reg) const {
612 // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
615 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
616 codegen()->DoTest(this);
620 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
621 Label* materialize_false) const {
622 DCHECK(materialize_true == materialize_false);
623 __ bind(materialize_true);
627 void FullCodeGenerator::AccumulatorValueContext::Plug(
628 Label* materialize_true,
629 Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
640 void FullCodeGenerator::StackValueContext::Plug(
641 Label* materialize_true,
642 Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
654 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
655 Label* materialize_false) const {
656 DCHECK(materialize_true == true_label_);
657 DCHECK(materialize_false == false_label_);
661 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
665 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
666 Heap::RootListIndex value_root_index =
667 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
668 __ LoadRoot(result_register(), value_root_index);
672 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
673 Heap::RootListIndex value_root_index =
674 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
680 void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
693 void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
697 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
698 CallIC(ic, condition->test_id());
699 __ tst(result_register(), result_register());
700 Split(ne, if_true, if_false, fall_through);
704 void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
719 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
720 DCHECK(var->IsStackAllocated());
721 // Offset is negative because higher indexes are at lower addresses.
722 int offset = -var->index() * kPointerSize;
723 // Adjust by a (parameter or local) base offset.
724 if (var->IsParameter()) {
725 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
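  // Worked example (illustrative indices): with num_parameters == 2, the
  // parameter with index 0 resolves to MemOperand(fp, 3 * kPointerSize),
  // while the stack local with index 1 resolves to
  // MemOperand(fp, kLocal0Offset - kPointerSize).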
729 return MemOperand(fp, offset);
733 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
734 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
735 if (var->IsContextSlot()) {
736 int context_chain_length = scope()->ContextChainLength(var->scope());
737 __ LoadContext(scratch, context_chain_length);
738 return ContextOperand(scratch, var->index());
740 return StackOperand(var);
745 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
746 // Use destination as scratch.
747 MemOperand location = VarOperand(var, dest);
748 __ ldr(dest, location);
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
756 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
757 DCHECK(!scratch0.is(src));
758 DCHECK(!scratch0.is(scratch1));
759 DCHECK(!scratch1.is(src));
760 MemOperand location = VarOperand(var, scratch0);
761 __ str(src, location);
763 // Emit the write barrier code if the location is in the heap.
764 if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
775 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                      bool should_normalize,
                                                      Label* if_true,
                                                      Label* if_false) {
779 // Only prepare for bailouts before splits if we're in a test
780 // context. Otherwise, we let the Visit function deal with the
781 // preparation to avoid preparing with the same AST id twice.
782 if (!context()->IsTest() || !info_->IsOptimizable()) return;
  Label skip;
  if (should_normalize) __ b(&skip);
786 PrepareForBailout(expr, TOS_REG);
787 if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
796 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
799 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
800 if (generate_debug_code_) {
801 // Check that we're not inside a with or catch context.
802 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
803 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
804 __ Check(ne, kDeclarationInWithContext);
805 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
806 __ Check(ne, kDeclarationInCatchContext);
811 void FullCodeGenerator::VisitVariableDeclaration(
812 VariableDeclaration* declaration) {
813 // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
816 VariableProxy* proxy = declaration->proxy();
817 VariableMode mode = declaration->mode();
818 Variable* variable = proxy->var();
819 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
820 switch (variable->location()) {
821 case Variable::UNALLOCATED:
822 globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;
829 case Variable::PARAMETER:
830 case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
833 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;
838 case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
841 EmitDebugCheckDeclarationContext(variable);
842 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
843 __ str(ip, ContextOperand(cp, variable->index()));
844 // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;
849 case Variable::LOOKUP: {
850 Comment cmnt(masm_, "[ VariableDeclaration");
851 __ mov(r2, Operand(variable->name()));
852 // Declaration nodes are always introduced in one of four modes.
853 DCHECK(IsDeclaredVariableMode(mode));
854 PropertyAttributes attr =
855 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
856 __ mov(r1, Operand(Smi::FromInt(attr)));
857 // Push initial value, if any.
858 // Note: For variables we must not push an initial value (such as
859 // 'undefined') because we may have a (legal) redeclaration and we
860 // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
868 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
875 void FullCodeGenerator::VisitFunctionDeclaration(
876 FunctionDeclaration* declaration) {
877 VariableProxy* proxy = declaration->proxy();
878 Variable* variable = proxy->var();
879 switch (variable->location()) {
880 case Variable::UNALLOCATED: {
881 globals_->Add(variable->name(), zone());
882 Handle<SharedFunctionInfo> function =
883 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
884 // Check for stack-overflow exception.
885 if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
890 case Variable::PARAMETER:
891 case Variable::LOCAL: {
892 Comment cmnt(masm_, "[ FunctionDeclaration");
893 VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
898 case Variable::CONTEXT: {
899 Comment cmnt(masm_, "[ FunctionDeclaration");
900 EmitDebugCheckDeclarationContext(variable);
901 VisitForAccumulatorValue(declaration->fun());
902 __ str(result_register(), ContextOperand(cp, variable->index()));
903 int offset = Context::SlotOffset(variable->index());
904 // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
917 case Variable::LOOKUP: {
918 Comment cmnt(masm_, "[ FunctionDeclaration");
919 __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
922 // Push initial value for function declaration.
923 VisitForStackValue(declaration->fun());
924 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932 Variable* variable = declaration->proxy()->var();
933 DCHECK(variable->location() == Variable::CONTEXT);
934 DCHECK(variable->interface()->IsFrozen());
936 Comment cmnt(masm_, "[ ModuleDeclaration");
937 EmitDebugCheckDeclarationContext(variable);
939 // Load instance object.
940 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
941 __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
942 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
945 __ str(r1, ContextOperand(cp, variable->index()));
946 // We know that we have written a module, which is not a smi.
947 __ RecordWriteContextSlot(cp,
948 Context::SlotOffset(variable->index()),
955 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
957 // Traverse into body.
958 Visit(declaration->module());
962 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
963 VariableProxy* proxy = declaration->proxy();
964 Variable* variable = proxy->var();
965 switch (variable->location()) {
    case Variable::UNALLOCATED:
      break;
970 case Variable::CONTEXT: {
971 Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
977 case Variable::PARAMETER:
978 case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
985 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
990 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
991 // Call the runtime to declare the globals.
992 // The context is the first argument.
993 __ mov(r1, Operand(pairs));
994 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
997 // Return value is ignored.
1001 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1002 // Call the runtime to declare the modules.
1003 __ Push(descriptions);
1004 __ CallRuntime(Runtime::kDeclareModules, 1);
1005 // Return value is ignored.
1009 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1010 Comment cmnt(masm_, "[ SwitchStatement");
1011 Breakable nested_statement(this, stmt);
1012 SetStatementPosition(stmt);
1014 // Keep the switch value on the stack until a case matches.
1015 VisitForStackValue(stmt->tag());
1016 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1018 ZoneList<CaseClause*>* clauses = stmt->cases();
1019 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1021 Label next_test; // Recycled for each test.
1022 // Compile all the tests with branches to their bodies.
1023 for (int i = 0; i < clauses->length(); i++) {
1024 CaseClause* clause = clauses->at(i);
1025 clause->body_target()->Unuse();
1027 // The default is not a test, but remember it as final fall through.
1028 if (clause->is_default()) {
1029 default_clause = clause;
1033 Comment cmnt(masm_, "[ Case comparison");
1034 __ bind(&next_test);
1037 // Compile the label expression.
1038 VisitForAccumulatorValue(clause->label());
1040 // Perform the comparison as if via '==='.
1041 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
1042 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1043 JumpPatchSite patch_site(masm_);
1044 if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);
      __ cmp(r1, r0);
1050 __ b(ne, &next_test);
1051 __ Drop(1); // Switch value is no longer needed.
1052 __ b(clause->body_target());
1053 __ bind(&slow_case);
1056 // Record position before stub call for type feedback.
1057 SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1060 CallIC(ic, clause->CompareId());
1061 patch_site.EmitPatchInfo();
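    // Note: until the CompareIC has seen smi operands, the patch site emitted
    // by EmitJumpIfNotSmi above is an always-taken branch to the slow case,
    // so every comparison initially goes through the IC; once smi feedback is
    // recorded, the site is patched into a real smi check (see JumpPatchSite
    // at the top of this file).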
1065 PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
1073 __ cmp(r0, Operand::Zero());
1074 __ b(ne, &next_test);
1075 __ Drop(1); // Switch value is no longer needed.
1076 __ b(clause->body_target());
1079 // Discard the test value and jump to the default if present, otherwise to
1080 // the end of the statement.
1081 __ bind(&next_test);
1082 __ Drop(1); // Switch value is no longer needed.
1083 if (default_clause == NULL) {
1084 __ b(nested_statement.break_label());
1086 __ b(default_clause->body_target());
1089 // Compile all the case bodies.
1090 for (int i = 0; i < clauses->length(); i++) {
1091 Comment cmnt(masm_, "[ Case body");
1092 CaseClause* clause = clauses->at(i);
1093 __ bind(clause->body_target());
1094 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1095 VisitStatements(clause->statements());
1098 __ bind(nested_statement.break_label());
1099 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1103 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1104 Comment cmnt(masm_, "[ ForInStatement");
1105 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1106 SetStatementPosition(stmt);
  Label loop, exit;
  ForIn loop_statement(this, stmt);
1110 increment_loop_depth();
1112 // Get the object to enumerate over. If the object is null or undefined, skip
1113 // over the loop. See ECMA-262 version 5, section 12.6.4.
1114 VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
1118 Register null_value = r5;
1119 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);
1123 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1125 // Convert the object to a JS object.
1126 Label convert, done_convert;
1127 __ JumpIfSmi(r0, &convert);
1128 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1129 __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1133 __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);
1137 // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1140 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1141 __ b(le, &call_runtime);
1143 // Check cache validity in generated code. This is a fast case for
1144 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1145 // guarantee cache validity, call the runtime system to check cache
1146 // validity or get the property names in a fixed array.
1147 __ CheckEnumCache(null_value, &call_runtime);
1149 // The enum cache is valid. Load the map of the object being
1150 // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);
1155 // Get the set of properties to enumerate.
1156 __ bind(&call_runtime);
1157 __ push(r0); // Duplicate the enumerable object on the stack.
1158 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1159 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1161 // If we got a map from the runtime call, we can do a fast
1162 // modification check. Otherwise, we got a fixed array, and we have
1163 // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1166 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);
1170 // We got a map in register r0. Get the enumeration cache from it.
1171 Label no_descriptors;
1172 __ bind(&use_cache);
1174 __ EnumLength(r1, r0);
1175 __ cmp(r1, Operand(Smi::FromInt(0)));
1176 __ b(eq, &no_descriptors);
1178 __ LoadInstanceDescriptors(r0, r2);
1179 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1180 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1182 // Set up the four remaining stack slots.
1183 __ push(r0); // Map.
1184 __ mov(r0, Operand(Smi::FromInt(0)));
1185 // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);
  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);
1193 // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);
1197 __ Move(r1, FeedbackVector());
1198 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1199 int vector_index = FeedbackVector()->GetIndex(slot);
1200 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
1202 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1203 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1204 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1205 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1206 __ b(gt, &non_proxy);
1207 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1208 __ bind(&non_proxy);
1209 __ Push(r1, r0); // Smi and array
1210 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1211 __ mov(r0, Operand(Smi::FromInt(0)));
1212 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
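  // The for-in state now occupies five stack slots, matching the loads in the
  // loop body below:
  //   sp[0]: current index (smi)   sp[4]: length (smi)
  //   sp[8]: array / enum cache    sp[12]: expected map (or smi 0 for proxies)
  //   sp[16]: the enumerable object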
1214 // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
1217 // Load the current count to r0, load the length to r1.
1218 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1219 __ cmp(r0, r1); // Compare to the array length.
1220 __ b(hs, loop_statement.break_label());
1222 // Get the current entry of the array into register r3.
1223 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1224 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1225 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
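  // A smi on 32-bit ARM is the value shifted left by one, so
  // PointerAddressFromSmiKey can address element r0 as r2 + (r0 << 1), which
  // equals r2 + untagged_index * kPointerSize without untagging first.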
1227 // Get the expected map from the stack or a smi in the
1228 // permanent slow case into register r2.
1229 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1231 // Check if the expected map still matches that of the enumerable.
1232 // If not, we may have to filter the key.
1234 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1235 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1236 __ cmp(r4, Operand(r2));
1237 __ b(eq, &update_each);
1239 // For proxies, no filtering is done.
1240 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1241 __ cmp(r2, Operand(Smi::FromInt(0)));
1242 __ b(eq, &update_each);
1244 // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
1247 __ push(r1); // Enumerable.
1248 __ push(r3); // Current entry.
1249 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1250 __ mov(r3, Operand(r0), SetCC);
1251 __ b(eq, loop_statement.continue_label());
1253 // Update the 'each' property or variable from the possibly filtered
1254 // entry in register r3.
1255 __ bind(&update_each);
1256 __ mov(result_register(), r3);
1257 // Perform the assignment as if via '='.
1258 { EffectContext context(this);
1259 EmitAssignment(stmt->each());
1262 // Generate code for the body of the loop.
1263 Visit(stmt->body());
  // Generate code for going to the next element by incrementing
1266 // the index (smi) stored on top of the stack.
1267 __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);
1272 EmitBackEdgeBookkeeping(stmt, &loop);
1275 // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);
1279 // Exit and decrement the loop depth.
1280 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
1286 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1287 Comment cmnt(masm_, "[ ForOfStatement");
1288 SetStatementPosition(stmt);
1290 Iteration loop_statement(this, stmt);
1291 increment_loop_depth();
1293 // var iterator = iterable[Symbol.iterator]();
1294 VisitForEffect(stmt->assign_iterator());
1297 __ bind(loop_statement.continue_label());
1299 // result = iterator.next()
1300 VisitForEffect(stmt->next_result());
1302 // if (result.done) break;
1303 Label result_not_done;
1304 VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
1308 __ bind(&result_not_done);
1310 // each = result.value
1311 VisitForEffect(stmt->assign_each());
1313 // Generate code for the body of the loop.
1314 Visit(stmt->body());
1316 // Check stack before looping.
1317 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1318 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1319 __ jmp(loop_statement.continue_label());
1321 // Exit and decrement the loop depth.
1322 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1323 __ bind(loop_statement.break_label());
1324 decrement_loop_depth();
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
1330 // Use the fast case closure allocation code that allocates in new
1331 // space for nested functions that don't need literals cloning. If
1332 // we're running with the --always-opt or the --prepare-always-opt
1333 // flag, we need to use the runtime function so that the new function
1334 // we are creating here gets a chance to have its code optimized and
1335 // doesn't just get a copy of the existing unoptimized code.
1336 if (!FLAG_always_opt &&
1337 !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
1340 info->num_literals() == 0) {
1341 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
1345 __ mov(r0, Operand(info));
1346 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1347 : Heap::kFalseValueRootIndex);
1348 __ Push(cp, r0, r1);
1349 __ CallRuntime(Runtime::kNewClosure, 3);
1351 context()->Plug(r0);
1355 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1356 Comment cmnt(masm_, "[ VariableProxy");
1357 EmitVariableLoad(expr);
1361 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");
1364 __ ldr(LoadDescriptor::ReceiverRegister(),
1365 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1367 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1368 __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1370 if (FLAG_vector_ics) {
1371 __ mov(VectorLoadICDescriptor::SlotRegister(),
1372 Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
1373 CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }
1378 __ cmp(r0, Operand(isolate()->factory()->undefined_value()));
1381 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1386 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;
1395 if (s->num_heap_slots() > 0) {
1396 if (s->calls_sloppy_eval()) {
1397 // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
1402 // Load next context in chain.
1403 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
1407 // If no outer scope calls eval, we do not need to check more
1408 // context extensions.
1409 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1410 s = s->outer_scope();
1413 if (s->is_eval_scope()) {
1415 if (!current.is(next)) {
1416 __ Move(next, current);
1419 // Terminate at native context.
1420 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1421 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1424 // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
1428 // Load next context in chain.
1429 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1434 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1435 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1436 if (FLAG_vector_ics) {
1437 __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
1448 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
1451 Register context = cp;
1455 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1456 if (s->num_heap_slots() > 0) {
1457 if (s->calls_sloppy_eval()) {
1458 // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
1463 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
1468 // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);
  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1476 return ContextOperand(context, var->index());
1480 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
1484 // Generate fast-case code for variables that might be shadowed by
1485 // eval-introduced variables. Eval is used a lot without
1486 // introducing variables. In those cases, we do not want to
1487 // perform a runtime call for all variables in the scope
1488 // containing the eval.
1489 Variable* var = proxy->var();
1490 if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
1493 } else if (var->mode() == DYNAMIC_LOCAL) {
1494 Variable* local = var->local_if_not_shadowed();
1495 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1496 if (local->mode() == LET || local->mode() == CONST ||
1497 local->mode() == CONST_LEGACY) {
1498 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1499 if (local->mode() == CONST_LEGACY) {
1500 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1501 } else { // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
1513 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1514 // Record position before possible IC call.
1515 SetSourcePosition(proxy->position());
1516 Variable* var = proxy->var();
  // Three cases: global variables, lookup variables, and all other types of
  // variables.
1520 switch (var->location()) {
1521 case Variable::UNALLOCATED: {
1522 Comment cmnt(masm_, "[ Global variable");
1523 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1524 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1525 if (FLAG_vector_ics) {
1526 __ mov(VectorLoadICDescriptor::SlotRegister(),
               Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
1529 CallLoadIC(CONTEXTUAL);
1530 context()->Plug(r0);
1534 case Variable::PARAMETER:
1535 case Variable::LOCAL:
1536 case Variable::CONTEXT: {
1537 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1538 : "[ Stack variable");
1539 if (var->binding_needs_init()) {
1540 // var->scope() may be NULL when the proxy is located in eval code and
1541 // refers to a potential outside binding. Currently those bindings are
1542 // always looked up dynamically, i.e. in that case
1543 // var->location() == LOOKUP.
1545 DCHECK(var->scope() != NULL);
1547 // Check if the binding really needs an initialization check. The check
1548 // can be skipped in the following situation: we have a LET or CONST
1549 // binding in harmony mode, both the Variable and the VariableProxy have
1550 // the same declaration scope (i.e. they are both in global code, in the
1551 // same function or in the same eval code) and the VariableProxy is in
1552 // the source physically located after the initializer of the variable.
1554 // We cannot skip any initialization checks for CONST in non-harmony
1555 // mode because const variables may be declared but never initialized:
1556 // if (false) { const x; }; var y = x;
1558 // The condition on the declaration scopes is a conservative check for
1559 // nested functions that access a binding and are called before the
1560 // binding is initialized:
1561 // function() { f(); let x = 1; function f() { x = 2; } }
1563 bool skip_init_check;
1564 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1565 skip_init_check = false;
1567 // Check that we always have valid source position.
1568 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1569 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1570 skip_init_check = var->mode() != CONST_LEGACY &&
1571 var->initializer_position() < proxy->position();
1574 if (!skip_init_check) {
1575 // Let and const need a read barrier.
        GetVar(r0, var);
        __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1578 if (var->mode() == LET || var->mode() == CONST) {
1579 // Throw a reference error when using an uninitialized let/const
1580 // binding in harmony mode.
          Label done;
          __ b(ne, &done);
          __ mov(r0, Operand(var->name()));
          __ push(r0);
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ bind(&done);
          // Uninitialized const bindings outside of harmony mode are unholed.
1589 DCHECK(var->mode() == CONST_LEGACY);
1590 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1592 context()->Plug(r0);
1596 context()->Plug(var);
1600 case Variable::LOOKUP: {
1601 Comment cmnt(masm_, "[ Lookup variable");
1603 // Generate code for loading from variables potentially shadowed
1604 // by eval-introduced variables.
      Label done, slow;
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
1611 context()->Plug(r0);
1617 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1618 Comment cmnt(masm_, "[ RegExpLiteral");
1620 // Registers will be used as follows:
1621 // r5 = materialized value (RegExp literal)
1622 // r4 = JS function, literals array
1623 // r3 = literal index
1624 // r2 = RegExp pattern
1625 // r1 = RegExp flags
1626 // r0 = RegExp literal clone
1627 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1628 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1629 int literal_offset =
1630 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  Label materialized;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);
1636 // Create regexp literal using runtime function.
1637 // Result will be in r0.
1638 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1639 __ mov(r2, Operand(expr->pattern()));
1640 __ mov(r1, Operand(expr->flags()));
1641 __ Push(r4, r3, r2, r1);
1642 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1645 __ bind(&materialized);
1646 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1647 Label allocated, runtime_allocate;
1648 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1651 __ bind(&runtime_allocate);
1652 __ mov(r0, Operand(Smi::FromInt(size)));
1654 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1657 __ bind(&allocated);
1658 // After this, registers are used as follows:
1659 // r0: Newly allocated regexp.
1660 // r5: Materialized regexp.
1662 __ CopyFields(r0, r5, d0, size / kPointerSize);
1663 context()->Plug(r0);
1667 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1668 if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
1677 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1678 Comment cmnt(masm_, "[ ObjectLiteral");
1680 expr->BuildConstantProperties(isolate());
1681 Handle<FixedArray> constant_properties = expr->constant_properties();
1682 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1683 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1684 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1685 __ mov(r1, Operand(constant_properties));
1686 int flags = expr->fast_elements()
1687 ? ObjectLiteral::kFastElements
1688 : ObjectLiteral::kNoFlags;
1689 flags |= expr->has_function()
1690 ? ObjectLiteral::kHasFunction
1691 : ObjectLiteral::kNoFlags;
1692 __ mov(r0, Operand(Smi::FromInt(flags)));
1693 int properties_count = constant_properties->length() / 2;
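  // constant_properties stores the boilerplate as (key, value) pairs, hence
  // the division by two.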
1694 if (expr->may_store_doubles() || expr->depth() > 1 ||
1695 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1696 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1697 __ Push(r3, r2, r1, r0);
1698 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }
1703 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1705 // If result_saved is true the result is on top of the stack. If
1706 // result_saved is false the result is in r0.
1707 bool result_saved = false;
1709 // Mark all computed expressions that are bound to a key that
1710 // is shadowed by a later occurrence of the same key. For the
1711 // marked expressions, no store code is emitted.
1712 expr->CalculateEmitStore(zone());
1714 AccessorTable accessor_table(zone());
1715 for (int i = 0; i < expr->properties()->length(); i++) {
1716 ObjectLiteral::Property* property = expr->properties()->at(i);
1717 if (property->IsCompileTimeValue()) continue;
1719 Literal* key = property->key();
1720 Expression* value = property->value();
1721 if (!result_saved) {
1722 __ push(r0); // Save result on stack
1723 result_saved = true;
1725 switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
1728 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
1731 case ObjectLiteral::Property::COMPUTED:
1732 // It is safe to use [[Put]] here because the boilerplate already
1733 // contains computed properties with an uninitialized value.
1734 if (key->value()->IsInternalizedString()) {
1735 if (property->emit_store()) {
1736 VisitForAccumulatorValue(value);
1737 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1738 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1739 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1740 CallStoreIC(key->LiteralFeedbackId());
1741 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1743 VisitForEffect(value);
1747 // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
1750 VisitForStackValue(key);
1751 VisitForStackValue(value);
1752 if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          __ push(r0);
1755 __ CallRuntime(Runtime::kSetProperty, 4);
1760 case ObjectLiteral::Property::PROTOTYPE:
1761 // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
1764 VisitForStackValue(value);
1765 if (property->emit_store()) {
1766 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1772 case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
1775 case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
1781 // Emit code to define accessors, using only a single call to the runtime for
1782 // each pair of corresponding getters and setters.
1783 for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
1788 VisitForStackValue(it->first);
1789 EmitAccessor(it->second->getter);
1790 EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
1793 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1796 if (expr->has_function()) {
1797 DCHECK(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
1800 __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
1811 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1812 Comment cmnt(masm_, "[ ArrayLiteral");
1814 expr->BuildConstantElements(isolate());
1815 int flags = expr->depth() == 1
1816 ? ArrayLiteral::kShallowElements
1817 : ArrayLiteral::kNoFlags;
1819 ZoneList<Expression*>* subexprs = expr->values();
1820 int length = subexprs->length();
1821 Handle<FixedArray> constant_elements = expr->constant_elements();
1822 DCHECK_EQ(2, constant_elements->length());
1823 ElementsKind constant_elements_kind =
1824 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1825 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1826 Handle<FixedArrayBase> constant_elements_values(
1827 FixedArrayBase::cast(constant_elements->get(1)));
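  // As unpacked above, constant_elements is a pair: element 0 holds the
  // ElementsKind as a smi and element 1 the backing FixedArrayBase.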
1829 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1830 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1831 // If the only customer of allocation sites is transitioning, then
1832 // we can turn it off if we don't have anywhere else to transition to.
1833 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1836 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1837 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1838 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1839 __ mov(r1, Operand(constant_elements));
1840 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1841 __ mov(r0, Operand(Smi::FromInt(flags)));
1842 __ Push(r3, r2, r1, r0);
1843 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
1849 bool result_saved = false; // Is the result saved to the stack?
1851 // Emit code to evaluate all the non-constant subexpressions and to store
1852 // them into the newly cloned array.
1853 for (int i = 0; i < length; i++) {
1854 Expression* subexpr = subexprs->at(i);
1855 // If the subexpression is a literal or a simple materialized literal it
1856 // is already set in the cloned array.
1857 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1859 if (!result_saved) {
      __ push(r0);
      __ Push(Smi::FromInt(expr->literal_index()));
1862 result_saved = true;
1864 VisitForAccumulatorValue(subexpr);
1866 if (IsFastObjectElementsKind(constant_elements_kind)) {
1867 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1868 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1869 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1870 __ str(result_register(), FieldMemOperand(r1, offset));
1871 // Update the write barrier for the array store.
1872 __ RecordWriteField(r1, offset, result_register(), r2,
1873 kLRHasBeenSaved, kDontSaveFPRegs,
1874 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
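// INLINE_SMI_CHECK emits an inline smi test on the stored value, so the
// barrier is skipped when the value is a smi and cannot be a heap pointer.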
1876 __ mov(r3, Operand(Smi::FromInt(i)));
1877 StoreArrayLiteralElementStub stub(isolate());
1881 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1885 __ pop(); // literal index
1886 context()->PlugTOS();
1888 context()->Plug(r0);
1893 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1894 DCHECK(expr->target()->IsValidReferenceExpression());
1896 Comment cmnt(masm_, "[ Assignment");
1898 Property* property = expr->target()->AsProperty();
1899 LhsKind assign_type = GetAssignType(property);
1901 // Evaluate LHS expression.
1902 switch (assign_type) {
1904 // Nothing to do here.
1906 case NAMED_PROPERTY:
1907 if (expr->is_compound()) {
1908 // We need the receiver both on the stack and in the register.
1909 VisitForStackValue(property->obj());
1910 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1912 VisitForStackValue(property->obj());
1915 case NAMED_SUPER_PROPERTY:
1916 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1917 EmitLoadHomeObject(property->obj()->AsSuperReference());
1918 __ Push(result_register());
1919 if (expr->is_compound()) {
1920 const Register scratch = r1;
1921 __ ldr(scratch, MemOperand(sp, kPointerSize));
1923 __ Push(result_register());
1926 case KEYED_SUPER_PROPERTY:
1927 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1928 EmitLoadHomeObject(property->obj()->AsSuperReference());
1929 __ Push(result_register());
1930 VisitForAccumulatorValue(property->key());
1931 __ Push(result_register());
1932 if (expr->is_compound()) {
1933 const Register scratch = r1;
1934 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1936 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1938 __ Push(result_register());
1941 case KEYED_PROPERTY:
1942 if (expr->is_compound()) {
1943 VisitForStackValue(property->obj());
1944 VisitForStackValue(property->key());
1945 __ ldr(LoadDescriptor::ReceiverRegister(),
1946 MemOperand(sp, 1 * kPointerSize));
1947 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1949 VisitForStackValue(property->obj());
1950 VisitForStackValue(property->key());
1955 // For compound assignments we need another deoptimization point after the
1956 // variable/property load.
1957 if (expr->is_compound()) {
1958 { AccumulatorValueContext context(this);
1959 switch (assign_type) {
1961 EmitVariableLoad(expr->target()->AsVariableProxy());
1962 PrepareForBailout(expr->target(), TOS_REG);
1964 case NAMED_PROPERTY:
1965 EmitNamedPropertyLoad(property);
1966 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1968 case NAMED_SUPER_PROPERTY:
1969 EmitNamedSuperPropertyLoad(property);
1970 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1972 case KEYED_SUPER_PROPERTY:
1973 EmitKeyedSuperPropertyLoad(property);
1974 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1976 case KEYED_PROPERTY:
1977 EmitKeyedPropertyLoad(property);
1978 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1983 Token::Value op = expr->binary_op();
1984 __ push(r0); // Left operand goes on the stack.
1985 VisitForAccumulatorValue(expr->value());
1987 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1990 SetSourcePosition(expr->position() + 1);
1991 AccumulatorValueContext context(this);
1992 if (ShouldInlineSmiCase(op)) {
1993 EmitInlineSmiBinaryOp(expr->binary_operation(),
1999 EmitBinaryOp(expr->binary_operation(), op, mode);
2002 // Deoptimization point in case the binary operation may have side effects.
2003 PrepareForBailout(expr->binary_operation(), TOS_REG);
2005 VisitForAccumulatorValue(expr->value());
2008 // Record source position before possible IC call.
2009 SetSourcePosition(expr->position());
2012 switch (assign_type) {
2014 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2016 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2017 context()->Plug(r0);
2019 case NAMED_PROPERTY:
2020 EmitNamedPropertyAssignment(expr);
2022 case NAMED_SUPER_PROPERTY:
2023 EmitNamedSuperPropertyStore(property);
2024 context()->Plug(r0);
2026 case KEYED_SUPER_PROPERTY:
2027 EmitKeyedSuperPropertyStore(property);
2028 context()->Plug(r0);
2030 case KEYED_PROPERTY:
2031 EmitKeyedPropertyAssignment(expr);
2037 void FullCodeGenerator::VisitYield(Yield* expr) {
2038 Comment cmnt(masm_, "[ Yield");
2039 // Evaluate yielded value first; the initial iterator definition depends on
2040 // this. It stays on the stack while we update the iterator.
2041 VisitForStackValue(expr->expression());
2043 switch (expr->yield_kind()) {
2044 case Yield::kSuspend:
2045 // Pop value from top-of-stack slot; box result into result register.
2046 EmitCreateIteratorResult(false);
2047 __ push(result_register());
2048 // Fall through.
2049 case Yield::kInitial: {
2050 Label suspend, continuation, post_runtime, resume;
2054 __ bind(&continuation);
2058 VisitForAccumulatorValue(expr->generator_object());
2059 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2060 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2061 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
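// The continuation of a suspended generator is a positive smi holding the
// resume offset; compare the sentinels kGeneratorExecuting (< 0) and
// kGeneratorClosed (== 0) tested in EmitGeneratorResume below.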
2062 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2064 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2065 kLRHasBeenSaved, kDontSaveFPRegs);
2066 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2068 __ b(eq, &post_runtime);
2069 __ push(r0); // generator object
2070 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2071 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2072 __ bind(&post_runtime);
2073 __ pop(result_register());
2074 EmitReturnSequence();
2077 context()->Plug(result_register());
2081 case Yield::kFinal: {
2082 VisitForAccumulatorValue(expr->generator_object());
2083 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2084 __ str(r1, FieldMemOperand(result_register(),
2085 JSGeneratorObject::kContinuationOffset));
2086 // Pop value from top-of-stack slot, box result into result register.
2087 EmitCreateIteratorResult(true);
2088 EmitUnwindBeforeReturn();
2089 EmitReturnSequence();
2093 case Yield::kDelegating: {
2094 VisitForStackValue(expr->generator_object());
2096 // Initial stack layout is as follows:
2097 // [sp + 1 * kPointerSize] iter
2098 // [sp + 0 * kPointerSize] g
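// A delegating yield (yield*) is lowered into a loop that forwards next()
// and throw() to the inner iterator until result.done is true; the labels
// below implement that loop and its catch handler.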
2100 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2101 Label l_next, l_call, l_loop;
2102 Register load_receiver = LoadDescriptor::ReceiverRegister();
2103 Register load_name = LoadDescriptor::NameRegister();
2105 // Initial send value is undefined.
2106 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2109 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2111 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2112 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2113 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2114 __ Push(load_name, r3, r0); // "throw", iter, except
2117 // try { received = %yield result }
2118 // Shuffle the received result above a try handler and yield it without
2119 // re-boxing.
2121 __ pop(r0); // result
2122 __ PushTryHandler(StackHandler::CATCH, expr->index());
2123 const int handler_size = StackHandlerConstants::kSize;
2124 __ push(r0); // result
2126 __ bind(&l_continuation);
2128 __ bind(&l_suspend);
2129 const int generator_object_depth = kPointerSize + handler_size;
2130 __ ldr(r0, MemOperand(sp, generator_object_depth));
2132 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2133 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2134 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2135 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2137 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2138 kLRHasBeenSaved, kDontSaveFPRegs);
2139 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2140 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2141 __ pop(r0); // result
2142 EmitReturnSequence();
2143 __ bind(&l_resume); // received in r0
2146 // receiver = iter; f = 'next'; arg = received;
2149 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2150 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2151 __ Push(load_name, r3, r0); // "next", iter, received
2153 // result = receiver[f](arg);
2155 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2156 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2157 if (FLAG_vector_ics) {
2158 __ mov(VectorLoadICDescriptor::SlotRegister(),
2159 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2161 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2162 CallIC(ic, TypeFeedbackId::None());
2164 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2165 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2168 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2169 __ Drop(1); // The function is still on the stack; drop it.
2171 // if (!result.done) goto l_try;
2173 __ Move(load_receiver, r0);
2175 __ push(load_receiver); // save result
2176 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2177 if (FLAG_vector_ics) {
2178 __ mov(VectorLoadICDescriptor::SlotRegister(),
2179 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2181 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done
2182 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2184 __ cmp(r0, Operand(0));
2188 __ pop(load_receiver); // result
2189 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2190 if (FLAG_vector_ics) {
2191 __ mov(VectorLoadICDescriptor::SlotRegister(),
2192 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2194 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value
2195 context()->DropAndPlug(2, r0); // drop iter and g
2202 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2204 JSGeneratorObject::ResumeMode resume_mode) {
2205 // The value stays in r0, and is ultimately read by the resumed generator, as
2206 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2207 // is read to throw the value when the resumed generator is already closed.
2208 // r1 will hold the generator object until the activation has been resumed.
2209 VisitForStackValue(generator);
2210 VisitForAccumulatorValue(value);
2213 // Check generator state.
2214 Label wrong_state, closed_state, done;
2215 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2216 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2217 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2218 __ cmp(r3, Operand(Smi::FromInt(0)));
2219 __ b(eq, &closed_state);
2220 __ b(lt, &wrong_state);
2222 // Load suspended function and context.
2223 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2224 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2226 // Load receiver and store as the first argument.
2227 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2230 // Push holes for the rest of the arguments to the generator function.
2231 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2233 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2234 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2235 Label push_argument_holes, push_frame;
2236 __ bind(&push_argument_holes);
2237 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2238 __ b(mi, &push_frame);
2240 __ jmp(&push_argument_holes);
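// The holes are placeholders: generator parameters are context-allocated,
// so the resumed code is not expected to read these stack slots.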
2242 // Enter a new JavaScript frame, and initialize its slots as they were when
2243 // the generator was suspended.
2245 __ bind(&push_frame);
2246 __ bl(&resume_frame);
2248 __ bind(&resume_frame);
2249 // lr = return address.
2250 // fp = caller's frame pointer.
2251 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2252 // cp = callee's context,
2253 // r4 = callee's JS function.
2254 __ PushFixedFrame(r4);
2255 // Adjust FP to point to saved FP.
2256 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2258 // Load the operand stack size.
2259 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2260 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2263 // If we are sending a value and there is no operand stack, we can jump back
2264 // in directly.
2265 if (resume_mode == JSGeneratorObject::NEXT) {
2267 __ cmp(r3, Operand(0));
2268 __ b(ne, &slow_resume);
2269 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2271 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2272 if (FLAG_enable_ool_constant_pool) {
2273 // Load the new code object's constant pool pointer.
2275 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
2278 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2281 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2282 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2285 __ bind(&slow_resume);
2288 // Otherwise, we push holes for the operand stack and call the runtime to fix
2289 // up the stack and the handlers.
2290 Label push_operand_holes, call_resume;
2291 __ bind(&push_operand_holes);
2292 __ sub(r3, r3, Operand(1), SetCC);
2293 __ b(mi, &call_resume);
2295 __ b(&push_operand_holes);
2296 __ bind(&call_resume);
2297 DCHECK(!result_register().is(r1));
2298 __ Push(r1, result_register());
2299 __ Push(Smi::FromInt(resume_mode));
2300 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2301 // Not reached: the runtime call returns elsewhere.
2302 __ stop("not-reached");
2304 // Reach here when generator is closed.
2305 __ bind(&closed_state);
2306 if (resume_mode == JSGeneratorObject::NEXT) {
2307 // Return completed iterator result when generator is closed.
2308 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2310 // Pop value from top-of-stack slot; box result into result register.
2311 EmitCreateIteratorResult(true);
2313 // Throw the provided value.
2315 __ CallRuntime(Runtime::kThrow, 1);
2319 // Throw error if we attempt to operate on a running generator.
2320 __ bind(&wrong_state);
2322 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2325 context()->Plug(result_register());
2329 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2333 const int instance_size = 5 * kPointerSize;
2334 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2337 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
2340 __ bind(&gc_required);
2341 __ Push(Smi::FromInt(instance_size));
2342 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2343 __ ldr(context_register(),
2344 MemOperand(fp, StandardFrameConstants::kContextOffset));
2346 __ bind(&allocated);
2347 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2348 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2349 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2351 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2352 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2353 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2354 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2355 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2357 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2359 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2361 // Only the value field needs a write barrier, as the other values are in the
2362 // root set.
2363 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2364 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2368 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2369 SetSourcePosition(prop->position());
2370 Literal* key = prop->key()->AsLiteral();
2371 DCHECK(!prop->IsSuperAccess());
2373 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2374 if (FLAG_vector_ics) {
2375 __ mov(VectorLoadICDescriptor::SlotRegister(),
2376 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2377 CallLoadIC(NOT_CONTEXTUAL);
2379 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2384 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2385 // Stack: receiver, home_object.
2386 SetSourcePosition(prop->position());
2387 Literal* key = prop->key()->AsLiteral();
2388 DCHECK(!key->value()->IsSmi());
2389 DCHECK(prop->IsSuperAccess());
2391 __ Push(key->value());
2392 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2396 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2397 SetSourcePosition(prop->position());
2398 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2399 if (FLAG_vector_ics) {
2400 __ mov(VectorLoadICDescriptor::SlotRegister(),
2401 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2404 CallIC(ic, prop->PropertyFeedbackId());
2409 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2410 // Stack: receiver, home_object, key.
2411 SetSourcePosition(prop->position());
2413 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2417 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2420 Expression* left_expr,
2421 Expression* right_expr) {
2422 Label done, smi_case, stub_call;
2424 Register scratch1 = r2;
2425 Register scratch2 = r3;
2427 // Get the arguments.
2429 Register right = r0;
2432 // Perform combined smi check on both operands.
2433 __ orr(scratch1, left, Operand(right));
2434 STATIC_ASSERT(kSmiTag == 0);
2435 JumpPatchSite patch_site(masm_);
2436 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
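// With kSmiTag == 0, a value is a smi iff its low bit is clear, so the low
// bit of (left | right) is clear iff both operands are smis; one test
// covers both.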
2438 __ bind(&stub_call);
2439 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2440 CallIC(code, expr->BinaryOperationFeedbackId());
2441 patch_site.EmitPatchInfo();
2445 // Smi case. This code works the same way as the smi-smi case in the
2446 // type-recording binary operation stub.
2449 __ GetLeastBitsFromSmi(scratch1, right, 5);
2450 __ mov(right, Operand(left, ASR, scratch1));
2451 __ bic(right, right, Operand(kSmiTagMask));
2454 __ SmiUntag(scratch1, left);
2455 __ GetLeastBitsFromSmi(scratch2, right, 5);
2456 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2457 __ TrySmiTag(right, scratch1, &stub_call);
2461 __ SmiUntag(scratch1, left);
2462 __ GetLeastBitsFromSmi(scratch2, right, 5);
2463 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2464 __ tst(scratch1, Operand(0xc0000000));
2465 __ b(ne, &stub_call);
2466 __ SmiTag(right, scratch1);
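// The shifted value must fit in the non-negative smi payload (< 2^30): if
// either of the top two bits is set, SmiTag would shift into the sign bit,
// so fall back to the stub.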
2470 __ add(scratch1, left, Operand(right), SetCC);
2471 __ b(vs, &stub_call);
2472 __ mov(right, scratch1);
2475 __ sub(scratch1, left, Operand(right), SetCC);
2476 __ b(vs, &stub_call);
2477 __ mov(right, scratch1);
2480 __ SmiUntag(ip, right);
2481 __ smull(scratch1, scratch2, left, ip);
2482 __ mov(ip, Operand(scratch1, ASR, 31));
2483 __ cmp(ip, Operand(scratch2));
2484 __ b(ne, &stub_call);
2485 __ cmp(scratch1, Operand::Zero());
2486 __ mov(right, Operand(scratch1), LeaveCC, ne);
2488 __ add(scratch2, right, Operand(left), SetCC);
2489 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2490 __ b(mi, &stub_call);
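// smull produces a 64-bit product, which fits in 32 bits iff the high word
// equals the sign extension (ASR 31) of the low word. A zero product needs
// the sign fix-up above: when left + right is negative the exact result is
// -0, which is not representable as a smi.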
2494 __ orr(right, left, Operand(right));
2496 case Token::BIT_AND:
2497 __ and_(right, left, Operand(right));
2499 case Token::BIT_XOR:
2500 __ eor(right, left, Operand(right));
2507 context()->Plug(r0);
2511 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2512 // Constructor is in r0.
2513 DCHECK(lit != NULL);
2516 // No access check is needed here since the constructor is created by the
2517 // class literal.
2518 Register scratch = r1;
2520 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2523 for (int i = 0; i < lit->properties()->length(); i++) {
2524 ObjectLiteral::Property* property = lit->properties()->at(i);
2525 Literal* key = property->key()->AsLiteral();
2526 Expression* value = property->value();
2527 DCHECK(key != NULL);
2529 if (property->is_static()) {
2530 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2532 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2535 VisitForStackValue(key);
2536 VisitForStackValue(value);
2538 switch (property->kind()) {
2539 case ObjectLiteral::Property::CONSTANT:
2540 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2541 case ObjectLiteral::Property::COMPUTED:
2542 case ObjectLiteral::Property::PROTOTYPE:
2543 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2546 case ObjectLiteral::Property::GETTER:
2547 __ CallRuntime(Runtime::kDefineClassGetter, 3);
2550 case ObjectLiteral::Property::SETTER:
2551 __ CallRuntime(Runtime::kDefineClassSetter, 3);
2560 __ CallRuntime(Runtime::kToFastProperties, 1);
2563 __ CallRuntime(Runtime::kToFastProperties, 1);
2567 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2569 OverwriteMode mode) {
2571 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2572 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2573 CallIC(code, expr->BinaryOperationFeedbackId());
2574 patch_site.EmitPatchInfo();
2575 context()->Plug(r0);
2579 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2580 DCHECK(expr->IsValidReferenceExpression());
2582 Property* prop = expr->AsProperty();
2583 LhsKind assign_type = GetAssignType(prop);
2585 switch (assign_type) {
2587 Variable* var = expr->AsVariableProxy()->var();
2588 EffectContext context(this);
2589 EmitVariableAssignment(var, Token::ASSIGN);
2592 case NAMED_PROPERTY: {
2593 __ push(r0); // Preserve value.
2594 VisitForAccumulatorValue(prop->obj());
2595 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2596 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2597 __ mov(StoreDescriptor::NameRegister(),
2598 Operand(prop->key()->AsLiteral()->value()));
2602 case NAMED_SUPER_PROPERTY: {
2604 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2605 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2606 // stack: value, this; r0: home_object
2607 Register scratch = r2;
2608 Register scratch2 = r3;
2609 __ mov(scratch, result_register()); // home_object
2610 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2611 __ ldr(scratch2, MemOperand(sp, 0)); // this
2612 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2613 __ str(scratch, MemOperand(sp, 0)); // home_object
2614 // stack: this, home_object; r0: value
2615 EmitNamedSuperPropertyStore(prop);
2618 case KEYED_SUPER_PROPERTY: {
2620 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2621 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2622 __ Push(result_register());
2623 VisitForAccumulatorValue(prop->key());
2624 Register scratch = r2;
2625 Register scratch2 = r3;
2626 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2627 // stack: value, this, home_object; r0: key, r3: value
2628 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2629 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2630 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2631 __ str(scratch, MemOperand(sp, kPointerSize));
2632 __ str(r0, MemOperand(sp, 0));
2633 __ Move(r0, scratch2);
2634 // stack: this, home_object, key; r0: value.
2635 EmitKeyedSuperPropertyStore(prop);
2638 case KEYED_PROPERTY: {
2639 __ push(r0); // Preserve value.
2640 VisitForStackValue(prop->obj());
2641 VisitForAccumulatorValue(prop->key());
2642 __ Move(StoreDescriptor::NameRegister(), r0);
2643 __ Pop(StoreDescriptor::ValueRegister(),
2644 StoreDescriptor::ReceiverRegister());
2646 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2651 context()->Plug(r0);
2655 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2656 Variable* var, MemOperand location) {
2657 __ str(result_register(), location);
2658 if (var->IsContextSlot()) {
2659 // RecordWrite may destroy all its register arguments.
2660 __ mov(r3, result_register());
2661 int offset = Context::SlotOffset(var->index());
2662 __ RecordWriteContextSlot(
2663 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
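// Stack slots are covered by precise stack scanning, so only stores into
// heap-allocated Context objects need the write barrier.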
2668 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2669 if (var->IsUnallocated()) {
2670 // Global var, const, or let.
2671 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2672 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2675 } else if (op == Token::INIT_CONST_LEGACY) {
2676 // Const initializers need a write barrier.
2677 DCHECK(!var->IsParameter()); // No const parameters.
2678 if (var->IsLookupSlot()) {
2680 __ mov(r0, Operand(var->name()));
2681 __ Push(cp, r0); // Context and name.
2682 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2684 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2686 MemOperand location = VarOperand(var, r1);
2687 __ ldr(r2, location);
2688 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2690 EmitStoreToStackLocalOrContextSlot(var, location);
2694 } else if (var->mode() == LET && op != Token::INIT_LET) {
2695 // Non-initializing assignment to let variable needs a write barrier.
2696 DCHECK(!var->IsLookupSlot());
2697 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2699 MemOperand location = VarOperand(var, r1);
2700 __ ldr(r3, location);
2701 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2703 __ mov(r3, Operand(var->name()));
2705 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2706 // Perform the assignment.
2708 EmitStoreToStackLocalOrContextSlot(var, location);
2710 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2711 if (var->IsLookupSlot()) {
2712 // Assignment to var.
2713 __ push(r0); // Value.
2714 __ mov(r1, Operand(var->name()));
2715 __ mov(r0, Operand(Smi::FromInt(strict_mode())));
2716 __ Push(cp, r1, r0); // Context, name, strict mode.
2717 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2719 // Assignment to var or initializing assignment to let/const in harmony
2720 // mode.
2721 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2722 MemOperand location = VarOperand(var, r1);
2723 if (generate_debug_code_ && op == Token::INIT_LET) {
2724 // Check for an uninitialized let binding.
2725 __ ldr(r2, location);
2726 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2727 __ Check(eq, kLetBindingReInitialization);
2729 EmitStoreToStackLocalOrContextSlot(var, location);
2732 // Non-initializing assignments to consts are ignored.
2736 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2737 // Assignment to a property, using a named store IC.
2738 Property* prop = expr->target()->AsProperty();
2739 DCHECK(prop != NULL);
2740 DCHECK(prop->key()->IsLiteral());
2742 // Record source code position before IC call.
2743 SetSourcePosition(expr->position());
2744 __ mov(StoreDescriptor::NameRegister(),
2745 Operand(prop->key()->AsLiteral()->value()));
2746 __ pop(StoreDescriptor::ReceiverRegister());
2747 CallStoreIC(expr->AssignmentFeedbackId());
2749 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2750 context()->Plug(r0);
2754 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2755 // Assignment to named property of super.
2757 // stack : receiver ('this'), home_object
2758 DCHECK(prop != NULL);
2759 Literal* key = prop->key()->AsLiteral();
2760 DCHECK(key != NULL);
2762 __ Push(key->value());
2764 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2765 : Runtime::kStoreToSuper_Sloppy),
2770 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2771 // Assignment to keyed property of super.
2773 // stack : receiver ('this'), home_object, key
2774 DCHECK(prop != NULL);
2777 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict
2778 : Runtime::kStoreKeyedToSuper_Sloppy),
2783 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2784 // Assignment to a property, using a keyed store IC.
2786 // Record source code position before IC call.
2787 SetSourcePosition(expr->position());
2788 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2789 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2791 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2792 CallIC(ic, expr->AssignmentFeedbackId());
2794 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2795 context()->Plug(r0);
2799 void FullCodeGenerator::VisitProperty(Property* expr) {
2800 Comment cmnt(masm_, "[ Property");
2801 Expression* key = expr->key();
2803 if (key->IsPropertyName()) {
2804 if (!expr->IsSuperAccess()) {
2805 VisitForAccumulatorValue(expr->obj());
2806 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2807 EmitNamedPropertyLoad(expr);
2809 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2810 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2811 __ Push(result_register());
2812 EmitNamedSuperPropertyLoad(expr);
2814 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2815 context()->Plug(r0);
2817 if (!expr->IsSuperAccess()) {
2818 VisitForStackValue(expr->obj());
2819 VisitForAccumulatorValue(expr->key());
2820 __ Move(LoadDescriptor::NameRegister(), r0);
2821 __ pop(LoadDescriptor::ReceiverRegister());
2822 EmitKeyedPropertyLoad(expr);
2824 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2825 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2826 __ Push(result_register());
2827 VisitForStackValue(expr->key());
2828 EmitKeyedSuperPropertyLoad(expr);
2830 context()->Plug(r0);
2835 void FullCodeGenerator::CallIC(Handle<Code> code,
2836 TypeFeedbackId ast_id) {
2838 // All calls must have a predictable size in full-codegen code to ensure that
2839 // the debugger can patch them correctly.
2840 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2841 NEVER_INLINE_TARGET_ADDRESS);
2845 // Code common for calls using the IC.
2846 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2847 Expression* callee = expr->expression();
2849 CallICState::CallType call_type =
2850 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2852 // Get the target function.
2853 if (call_type == CallICState::FUNCTION) {
2854 { StackValueContext context(this);
2855 EmitVariableLoad(callee->AsVariableProxy());
2856 PrepareForBailout(callee, NO_REGISTERS);
2858 // Push undefined as receiver. This is patched in the method prologue if it
2859 // is a sloppy mode method.
2860 __ Push(isolate()->factory()->undefined_value());
2862 // Load the function from the receiver.
2863 DCHECK(callee->IsProperty());
2864 DCHECK(!callee->AsProperty()->IsSuperAccess());
2865 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2866 EmitNamedPropertyLoad(callee->AsProperty());
2867 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2868 // Push the target function under the receiver.
2869 __ ldr(ip, MemOperand(sp, 0));
2871 __ str(r0, MemOperand(sp, kPointerSize));
2874 EmitCall(expr, call_type);
2878 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2879 Expression* callee = expr->expression();
2880 DCHECK(callee->IsProperty());
2881 Property* prop = callee->AsProperty();
2882 DCHECK(prop->IsSuperAccess());
2884 SetSourcePosition(prop->position());
2885 Literal* key = prop->key()->AsLiteral();
2886 DCHECK(!key->value()->IsSmi());
2887 // Load the function from the receiver.
2888 const Register scratch = r1;
2889 SuperReference* super_ref = prop->obj()->AsSuperReference();
2890 EmitLoadHomeObject(super_ref);
2892 VisitForAccumulatorValue(super_ref->this_var());
2895 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2897 __ Push(key->value());
2901 // - this (receiver)
2902 // - this (receiver) <-- LoadFromSuper will pop here and below.
2905 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2907 // Replace home_object with target function.
2908 __ str(r0, MemOperand(sp, kPointerSize));
2911 // - target function
2912 // - this (receiver)
2913 EmitCall(expr, CallICState::METHOD);
2917 // Code common for calls using the IC.
2918 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2921 VisitForAccumulatorValue(key);
2923 Expression* callee = expr->expression();
2925 // Load the function from the receiver.
2926 DCHECK(callee->IsProperty());
2927 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2928 __ Move(LoadDescriptor::NameRegister(), r0);
2929 EmitKeyedPropertyLoad(callee->AsProperty());
2930 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2932 // Push the target function under the receiver.
2933 __ ldr(ip, MemOperand(sp, 0));
2935 __ str(r0, MemOperand(sp, kPointerSize));
2937 EmitCall(expr, CallICState::METHOD);
2941 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2942 Expression* callee = expr->expression();
2943 DCHECK(callee->IsProperty());
2944 Property* prop = callee->AsProperty();
2945 DCHECK(prop->IsSuperAccess());
2947 SetSourcePosition(prop->position());
2948 // Load the function from the receiver.
2949 const Register scratch = r1;
2950 SuperReference* super_ref = prop->obj()->AsSuperReference();
2951 EmitLoadHomeObject(super_ref);
2953 VisitForAccumulatorValue(super_ref->this_var());
2956 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2958 VisitForStackValue(prop->key());
2962 // - this (receiver)
2963 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2966 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2968 // Replace home_object with target function.
2969 __ str(r0, MemOperand(sp, kPointerSize));
2972 // - target function
2973 // - this (receiver)
2974 EmitCall(expr, CallICState::METHOD);
2978 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2979 // Load the arguments.
2980 ZoneList<Expression*>* args = expr->arguments();
2981 int arg_count = args->length();
2982 { PreservePositionScope scope(masm()->positions_recorder());
2983 for (int i = 0; i < arg_count; i++) {
2984 VisitForStackValue(args->at(i));
2988 // Record source position of the IC call.
2989 SetSourcePosition(expr->position());
2990 Handle<Code> ic = CallIC::initialize_stub(
2991 isolate(), arg_count, call_type);
2992 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
2993 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2994 // Don't assign a type feedback id to the IC, since type feedback is provided
2995 // by the vector above.
2998 RecordJSReturnSite(expr);
2999 // Restore context register.
3000 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3001 context()->DropAndPlug(1, r0);
3005 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3006 // r5: copy of the first argument or undefined if it doesn't exist.
3007 if (arg_count > 0) {
3008 __ ldr(r5, MemOperand(sp, arg_count * kPointerSize));
3010 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3013 // r4: the enclosing function.
3014 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3016 // r3: the receiver of the enclosing function.
3017 int receiver_offset = 2 + info_->scope()->num_parameters();
3018 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
3021 __ mov(r2, Operand(Smi::FromInt(strict_mode())));
3023 // r1: the start position of the scope the call resides in.
3024 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3026 // Do the runtime call.
3028 __ Push(r4, r3, r2, r1);
3029 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3033 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
3034 DCHECK(super_ref != NULL);
3035 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3037 __ CallRuntime(Runtime::kGetPrototype, 1);
3041 void FullCodeGenerator::VisitCall(Call* expr) {
3043 // We want to verify that RecordJSReturnSite gets called on all paths
3044 // through this function. Avoid early returns.
3045 expr->return_is_recorded_ = false;
3048 Comment cmnt(masm_, "[ Call");
3049 Expression* callee = expr->expression();
3050 Call::CallType call_type = expr->GetCallType(isolate());
3052 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3053 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3054 // to resolve the function we need to call and the receiver of the
3055 // call. Then we call the resolved function using the given
3057 ZoneList<Expression*>* args = expr->arguments();
3058 int arg_count = args->length();
3060 { PreservePositionScope pos_scope(masm()->positions_recorder());
3061 VisitForStackValue(callee);
3062 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3063 __ push(r2); // Reserved receiver slot.
3065 // Push the arguments.
3066 for (int i = 0; i < arg_count; i++) {
3067 VisitForStackValue(args->at(i));
3070 // Push a copy of the function (found below the arguments) and
3072 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3074 EmitResolvePossiblyDirectEval(arg_count);
3076 // The runtime call returns a pair of values in r0 (function) and
3077 // r1 (receiver). Touch up the stack with the right values.
3078 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3079 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
3081 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3084 // Record source position for debugger.
3085 SetSourcePosition(expr->position());
3086 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3087 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3089 RecordJSReturnSite(expr);
3090 // Restore context register.
3091 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3092 context()->DropAndPlug(1, r0);
3093 } else if (call_type == Call::GLOBAL_CALL) {
3094 EmitCallWithLoadIC(expr);
3096 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3097 // Call to a lookup slot (dynamically introduced variable).
3098 VariableProxy* proxy = callee->AsVariableProxy();
3101 { PreservePositionScope scope(masm()->positions_recorder());
3102 // Generate code for loading from variables potentially shadowed
3103 // by eval-introduced variables.
3104 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3108 // Call the runtime to find the function to call (returned in r0)
3109 // and the object holding it (returned in r1).
3110 DCHECK(!context_register().is(r2));
3111 __ mov(r2, Operand(proxy->name()));
3112 __ Push(context_register(), r2);
3113 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3114 __ Push(r0, r1); // Function, receiver.
3115 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3117 // If fast case code has been generated, emit code to push the
3118 // function and receiver and have the slow path jump around this
3120 if (done.is_linked()) {
3126 // The receiver is implicitly the global receiver. Indicate this
3127 // by passing undefined to the call function stub.
3128 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3133 // The receiver is either the global receiver or an object found
3134 // by LoadContextSlot.
3136 } else if (call_type == Call::PROPERTY_CALL) {
3137 Property* property = callee->AsProperty();
3138 bool is_named_call = property->key()->IsPropertyName();
3139 if (property->IsSuperAccess()) {
3140 if (is_named_call) {
3141 EmitSuperCallWithLoadIC(expr);
3143 EmitKeyedSuperCallWithLoadIC(expr);
3147 PreservePositionScope scope(masm()->positions_recorder());
3148 VisitForStackValue(property->obj());
3150 if (is_named_call) {
3151 EmitCallWithLoadIC(expr);
3153 EmitKeyedCallWithLoadIC(expr, property->key());
3156 } else if (call_type == Call::SUPER_CALL) {
3157 SuperReference* super_ref = callee->AsSuperReference();
3158 EmitLoadSuperConstructor(super_ref);
3159 __ Push(result_register());
3160 VisitForStackValue(super_ref->this_var());
3161 EmitCall(expr, CallICState::METHOD);
3163 DCHECK(call_type == Call::OTHER_CALL);
3164 // Call to an arbitrary expression not handled specially above.
3165 { PreservePositionScope scope(masm()->positions_recorder());
3166 VisitForStackValue(callee);
3168 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3170 // Emit function call.
3175 // RecordJSReturnSite should have been called.
3176 DCHECK(expr->return_is_recorded_);
3181 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3182 Comment cmnt(masm_, "[ CallNew");
3183 // According to ECMA-262, section 11.2.2, page 44, the function
3184 // expression in new calls must be evaluated before the
3185 // arguments.
3187 // Push constructor on the stack. If it's not a function it's used as
3188 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3189 // ignored.
3190 if (expr->expression()->IsSuperReference()) {
3191 EmitLoadSuperConstructor(expr->expression()->AsSuperReference());
3192 __ Push(result_register());
3194 VisitForStackValue(expr->expression());
3197 // Push the arguments ("left-to-right") on the stack.
3198 ZoneList<Expression*>* args = expr->arguments();
3199 int arg_count = args->length();
3200 for (int i = 0; i < arg_count; i++) {
3201 VisitForStackValue(args->at(i));
3204 // Call the construct call builtin that handles allocation and
3205 // constructor invocation.
3206 SetSourcePosition(expr->position());
3208 // Load function and argument count into r1 and r0.
3209 __ mov(r0, Operand(arg_count));
3210 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3212 // Record call targets in unoptimized code.
3213 if (FLAG_pretenuring_call_new) {
3214 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3215 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3216 expr->CallNewFeedbackSlot().ToInt() + 1);
3219 __ Move(r2, FeedbackVector());
3220 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3222 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3223 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3224 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3225 context()->Plug(r0);
3229 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3230 ZoneList<Expression*>* args = expr->arguments();
3231 DCHECK(args->length() == 1);
3233 VisitForAccumulatorValue(args->at(0));
3235 Label materialize_true, materialize_false;
3236 Label* if_true = NULL;
3237 Label* if_false = NULL;
3238 Label* fall_through = NULL;
3239 context()->PrepareTest(&materialize_true, &materialize_false,
3240 &if_true, &if_false, &fall_through);
3242 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3244 Split(eq, if_true, if_false, fall_through);
3246 context()->Plug(if_true, if_false);
3250 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3251 ZoneList<Expression*>* args = expr->arguments();
3252 DCHECK(args->length() == 1);
3254 VisitForAccumulatorValue(args->at(0));
3256 Label materialize_true, materialize_false;
3257 Label* if_true = NULL;
3258 Label* if_false = NULL;
3259 Label* fall_through = NULL;
3260 context()->PrepareTest(&materialize_true, &materialize_false,
3261 &if_true, &if_false, &fall_through);
3263 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3264 __ NonNegativeSmiTst(r0);
3265 Split(eq, if_true, if_false, fall_through);
3267 context()->Plug(if_true, if_false);
3271 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3272 ZoneList<Expression*>* args = expr->arguments();
3273 DCHECK(args->length() == 1);
3275 VisitForAccumulatorValue(args->at(0));
3277 Label materialize_true, materialize_false;
3278 Label* if_true = NULL;
3279 Label* if_false = NULL;
3280 Label* fall_through = NULL;
3281 context()->PrepareTest(&materialize_true, &materialize_false,
3282 &if_true, &if_false, &fall_through);
3284 __ JumpIfSmi(r0, if_false);
3285 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3288 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3289 // Undetectable objects behave like undefined when tested with typeof.
3290 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3291 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3293 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3294 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3296 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3297 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3298 Split(le, if_true, if_false, fall_through);
3300 context()->Plug(if_true, if_false);
3304 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3305 ZoneList<Expression*>* args = expr->arguments();
3306 DCHECK(args->length() == 1);
3308 VisitForAccumulatorValue(args->at(0));
3310 Label materialize_true, materialize_false;
3311 Label* if_true = NULL;
3312 Label* if_false = NULL;
3313 Label* fall_through = NULL;
3314 context()->PrepareTest(&materialize_true, &materialize_false,
3315 &if_true, &if_false, &fall_through);
3317 __ JumpIfSmi(r0, if_false);
3318 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3319 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3320 Split(ge, if_true, if_false, fall_through);
3322 context()->Plug(if_true, if_false);
3326 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3327 ZoneList<Expression*>* args = expr->arguments();
3328 DCHECK(args->length() == 1);
3330 VisitForAccumulatorValue(args->at(0));
3332 Label materialize_true, materialize_false;
3333 Label* if_true = NULL;
3334 Label* if_false = NULL;
3335 Label* fall_through = NULL;
3336 context()->PrepareTest(&materialize_true, &materialize_false,
3337 &if_true, &if_false, &fall_through);
3339 __ JumpIfSmi(r0, if_false);
3340 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3341 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3342 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3343 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3344 Split(ne, if_true, if_false, fall_through);
3346 context()->Plug(if_true, if_false);
3350 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3351 CallRuntime* expr) {
3352 ZoneList<Expression*>* args = expr->arguments();
3353 DCHECK(args->length() == 1);
3355 VisitForAccumulatorValue(args->at(0));
3357 Label materialize_true, materialize_false, skip_lookup;
3358 Label* if_true = NULL;
3359 Label* if_false = NULL;
3360 Label* fall_through = NULL;
3361 context()->PrepareTest(&materialize_true, &materialize_false,
3362 &if_true, &if_false, &fall_through);
3364 __ AssertNotSmi(r0);
3366 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3367 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3368 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3369 __ b(ne, &skip_lookup);
3371 // Check for fast case object. Generate false result for slow case object.
3372 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3373 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3374 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3378 // Look for the valueOf name in the descriptor array, and indicate false
3379 // if found. Since we omit an enumeration index check, a valueOf added via
3380 // a transition that shares this descriptor array yields a false positive.
3381 Label entry, loop, done;
3383 // Skip loop if no descriptors are valid.
3384 __ NumberOfOwnDescriptors(r3, r1);
3385 __ cmp(r3, Operand::Zero());
3388 __ LoadInstanceDescriptors(r1, r4);
3389 // r4: descriptor array.
3390 // r3: valid entries in the descriptor array.
3391 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3393 // Calculate location of the first key name.
3394 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3395 // Calculate the end of the descriptor array.
3397 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3399 // Loop through all the keys in the descriptor array. If one of these is
3400 // the string "valueOf", the result is false.
3401 // The use of ip to store the valueOf string assumes that it is not otherwise
3402 // used in the loop below.
3403 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3406 __ ldr(r3, MemOperand(r4, 0));
3409 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3411 __ cmp(r4, Operand(r2));
3416 // Set the bit in the map to indicate that there is no local valueOf field.
3417 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3418 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3419 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3421 __ bind(&skip_lookup);
3423 // If a valueOf property is not found on the object, check that its
3424 // prototype is the unmodified String prototype. If not, the result is false.
3425 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3426 __ JumpIfSmi(r2, if_false);
3427 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3428 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3429 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3430 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3432 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3433 Split(eq, if_true, if_false, fall_through);
3435 context()->Plug(if_true, if_false);
3439 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3440 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK(args->length() == 1);
3443 VisitForAccumulatorValue(args->at(0));
3445 Label materialize_true, materialize_false;
3446 Label* if_true = NULL;
3447 Label* if_false = NULL;
3448 Label* fall_through = NULL;
3449 context()->PrepareTest(&materialize_true, &materialize_false,
3450 &if_true, &if_false, &fall_through);
3452 __ JumpIfSmi(r0, if_false);
3453 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3454 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3455 Split(eq, if_true, if_false, fall_through);
3457 context()->Plug(if_true, if_false);
3461 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3462 ZoneList<Expression*>* args = expr->arguments();
3463 DCHECK(args->length() == 1);
3465 VisitForAccumulatorValue(args->at(0));
3467 Label materialize_true, materialize_false;
3468 Label* if_true = NULL;
3469 Label* if_false = NULL;
3470 Label* fall_through = NULL;
3471 context()->PrepareTest(&materialize_true, &materialize_false,
3472 &if_true, &if_false, &fall_through);
3474 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3475 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3476 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3477 __ cmp(r2, Operand(0x80000000));
3478 __ cmp(r1, Operand(0x00000000), eq);
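// -0.0 is the sign bit alone: exponent word 0x80000000, mantissa word
// 0x00000000. The second cmp is predicated on eq, so both words must match
// for the eq split below to take the true branch.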
3480 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3481 Split(eq, if_true, if_false, fall_through);
3483 context()->Plug(if_true, if_false);
3487 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3488 ZoneList<Expression*>* args = expr->arguments();
3489 DCHECK(args->length() == 1);
3491 VisitForAccumulatorValue(args->at(0));
3493 Label materialize_true, materialize_false;
3494 Label* if_true = NULL;
3495 Label* if_false = NULL;
3496 Label* fall_through = NULL;
3497 context()->PrepareTest(&materialize_true, &materialize_false,
3498 &if_true, &if_false, &fall_through);
3500 __ JumpIfSmi(r0, if_false);
3501 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3502 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3503 Split(eq, if_true, if_false, fall_through);
3505 context()->Plug(if_true, if_false);
3509 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3510 ZoneList<Expression*>* args = expr->arguments();
3511 DCHECK(args->length() == 1);
3513 VisitForAccumulatorValue(args->at(0));
3515 Label materialize_true, materialize_false;
3516 Label* if_true = NULL;
3517 Label* if_false = NULL;
3518 Label* fall_through = NULL;
3519 context()->PrepareTest(&materialize_true, &materialize_false,
3520 &if_true, &if_false, &fall_through);
3522 __ JumpIfSmi(r0, if_false);
3523 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3524 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3525 Split(eq, if_true, if_false, fall_through);
3527 context()->Plug(if_true, if_false);
3531 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3532 ZoneList<Expression*>* args = expr->arguments();
3533 DCHECK(args->length() == 1);
3535 VisitForAccumulatorValue(args->at(0));
3537 Label materialize_true, materialize_false;
3538 Label* if_true = NULL;
3539 Label* if_false = NULL;
3540 Label* fall_through = NULL;
3541 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3542 &if_false, &fall_through);
3544 __ JumpIfSmi(r0, if_false);
3546 Register type_reg = r2;
3547 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3548 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3549 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3550 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
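// Unsigned range check: (type - FIRST) is below-or-equal (LAST - FIRST)
// exactly when FIRST <= type <= LAST, hence the 'ls' condition below.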
3551 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3552 Split(ls, if_true, if_false, fall_through);
3554 context()->Plug(if_true, if_false);
3558 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3559 DCHECK(expr->arguments()->length() == 0);
3561 Label materialize_true, materialize_false;
3562 Label* if_true = NULL;
3563 Label* if_false = NULL;
3564 Label* fall_through = NULL;
3565 context()->PrepareTest(&materialize_true, &materialize_false,
3566 &if_true, &if_false, &fall_through);
3568 // Get the frame pointer for the calling frame.
3569 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3571 // Skip the arguments adaptor frame if it exists.
3572 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3573 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3574 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
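// The ldr is predicated on eq: r2 advances to the adaptor's caller frame
// only if the marker matched, and otherwise keeps the direct caller's fp.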
3576 // Check the marker in the calling frame.
3577 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3578 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3579 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3580 Split(eq, if_true, if_false, fall_through);
3582 context()->Plug(if_true, if_false);
3586 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3587 ZoneList<Expression*>* args = expr->arguments();
3588 DCHECK(args->length() == 2);
3590 // Load the two objects into registers and perform the comparison.
3591 VisitForStackValue(args->at(0));
3592 VisitForAccumulatorValue(args->at(1));
3594 Label materialize_true, materialize_false;
3595 Label* if_true = NULL;
3596 Label* if_false = NULL;
3597 Label* fall_through = NULL;
3598 context()->PrepareTest(&materialize_true, &materialize_false,
3599 &if_true, &if_false, &fall_through);
3603 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3604 Split(eq, if_true, if_false, fall_through);
3606 context()->Plug(if_true, if_false);
3610 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3611 ZoneList<Expression*>* args = expr->arguments();
3612 DCHECK(args->length() == 1);
3614 // ArgumentsAccessStub expects the key in r1 and the formal
3615 // parameter count in r0.
3616 VisitForAccumulatorValue(args->at(0));
3618 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3619 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3621 context()->Plug(r0);
3625 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3626 DCHECK(expr->arguments()->length() == 0);
3628 // Get the number of formal parameters.
3629 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3631 // Check if the calling frame is an arguments adaptor frame.
3632 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3633 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3634 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3636 // Arguments adaptor case: Read the arguments length from the
3637 // adaptor frame.
3638 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3640 context()->Plug(r0);
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(r0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ b(eq, &function);

  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ b(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


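// For illustration (a sketch of the assumption, not emitted code): the
// STATIC_ASSERTs above rely on the two callable instance types sitting at
// the two ends of the spec-object range:
//
//   FIRST_SPEC_OBJECT_TYPE                                    <- callable
//   FIRST_SPEC_OBJECT_TYPE + 1 .. LAST_SPEC_OBJECT_TYPE - 1   <- non-callable
//   LAST_SPEC_OBJECT_TYPE                                     <- callable
//
// so after CompareObjectType a single 'eq' check against each boundary is
// enough to classify the object as having class 'Function'.

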
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  __ JumpIfSmi(object, &not_date_object);
  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ b(ne, &not_date_object);

  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ ldr(scratch1, MemOperand(scratch1));
      __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(r0);
}


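// For illustration (C-like pseudocode, not emitted code): the fast path in
// EmitDateField above amounts to
//
//   if (index == 0) return date->value;               // never cached
//   if (date->cache_stamp == isolate->date_cache_stamp) {
//     return date->fields[index];                     // cache still valid
//   }
//   return GetDateField(date, index);                 // slow C call
//
// where the isolate-wide stamp is bumped whenever the date cache is
// invalidated (e.g. on a timezone change), implicitly invalidating every
// cached field at once.

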
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}


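// For illustration (a sketch, not emitted code): for a sequential one-byte
// string the byte address of character i is
//
//   string + SeqOneByteString::kHeaderSize - kHeapObjectTag + i
//
// and because the index register still holds a smi (i << kSmiTagSize), the
// 'LSR, kSmiTagSize' shift in the strb addressing mode untags it for free.

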
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);
  // Load the argument into r0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}


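// For illustration (a sketch, not emitted code): a JSFunctionResultCache is
// a FixedArray of key/value pairs with a "finger" remembering the most
// recent hit, so the fast path above is roughly
//
//   finger = cache->finger;                  // smi offset into the elements
//   if (cache->elements[finger] == key) {
//     return cache->elements[finger + 1];    // cached value
//   }
//   return %GetFromCache(cache, key);        // runtime updates the cache

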
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be
  // used for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch,
                           string,        // used as scratch
                           elements_end,  // used as scratch
                           &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


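// For illustration (a sketch, not emitted code): the fast path above is a
// hand-scheduled version of roughly this JavaScript, where any non-flat or
// non-one-byte input makes the generated code produce undefined instead, so
// the caller can fall back to the generic join. The helper names below are
// purely illustrative:
//
//   function FastOneByteJoin(array, separator) {
//     if (array.length == 0) return "";
//     if (array.length == 1) return array[0];
//     var length = sumOfElementLengths(array) +
//                  separator.length * (array.length - 1);
//     var result = allocateOneByteString(length);
//     // copy the elements, interleaving the separator
//     return result;
//   }

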
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    Register receiver = LoadDescriptor::ReceiverRegister();
    __ ldr(receiver, GlobalObjectOperand());
    __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
    __ push(receiver);

    // Load the function from the receiver.
    __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
    if (FLAG_vector_ics) {
      __ mov(VectorLoadICDescriptor::SlotRegister(),
             Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(r1, Operand(Smi::FromInt(strict_mode())));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        __ Push(result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


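// For illustration (a sketch, not emitted code): the inline smi fast path in
// VisitCountOperation adds the tagged constant directly, since for smis
// tag(a) + tag(b) == tag(a + b), and uses the overflow flag to decide
// whether to undo the add and fall back to the BinaryOpIC:
//
//   adds r0, r0, #2      ; Smi::FromInt(1), as kSmiTagSize == 1
//   bvc  done            ; no overflow: the result is a valid smi
//   sub  r0, r0, #2      ; undo, then call the stub on the original value

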
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ mov(VectorLoadICDescriptor::SlotRegister(),
             Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


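// For illustration (a sketch, not emitted code): EmitLiteralCompareTypeof
// pattern-matches comparisons such as
//
//   if (typeof x === "number") { ... }
//
// and tests the operand's map directly instead of materializing the typeof
// string: the "number" case is just a smi check plus a heap-number map
// compare, while an unrecognized literal simply jumps to if_false.

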
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store cooked return address while executing finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ ldrb(r1, MemOperand(ip));
  __ SmiTag(r1);
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  __ SmiUntag(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  STATIC_ASSERT(sizeof(bool) == 1);  // NOLINT(runtime/sizeof)
  __ strb(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore result register from stack.
  __ pop(r1);

  // Uncook return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


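// For illustration (a sketch, not emitted code): "cooking" the return
// address stores it as a GC-safe, smi-tagged delta rather than a raw code
// pointer:
//
//   enter:  cooked = SmiTag(lr - code_object_start)
//   exit:   pc     = SmiUntag(cooked) + code_object_start
//
// If the code object is moved by a GC while the finally block runs, the
// delta remains valid even though a raw return address would not.

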
#undef __

#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __


static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_ool_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // kProfileCounterResetSequence instructions.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


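// For illustration (a sketch, not emitted code): each back edge toggles
// between two states by patching one instruction plus the load target:
//
//   INTERRUPT:                ON_STACK_REPLACEMENT / OSR_AFTER_STACK_CHECK:
//     bpl ok                    nop                ; branch disabled
//     ldr/mov ip, <interrupt>   ldr/mov ip, <osr>  ; retargeted load
//     blx ip                    blx ip
//
// so GetBackEdgeState below can recover the state by checking whether the
// patched slot holds a branch or a nop, and which builtin the load targets.

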
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM