// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code that can be patched. This class
// provides methods to emit the patchable code and a method, EmitPatchInfo,
// to record a marker back to the patchable code. This marker is a
// cmp rx, #yyy instruction, and x * 0x00000fff + yyy (using the raw 12-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
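
// Worked example of the patch info encoding above (illustrative, not from
// the original sources): for a patch site 10 instructions back,
// delta_to_patch_site == 10, so the register code x is
// 10 / kOff12Mask == 0 and the raw immediate yyy is 10 % kOff12Mask == 10.
// The emitted marker is "cmp r0, #10", from which the patcher recovers the
// delta as x * kOff12Mask + yyy == 10.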

static void EmitStackCheck(MacroAssembler* masm_,
                           Register stack_limit_scratch,
                           int pointers = 0,
                           Register scratch = sp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  ASSERT(scratch.is(sp) == (pointers == 0));
  Heap::RootListIndex index;
  if (pointers != 0) {
    __ sub(scratch, sp, Operand(pointers * kPointerSize));
    index = Heap::kRealStackLimitRootIndex;
  } else {
    index = Heap::kStackLimitRootIndex;
  }
  __ LoadRoot(stack_limit_scratch, index);
  __ cmp(scratch, Operand(stack_limit_scratch));
  __ b(hs, &ok);
  Handle<Code> stack_check = isolate->builtins()->StackCheck();
  PredictableCodeSizeScope predictable(masm_,
      masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
  __ Call(stack_check, RelocInfo::CODE_TARGET);
  __ bind(&ok);
}
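
// Usage sketch (illustrative): Generate() below calls
//   EmitStackCheck(masm_, r2, locals_count, r9);
// before allocating 128 or more locals, comparing
// sp - locals_count * kPointerSize against the real stack limit so the
// subsequent pushes cannot overflow. The short form
// EmitStackCheck(masm_, ip) compares sp itself against the ordinary limit.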

// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, r2, locals_count, r9);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
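
  // Worked example of the push loops above (illustrative): locals_count == 70
  // with kMaxPushes == 32 gives loop_iterations == 2 (64 slots filled by the
  // counted loop) and remaining == 6 unrolled pushes, i.e. 70 undefined
  // slots in total.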

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }
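
  // Worked example of parameter_offset above (illustrative, assuming 4-byte
  // pointers): with num_parameters == 3, parameter i == 0 is the first
  // caller-pushed argument and resolves to fp + kCallerSPOffset + 2 * 4,
  // while parameter i == 2 resolves to fp + kCallerSPOffset; the receiver
  // sits one word above parameter 0.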

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite the receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      EmitStackCheck(masm_, ip);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);

  Label ok;
  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
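
// Illustrative weight calculation for the bookkeeping above: a back edge
// 1024 bytes after its target with a hypothetical kCodeSizeMultiplier of 128
// gives weight == Min(kMaxBackEdgeWeight, Max(1, 1024 / 128)) == 8, so loops
// with larger bodies drain the profiling counter faster per iteration,
// approximating "counter measures code executed".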


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      __ add(sp, sp, Operand(sp_delta));
      __ Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
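
// Illustrative sp_delta for the return sequence above: a function with two
// declared parameters pops (2 + 1) * kPointerSize == 12 bytes (both
// parameters plus the receiver) before jumping through lr.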


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
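
// Illustrative offsets for StackOperand above (assuming 4-byte pointers): in
// a two-parameter function, a parameter with index 0 resolves to
// -0 * 4 + (2 + 1) * 4 == fp + 12 and index 1 to fp + 8, while a local with
// index 0 resolves to fp + kLocal0Offset and each further local sits one
// word below the previous one.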

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }
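
    // Note on the inline fast path above (illustrative): smis are tagged
    // with a low bit of 0, so r2 = r1 | r0 is a smi exactly when both
    // operands are smis, letting one patchable check guard the inlined
    // comparison of both values.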

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
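
  // For orientation (matches the loads below), the five for-in slots are,
  // sp-relative: [0] current index (smi), [1] length (smi), [2] enum cache
  // or fixed array, [3] expected map or smi 0/1 marker, [4] the enumerable
  // object itself.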

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.break_label());
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(),
                            info->strict_mode(),
                            info->is_generator());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
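
// Illustrative case for the fast path above: in
//   function f() { eval(s); return x; }
// 'x' is DYNAMIC_GLOBAL (or DYNAMIC_LOCAL when it might shadow a local), so
// the generated code only checks the intervening context extensions for an
// eval-introduced binding and falls back to the runtime lookup when one is
// found.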


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      Serializer::enabled(isolate()) || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->value()));
            __ ldr(r1, MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}
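
// Illustrative accessor pairing for the loop above: in
//   { get x() {}, set x(v) {} }
// both properties share one AccessorTable entry keyed by 'x', so a single
// kDefineOrRedefineAccessorProperty call installs the getter and the setter
// together.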


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        isolate(),
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        allocation_site_mode,
        length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ mov(r0, Operand(Smi::FromInt(flags)));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;

    if (has_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
    }

    FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
                                   length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(r0);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ ldr(r6, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
      __ str(result_register(), FieldMemOperand(r1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(r1, offset, result_register(), r2,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ mov(r3, Operand(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}
1869 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1870 ASSERT(expr->target()->IsValidReferenceExpression());
1872 Comment cmnt(masm_, "[ Assignment");
1874 // Left-hand side can only be a property, a global or a (parameter or local)
1876 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1877 LhsKind assign_type = VARIABLE;
1878 Property* property = expr->target()->AsProperty();
1879 if (property != NULL) {
1880 assign_type = (property->key()->IsPropertyName())
1885 // Evaluate LHS expression.
1886 switch (assign_type) {
1888 // Nothing to do here.
1890 case NAMED_PROPERTY:
1891 if (expr->is_compound()) {
1892 // We need the receiver both on the stack and in the accumulator.
1893 VisitForAccumulatorValue(property->obj());
1894 __ push(result_register());
1896 VisitForStackValue(property->obj());
1899 case KEYED_PROPERTY:
1900 if (expr->is_compound()) {
1901 VisitForStackValue(property->obj());
1902 VisitForAccumulatorValue(property->key());
1903 __ ldr(r1, MemOperand(sp, 0));
1906 VisitForStackValue(property->obj());
1907 VisitForStackValue(property->key());
1912 // For compound assignments we need another deoptimization point after the
1913 // variable/property load.
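// For example (illustrative): o.x += v must be able to deoptimize right
// after loading o.x, before the addition and the store take place.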
1914 if (expr->is_compound()) {
1915 { AccumulatorValueContext context(this);
1916 switch (assign_type) {
1918 EmitVariableLoad(expr->target()->AsVariableProxy());
1919 PrepareForBailout(expr->target(), TOS_REG);
1921 case NAMED_PROPERTY:
1922 EmitNamedPropertyLoad(property);
1923 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1925 case KEYED_PROPERTY:
1926 EmitKeyedPropertyLoad(property);
1927 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1932 Token::Value op = expr->binary_op();
1933 __ push(r0); // Left operand goes on the stack.
1934 VisitForAccumulatorValue(expr->value());
1936 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1939 SetSourcePosition(expr->position() + 1);
1940 AccumulatorValueContext context(this);
1941 if (ShouldInlineSmiCase(op)) {
1942 EmitInlineSmiBinaryOp(expr->binary_operation(),
1948 EmitBinaryOp(expr->binary_operation(), op, mode);
1951 // Deoptimization point in case the binary operation may have side effects.
1952 PrepareForBailout(expr->binary_operation(), TOS_REG);
1954 VisitForAccumulatorValue(expr->value());
1957 // Record source position before possible IC call.
1958 SetSourcePosition(expr->position());
1961 switch (assign_type) {
1963 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1965 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1966 context()->Plug(r0);
1968 case NAMED_PROPERTY:
1969 EmitNamedPropertyAssignment(expr);
1971 case KEYED_PROPERTY:
1972 EmitKeyedPropertyAssignment(expr);
1978 void FullCodeGenerator::VisitYield(Yield* expr) {
1979 Comment cmnt(masm_, "[ Yield");
1980 // Evaluate yielded value first; the initial iterator definition depends on
1981 // this. It stays on the stack while we update the iterator.
1982 VisitForStackValue(expr->expression());
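// Roughly (an illustrative summary of the kinds): INITIAL is the implicit
// suspension when the generator is first entered, SUSPEND is an explicit
// 'yield expr', FINAL is the implicit closing yield emitted when the
// generator returns, and DELEGATING is 'yield* iterable'.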
1984 switch (expr->yield_kind()) {
1985 case Yield::SUSPEND:
1986 // Pop value from top-of-stack slot; box result into result register.
1987 EmitCreateIteratorResult(false);
1988 __ push(result_register());
1990 case Yield::INITIAL: {
1991 Label suspend, continuation, post_runtime, resume;
1995 __ bind(&continuation);
1999 VisitForAccumulatorValue(expr->generator_object());
2000 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2001 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2002 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2003 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2005 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2006 kLRHasBeenSaved, kDontSaveFPRegs);
2007 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2009 __ b(eq, &post_runtime);
2010 __ push(r0); // generator object
2011 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2012 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2013 __ bind(&post_runtime);
2014 __ pop(result_register());
2015 EmitReturnSequence();
2018 context()->Plug(result_register());
2022 case Yield::FINAL: {
2023 VisitForAccumulatorValue(expr->generator_object());
2024 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2025 __ str(r1, FieldMemOperand(result_register(),
2026 JSGeneratorObject::kContinuationOffset));
2027 // Pop value from top-of-stack slot, box result into result register.
2028 EmitCreateIteratorResult(true);
2029 EmitUnwindBeforeReturn();
2030 EmitReturnSequence();
2034 case Yield::DELEGATING: {
2035 VisitForStackValue(expr->generator_object());
2037 // Initial stack layout is as follows:
2038 // [sp + 1 * kPointerSize] iter
2039 // [sp + 0 * kPointerSize] g
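// The code below is roughly equivalent to this sketch (illustrative, not
// the exact spec algorithm):
//   var received = undefined;
//   while (true) {
//     var result = iter.next(received);  // or iter['throw'](e) via l_catch
//     if (result.done) break;
//     received = yield result;           // yielded without re-boxing
//   }
//   result.value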
2041 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2042 Label l_next, l_call, l_loop;
2043 // Initial send value is undefined.
2044 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2047 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2049 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2050 __ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
2051 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2052 __ Push(r2, r3, r0); // "throw", iter, except
2055 // try { received = %yield result }
2056       // Shuffle the received result above a try handler and yield it without re-boxing.
2059 __ pop(r0); // result
2060 __ PushTryHandler(StackHandler::CATCH, expr->index());
2061 const int handler_size = StackHandlerConstants::kSize;
2062 __ push(r0); // result
2064 __ bind(&l_continuation);
2066 __ bind(&l_suspend);
2067 const int generator_object_depth = kPointerSize + handler_size;
2068 __ ldr(r0, MemOperand(sp, generator_object_depth));
2070 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2071 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2072 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2073 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2075 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2076 kLRHasBeenSaved, kDontSaveFPRegs);
2077 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2078 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2079 __ pop(r0); // result
2080 EmitReturnSequence();
2081 __ bind(&l_resume); // received in r0
2084 // receiver = iter; f = 'next'; arg = received;
2086 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2087 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2088 __ Push(r2, r3, r0); // "next", iter, received
2090 // result = receiver[f](arg);
2092 __ ldr(r1, MemOperand(sp, kPointerSize));
2093 __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2094 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2095 CallIC(ic, TypeFeedbackId::None());
2097 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2098 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2101 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2102 __ Drop(1); // The function is still on the stack; drop it.
2104 // if (!result.done) goto l_try;
2106 __ push(r0); // save result
2107 __ LoadRoot(r2, Heap::kdone_stringRootIndex); // "done"
2108 CallLoadIC(NOT_CONTEXTUAL); // result.done in r0
2109 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2111 __ cmp(r0, Operand(0));
2115 __ pop(r0); // result
2116 __ LoadRoot(r2, Heap::kvalue_stringRootIndex); // "value"
2117 CallLoadIC(NOT_CONTEXTUAL); // result.value in r0
2118 context()->DropAndPlug(2, r0); // drop iter and g
2125 void FullCodeGenerator::EmitGeneratorResume(Expression* generator,
2127 JSGeneratorObject::ResumeMode resume_mode) {
2128 // The value stays in r0, and is ultimately read by the resumed generator, as
2129 // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2130 // is read to throw the value when the resumed generator is already closed.
2131 // r1 will hold the generator object until the activation has been resumed.
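// For example (illustrative): for gen.next(v), v ends up in r0 and is
// observed by the suspended activation as the value of its 'yield'.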
2132 VisitForStackValue(generator);
2133 VisitForAccumulatorValue(value);
2136 // Check generator state.
2137 Label wrong_state, closed_state, done;
2138 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2139 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2140 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2141 __ cmp(r3, Operand(Smi::FromInt(0)));
2142 __ b(eq, &closed_state);
2143 __ b(lt, &wrong_state);
2145 // Load suspended function and context.
2146 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2147 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2149 // Load receiver and store as the first argument.
2150 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2153 // Push holes for the rest of the arguments to the generator function.
2154 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2156 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2157 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2158 Label push_argument_holes, push_frame;
2159 __ bind(&push_argument_holes);
2160 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2161 __ b(mi, &push_frame);
2163 __ jmp(&push_argument_holes);
2165 // Enter a new JavaScript frame, and initialize its slots as they were when
2166 // the generator was suspended.
2168 __ bind(&push_frame);
2169 __ bl(&resume_frame);
2171 __ bind(&resume_frame);
2172 // lr = return address.
2173 // fp = caller's frame pointer.
2174 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2175 // cp = callee's context,
2176 // r4 = callee's JS function.
2177 __ PushFixedFrame(r4);
2178 // Adjust FP to point to saved FP.
2179 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2181 // Load the operand stack size.
2182 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2183 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2186 // If we are sending a value and there is no operand stack, we can jump back
2188 if (resume_mode == JSGeneratorObject::NEXT) {
2190 __ cmp(r3, Operand(0));
2191 __ b(ne, &slow_resume);
2192 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2194 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2195 if (FLAG_enable_ool_constant_pool) {
2196 // Load the new code object's constant pool pointer.
2198 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
2201 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2204 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2205 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2208 __ bind(&slow_resume);
2211 // Otherwise, we push holes for the operand stack and call the runtime to fix
2212 // up the stack and the handlers.
2213 Label push_operand_holes, call_resume;
2214 __ bind(&push_operand_holes);
2215 __ sub(r3, r3, Operand(1), SetCC);
2216 __ b(mi, &call_resume);
2218 __ b(&push_operand_holes);
2219 __ bind(&call_resume);
2220 ASSERT(!result_register().is(r1));
2221 __ Push(r1, result_register());
2222 __ Push(Smi::FromInt(resume_mode));
2223 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2224 // Not reached: the runtime call returns elsewhere.
2225 __ stop("not-reached");
2227   // Reached when the generator is closed.
2228 __ bind(&closed_state);
2229 if (resume_mode == JSGeneratorObject::NEXT) {
2230 // Return completed iterator result when generator is closed.
2231 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2233 // Pop value from top-of-stack slot; box result into result register.
2234 EmitCreateIteratorResult(true);
2236 // Throw the provided value.
2238 __ CallRuntime(Runtime::kHiddenThrow, 1);
2242 // Throw error if we attempt to operate on a running generator.
2243 __ bind(&wrong_state);
2245 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2248 context()->Plug(result_register());
2252 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
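// Allocates a fresh iterator result, i.e. an object of the shape
// { value: <result>, done: <done> } (an illustrative description of the
// stores performed below).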
2256 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2258 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2261 __ bind(&gc_required);
2262 __ Push(Smi::FromInt(map->instance_size()));
2263 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2264 __ ldr(context_register(),
2265 MemOperand(fp, StandardFrameConstants::kContextOffset));
2267 __ bind(&allocated);
2268 __ mov(r1, Operand(map));
2270 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2271 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2272 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2273 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2274 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2275 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2277 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2279 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2281   // Only the value field needs a write barrier, as the other values are in the root set.
2283 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2284 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2288 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2289 SetSourcePosition(prop->position());
2290 Literal* key = prop->key()->AsLiteral();
2291 __ mov(r2, Operand(key->value()));
2292   // Call load IC. It has arguments receiver in r0 and property name in r2.
2293 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2297 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2298 SetSourcePosition(prop->position());
2299 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
2300 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2301 CallIC(ic, prop->PropertyFeedbackId());
2305 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2308 Expression* left_expr,
2309 Expression* right_expr) {
2310 Label done, smi_case, stub_call;
2312 Register scratch1 = r2;
2313 Register scratch2 = r3;
2315 // Get the arguments.
2317 Register right = r0;
2320 // Perform combined smi check on both operands.
2321 __ orr(scratch1, left, Operand(right));
2322 STATIC_ASSERT(kSmiTag == 0);
2323 JumpPatchSite patch_site(masm_);
2324 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2326 __ bind(&stub_call);
2327 BinaryOpICStub stub(isolate(), op, mode);
2328 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2329 patch_site.EmitPatchInfo();
2333   // Smi case. This code works the same way as the smi-smi case in the
2334   // type-recording binary operation stub.
2337 __ GetLeastBitsFromSmi(scratch1, right, 5);
2338 __ mov(right, Operand(left, ASR, scratch1));
2339 __ bic(right, right, Operand(kSmiTagMask));
2342 __ SmiUntag(scratch1, left);
2343 __ GetLeastBitsFromSmi(scratch2, right, 5);
2344 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2345 __ TrySmiTag(right, scratch1, &stub_call);
2349 __ SmiUntag(scratch1, left);
2350 __ GetLeastBitsFromSmi(scratch2, right, 5);
2351 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
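// The untagged result of '>>>' must fit in a non-negative smi: if either
// of the two topmost bits is set it does not, so fall back to the stub.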
2352 __ tst(scratch1, Operand(0xc0000000));
2353 __ b(ne, &stub_call);
2354 __ SmiTag(right, scratch1);
2358 __ add(scratch1, left, Operand(right), SetCC);
2359 __ b(vs, &stub_call);
2360 __ mov(right, scratch1);
2363 __ sub(scratch1, left, Operand(right), SetCC);
2364 __ b(vs, &stub_call);
2365 __ mov(right, scratch1);
2368 __ SmiUntag(ip, right);
2369 __ smull(scratch1, scratch2, left, ip);
2370 __ mov(ip, Operand(scratch1, ASR, 31));
2371 __ cmp(ip, Operand(scratch2));
2372 __ b(ne, &stub_call);
2373 __ cmp(scratch1, Operand::Zero());
2374 __ mov(right, Operand(scratch1), LeaveCC, ne);
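// A zero product needs a sign check, e.g. -1 * 0 is -0, which is not
// representable as a smi. Since one operand is zero, the sign of
// left + right is the sign the result should have: mi falls back to the
// stub, pl yields +0.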
2376 __ add(scratch2, right, Operand(left), SetCC);
2377 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2378 __ b(mi, &stub_call);
2382 __ orr(right, left, Operand(right));
2384 case Token::BIT_AND:
2385 __ and_(right, left, Operand(right));
2387 case Token::BIT_XOR:
2388 __ eor(right, left, Operand(right));
2395 context()->Plug(r0);
2399 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2401 OverwriteMode mode) {
2403 BinaryOpICStub stub(isolate(), op, mode);
2404 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2405 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2406 patch_site.EmitPatchInfo();
2407 context()->Plug(r0);
2411 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2412 ASSERT(expr->IsValidReferenceExpression());
2414   // Left-hand side can only be a property, a global or a (parameter or local) slot variable.
2416 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2417 LhsKind assign_type = VARIABLE;
2418 Property* prop = expr->AsProperty();
2420 assign_type = (prop->key()->IsPropertyName())
2425 switch (assign_type) {
2427 Variable* var = expr->AsVariableProxy()->var();
2428 EffectContext context(this);
2429 EmitVariableAssignment(var, Token::ASSIGN);
2432 case NAMED_PROPERTY: {
2433 __ push(r0); // Preserve value.
2434 VisitForAccumulatorValue(prop->obj());
2436 __ pop(r0); // Restore value.
2437 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2441 case KEYED_PROPERTY: {
2442 __ push(r0); // Preserve value.
2443 VisitForStackValue(prop->obj());
2444 VisitForAccumulatorValue(prop->key());
2446 __ Pop(r0, r2); // r0 = restored value.
2447 Handle<Code> ic = strict_mode() == SLOPPY
2448 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2454 context()->Plug(r0);
2458 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2459 Variable* var, MemOperand location) {
2460 __ str(result_register(), location);
2461 if (var->IsContextSlot()) {
2462 // RecordWrite may destroy all its register arguments.
2463 __ mov(r3, result_register());
2464 int offset = Context::SlotOffset(var->index());
2465 __ RecordWriteContextSlot(
2466 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2471 void FullCodeGenerator::EmitCallStoreContextSlot(
2472 Handle<String> name, StrictMode strict_mode) {
2473 __ push(r0); // Value.
2474 __ mov(r1, Operand(name));
2475 __ mov(r0, Operand(Smi::FromInt(strict_mode)));
2476 __ Push(cp, r1, r0); // Context, name, strict mode.
2477 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2481 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2482 if (var->IsUnallocated()) {
2483 // Global var, const, or let.
2484 __ mov(r2, Operand(var->name()));
2485 __ ldr(r1, GlobalObjectOperand());
2488 } else if (op == Token::INIT_CONST_LEGACY) {
2489 // Const initializers need a write barrier.
2490 ASSERT(!var->IsParameter()); // No const parameters.
2491 if (var->IsLookupSlot()) {
2493 __ mov(r0, Operand(var->name()));
2494 __ Push(cp, r0); // Context and name.
2495 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2497 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2499 MemOperand location = VarOperand(var, r1);
2500 __ ldr(r2, location);
2501 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2503 EmitStoreToStackLocalOrContextSlot(var, location);
2507 } else if (var->mode() == LET && op != Token::INIT_LET) {
2508 // Non-initializing assignment to let variable needs a write barrier.
2509 if (var->IsLookupSlot()) {
2510 EmitCallStoreContextSlot(var->name(), strict_mode());
2512 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2514 MemOperand location = VarOperand(var, r1);
2515 __ ldr(r3, location);
2516 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
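// The hole marks an uninitialized let binding; assigning to it must throw,
// e.g. (illustrative) { x = 1; let x; } raises a ReferenceError.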
2518 __ mov(r3, Operand(var->name()));
2520 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2521 // Perform the assignment.
2523 EmitStoreToStackLocalOrContextSlot(var, location);
2526 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2527 // Assignment to var or initializing assignment to let/const
2529 if (var->IsLookupSlot()) {
2530 EmitCallStoreContextSlot(var->name(), strict_mode());
2532 ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2533 MemOperand location = VarOperand(var, r1);
2534 if (generate_debug_code_ && op == Token::INIT_LET) {
2535 // Check for an uninitialized let binding.
2536 __ ldr(r2, location);
2537 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2538 __ Check(eq, kLetBindingReInitialization);
2540 EmitStoreToStackLocalOrContextSlot(var, location);
2543 // Non-initializing assignments to consts are ignored.
2547 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2548 // Assignment to a property, using a named store IC.
2549 Property* prop = expr->target()->AsProperty();
2550 ASSERT(prop != NULL);
2551 ASSERT(prop->key()->AsLiteral() != NULL);
2553 // Record source code position before IC call.
2554 SetSourcePosition(expr->position());
2555 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2558 CallStoreIC(expr->AssignmentFeedbackId());
2560 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2561 context()->Plug(r0);
2565 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2566 // Assignment to a property, using a keyed store IC.
2568 // Record source code position before IC call.
2569 SetSourcePosition(expr->position());
2570 __ Pop(r2, r1); // r1 = key.
2572 Handle<Code> ic = strict_mode() == SLOPPY
2573 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2574 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2575 CallIC(ic, expr->AssignmentFeedbackId());
2577 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2578 context()->Plug(r0);
2582 void FullCodeGenerator::VisitProperty(Property* expr) {
2583 Comment cmnt(masm_, "[ Property");
2584 Expression* key = expr->key();
2586 if (key->IsPropertyName()) {
2587 VisitForAccumulatorValue(expr->obj());
2588 EmitNamedPropertyLoad(expr);
2589 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2590 context()->Plug(r0);
2592 VisitForStackValue(expr->obj());
2593 VisitForAccumulatorValue(expr->key());
2595 EmitKeyedPropertyLoad(expr);
2596 context()->Plug(r0);
2601 void FullCodeGenerator::CallIC(Handle<Code> code,
2602 TypeFeedbackId ast_id) {
2604 // All calls must have a predictable size in full-codegen code to ensure that
2605 // the debugger can patch them correctly.
2606 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2607 NEVER_INLINE_TARGET_ADDRESS);
2611 // Code common for calls using the IC.
2612 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2613 Expression* callee = expr->expression();
2615 CallIC::CallType call_type = callee->IsVariableProxy()
2619 // Get the target function.
2620 if (call_type == CallIC::FUNCTION) {
2621 { StackValueContext context(this);
2622 EmitVariableLoad(callee->AsVariableProxy());
2623 PrepareForBailout(callee, NO_REGISTERS);
2625 // Push undefined as receiver. This is patched in the method prologue if it
2626 // is a sloppy mode method.
2627 __ Push(isolate()->factory()->undefined_value());
2629 // Load the function from the receiver.
2630 ASSERT(callee->IsProperty());
2631 __ ldr(r0, MemOperand(sp, 0));
2632 EmitNamedPropertyLoad(callee->AsProperty());
2633 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2634 // Push the target function under the receiver.
2635 __ ldr(ip, MemOperand(sp, 0));
2637 __ str(r0, MemOperand(sp, kPointerSize));
2640 EmitCall(expr, call_type);
2644 // Code common for calls using the IC.
2645 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2648 VisitForAccumulatorValue(key);
2650 Expression* callee = expr->expression();
2652 // Load the function from the receiver.
2653 ASSERT(callee->IsProperty());
2654 __ ldr(r1, MemOperand(sp, 0));
2655 EmitKeyedPropertyLoad(callee->AsProperty());
2656 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2658 // Push the target function under the receiver.
2659 __ ldr(ip, MemOperand(sp, 0));
2661 __ str(r0, MemOperand(sp, kPointerSize));
2663 EmitCall(expr, CallIC::METHOD);
2667 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2668 // Load the arguments.
2669 ZoneList<Expression*>* args = expr->arguments();
2670 int arg_count = args->length();
2671 { PreservePositionScope scope(masm()->positions_recorder());
2672 for (int i = 0; i < arg_count; i++) {
2673 VisitForStackValue(args->at(i));
2677 // Record source position of the IC call.
2678 SetSourcePosition(expr->position());
2679 Handle<Code> ic = CallIC::initialize_stub(
2680 isolate(), arg_count, call_type);
2681 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2682 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2683 // Don't assign a type feedback id to the IC, since type feedback is provided
2684 // by the vector above.
2687 RecordJSReturnSite(expr);
2688 // Restore context register.
2689 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2690 context()->DropAndPlug(1, r0);
2694 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2695 // r4: copy of the first argument or undefined if it doesn't exist.
2696 if (arg_count > 0) {
2697 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2699 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2702 // r3: the receiver of the enclosing function.
2703 int receiver_offset = 2 + info_->scope()->num_parameters();
2704 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
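// The 2 in the offset accounts for the two frame slots (saved fp and
// return address) that sit between fp and the parameters (a sketch of the
// standard frame layout).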
2707 __ mov(r2, Operand(Smi::FromInt(strict_mode())));
2709   // r1: the start position of the scope the call resides in.
2710 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2712 // Do the runtime call.
2713 __ Push(r4, r3, r2, r1);
2714 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2718 void FullCodeGenerator::VisitCall(Call* expr) {
2720 // We want to verify that RecordJSReturnSite gets called on all paths
2721 // through this function. Avoid early returns.
2722 expr->return_is_recorded_ = false;
2725 Comment cmnt(masm_, "[ Call");
2726 Expression* callee = expr->expression();
2727 Call::CallType call_type = expr->GetCallType(isolate());
2729 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2730 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2731 // to resolve the function we need to call and the receiver of the
2732     // call. Then we call the resolved function using the given arguments.
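// For example (illustrative): a call written as eval(src) may or may not
// be a direct eval; which function actually runs is only known at runtime.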
2734 ZoneList<Expression*>* args = expr->arguments();
2735 int arg_count = args->length();
2737 { PreservePositionScope pos_scope(masm()->positions_recorder());
2738 VisitForStackValue(callee);
2739 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2740 __ push(r2); // Reserved receiver slot.
2742 // Push the arguments.
2743 for (int i = 0; i < arg_count; i++) {
2744 VisitForStackValue(args->at(i));
2747       // Push a copy of the function (found below the arguments) and resolve eval.
2749 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2751 EmitResolvePossiblyDirectEval(arg_count);
2753 // The runtime call returns a pair of values in r0 (function) and
2754 // r1 (receiver). Touch up the stack with the right values.
2755 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2756 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2759 // Record source position for debugger.
2760 SetSourcePosition(expr->position());
2761 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2762 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2764 RecordJSReturnSite(expr);
2765 // Restore context register.
2766 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2767 context()->DropAndPlug(1, r0);
2768 } else if (call_type == Call::GLOBAL_CALL) {
2769 EmitCallWithLoadIC(expr);
2771 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2772 // Call to a lookup slot (dynamically introduced variable).
2773 VariableProxy* proxy = callee->AsVariableProxy();
2776 { PreservePositionScope scope(masm()->positions_recorder());
2777 // Generate code for loading from variables potentially shadowed
2778 // by eval-introduced variables.
2779 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2783 // Call the runtime to find the function to call (returned in r0)
2784     // and the object holding it (returned in r1).
2785 ASSERT(!context_register().is(r2));
2786 __ mov(r2, Operand(proxy->name()));
2787 __ Push(context_register(), r2);
2788 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2789 __ Push(r0, r1); // Function, receiver.
2791 // If fast case code has been generated, emit code to push the
2792     // function and receiver and have the slow path jump around this code.
2794 if (done.is_linked()) {
2800 // The receiver is implicitly the global receiver. Indicate this
2801       // by passing undefined to the call function stub.
2802 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2807 // The receiver is either the global receiver or an object found
2808 // by LoadContextSlot.
2810 } else if (call_type == Call::PROPERTY_CALL) {
2811 Property* property = callee->AsProperty();
2812 { PreservePositionScope scope(masm()->positions_recorder());
2813 VisitForStackValue(property->obj());
2815 if (property->key()->IsPropertyName()) {
2816 EmitCallWithLoadIC(expr);
2818 EmitKeyedCallWithLoadIC(expr, property->key());
2821 ASSERT(call_type == Call::OTHER_CALL);
2822 // Call to an arbitrary expression not handled specially above.
2823 { PreservePositionScope scope(masm()->positions_recorder());
2824 VisitForStackValue(callee);
2826 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2828 // Emit function call.
2833 // RecordJSReturnSite should have been called.
2834 ASSERT(expr->return_is_recorded_);
2839 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2840 Comment cmnt(masm_, "[ CallNew");
2841 // According to ECMA-262, section 11.2.2, page 44, the function
2842   // expression in new calls must be evaluated before the arguments.
2845 // Push constructor on the stack. If it's not a function it's used as
2846   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
2848 VisitForStackValue(expr->expression());
2850 // Push the arguments ("left-to-right") on the stack.
2851 ZoneList<Expression*>* args = expr->arguments();
2852 int arg_count = args->length();
2853 for (int i = 0; i < arg_count; i++) {
2854 VisitForStackValue(args->at(i));
2857 // Call the construct call builtin that handles allocation and
2858 // constructor invocation.
2859 SetSourcePosition(expr->position());
2861 // Load function and argument count into r1 and r0.
2862 __ mov(r0, Operand(arg_count));
2863 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2865 // Record call targets in unoptimized code.
2866 if (FLAG_pretenuring_call_new) {
2867 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2868 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2869 expr->CallNewFeedbackSlot() + 1);
2872 __ Move(r2, FeedbackVector());
2873 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2875 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2876 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2877 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2878 context()->Plug(r0);
2882 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2883 ZoneList<Expression*>* args = expr->arguments();
2884 ASSERT(args->length() == 1);
2886 VisitForAccumulatorValue(args->at(0));
2888 Label materialize_true, materialize_false;
2889 Label* if_true = NULL;
2890 Label* if_false = NULL;
2891 Label* fall_through = NULL;
2892 context()->PrepareTest(&materialize_true, &materialize_false,
2893 &if_true, &if_false, &fall_through);
2895 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2897 Split(eq, if_true, if_false, fall_through);
2899 context()->Plug(if_true, if_false);
2903 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2904 ZoneList<Expression*>* args = expr->arguments();
2905 ASSERT(args->length() == 1);
2907 VisitForAccumulatorValue(args->at(0));
2909 Label materialize_true, materialize_false;
2910 Label* if_true = NULL;
2911 Label* if_false = NULL;
2912 Label* fall_through = NULL;
2913 context()->PrepareTest(&materialize_true, &materialize_false,
2914 &if_true, &if_false, &fall_through);
2916 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2917 __ NonNegativeSmiTst(r0);
2918 Split(eq, if_true, if_false, fall_through);
2920 context()->Plug(if_true, if_false);
2924 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2925 ZoneList<Expression*>* args = expr->arguments();
2926 ASSERT(args->length() == 1);
2928 VisitForAccumulatorValue(args->at(0));
2930 Label materialize_true, materialize_false;
2931 Label* if_true = NULL;
2932 Label* if_false = NULL;
2933 Label* fall_through = NULL;
2934 context()->PrepareTest(&materialize_true, &materialize_false,
2935 &if_true, &if_false, &fall_through);
2937 __ JumpIfSmi(r0, if_false);
2938 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2941 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2942 // Undetectable objects behave like undefined when tested with typeof.
2943 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2944 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2946 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2947 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2949 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2950 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2951 Split(le, if_true, if_false, fall_through);
2953 context()->Plug(if_true, if_false);
2957 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2958 ZoneList<Expression*>* args = expr->arguments();
2959 ASSERT(args->length() == 1);
2961 VisitForAccumulatorValue(args->at(0));
2963 Label materialize_true, materialize_false;
2964 Label* if_true = NULL;
2965 Label* if_false = NULL;
2966 Label* fall_through = NULL;
2967 context()->PrepareTest(&materialize_true, &materialize_false,
2968 &if_true, &if_false, &fall_through);
2970 __ JumpIfSmi(r0, if_false);
2971 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2972 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2973 Split(ge, if_true, if_false, fall_through);
2975 context()->Plug(if_true, if_false);
2979 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2980 ZoneList<Expression*>* args = expr->arguments();
2981 ASSERT(args->length() == 1);
2983 VisitForAccumulatorValue(args->at(0));
2985 Label materialize_true, materialize_false;
2986 Label* if_true = NULL;
2987 Label* if_false = NULL;
2988 Label* fall_through = NULL;
2989 context()->PrepareTest(&materialize_true, &materialize_false,
2990 &if_true, &if_false, &fall_through);
2992 __ JumpIfSmi(r0, if_false);
2993 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2994 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
2995 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2996 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2997 Split(ne, if_true, if_false, fall_through);
2999 context()->Plug(if_true, if_false);
3003 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3004 CallRuntime* expr) {
3005 ZoneList<Expression*>* args = expr->arguments();
3006 ASSERT(args->length() == 1);
3008 VisitForAccumulatorValue(args->at(0));
3010 Label materialize_true, materialize_false, skip_lookup;
3011 Label* if_true = NULL;
3012 Label* if_false = NULL;
3013 Label* fall_through = NULL;
3014 context()->PrepareTest(&materialize_true, &materialize_false,
3015 &if_true, &if_false, &fall_through);
3017 __ AssertNotSmi(r0);
3019 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3020 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3021 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3022 __ b(ne, &skip_lookup);
3024 // Check for fast case object. Generate false result for slow case object.
3025 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3026 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3027 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3031 // Look for valueOf name in the descriptor array, and indicate false if
3032 // found. Since we omit an enumeration index check, if it is added via a
3033 // transition that shares its descriptor array, this is a false positive.
3034 Label entry, loop, done;
3036 // Skip loop if no descriptors are valid.
3037 __ NumberOfOwnDescriptors(r3, r1);
3038 __ cmp(r3, Operand::Zero());
3041 __ LoadInstanceDescriptors(r1, r4);
3042 // r4: descriptor array.
3043 // r3: valid entries in the descriptor array.
3044 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3046 // Calculate location of the first key name.
3047 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3048 // Calculate the end of the descriptor array.
3050 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
3052 // Loop through all the keys in the descriptor array. If one of these is the
3053   // string "valueOf", the result is false.
3054 // The use of ip to store the valueOf string assumes that it is not otherwise
3055 // used in the loop below.
3056 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3059 __ ldr(r3, MemOperand(r4, 0));
3062 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3064 __ cmp(r4, Operand(r2));
3069 // Set the bit in the map to indicate that there is no local valueOf field.
3070 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3071 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3072 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3074 __ bind(&skip_lookup);
3076   // If a valueOf property is not found on the object, check that its
3077   // prototype is the unmodified String prototype. If not, the result is false.
3078 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3079 __ JumpIfSmi(r2, if_false);
3080 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3081 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3082 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3083 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3085 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3086 Split(eq, if_true, if_false, fall_through);
3088 context()->Plug(if_true, if_false);
3092 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3093 ZoneList<Expression*>* args = expr->arguments();
3094 ASSERT(args->length() == 1);
3096 VisitForAccumulatorValue(args->at(0));
3098 Label materialize_true, materialize_false;
3099 Label* if_true = NULL;
3100 Label* if_false = NULL;
3101 Label* fall_through = NULL;
3102 context()->PrepareTest(&materialize_true, &materialize_false,
3103 &if_true, &if_false, &fall_through);
3105 __ JumpIfSmi(r0, if_false);
3106 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3107 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3108 Split(eq, if_true, if_false, fall_through);
3110 context()->Plug(if_true, if_false);
3114 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3115 ZoneList<Expression*>* args = expr->arguments();
3116 ASSERT(args->length() == 1);
3118 VisitForAccumulatorValue(args->at(0));
3120 Label materialize_true, materialize_false;
3121 Label* if_true = NULL;
3122 Label* if_false = NULL;
3123 Label* fall_through = NULL;
3124 context()->PrepareTest(&materialize_true, &materialize_false,
3125 &if_true, &if_false, &fall_through);
3127 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3128 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3129 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
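// -0 is the only heap number whose upper (sign/exponent) word is
// 0x80000000 while the lower (mantissa) word is 0.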
3130 __ cmp(r2, Operand(0x80000000));
3131 __ cmp(r1, Operand(0x00000000), eq);
3133 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3134 Split(eq, if_true, if_false, fall_through);
3136 context()->Plug(if_true, if_false);
3140 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3141 ZoneList<Expression*>* args = expr->arguments();
3142 ASSERT(args->length() == 1);
3144 VisitForAccumulatorValue(args->at(0));
3146 Label materialize_true, materialize_false;
3147 Label* if_true = NULL;
3148 Label* if_false = NULL;
3149 Label* fall_through = NULL;
3150 context()->PrepareTest(&materialize_true, &materialize_false,
3151 &if_true, &if_false, &fall_through);
3153 __ JumpIfSmi(r0, if_false);
3154 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3155 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3156 Split(eq, if_true, if_false, fall_through);
3158 context()->Plug(if_true, if_false);
3162 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3163 ZoneList<Expression*>* args = expr->arguments();
3164 ASSERT(args->length() == 1);
3166 VisitForAccumulatorValue(args->at(0));
3168 Label materialize_true, materialize_false;
3169 Label* if_true = NULL;
3170 Label* if_false = NULL;
3171 Label* fall_through = NULL;
3172 context()->PrepareTest(&materialize_true, &materialize_false,
3173 &if_true, &if_false, &fall_through);
3175 __ JumpIfSmi(r0, if_false);
3176 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3177 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3178 Split(eq, if_true, if_false, fall_through);
3180 context()->Plug(if_true, if_false);
3185 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3186 ASSERT(expr->arguments()->length() == 0);
3188 Label materialize_true, materialize_false;
3189 Label* if_true = NULL;
3190 Label* if_false = NULL;
3191 Label* fall_through = NULL;
3192 context()->PrepareTest(&materialize_true, &materialize_false,
3193 &if_true, &if_false, &fall_through);
3195 // Get the frame pointer for the calling frame.
3196 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3198 // Skip the arguments adaptor frame if it exists.
3199 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3200 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3201 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3203 // Check the marker in the calling frame.
3204 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3205 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3206 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3207 Split(eq, if_true, if_false, fall_through);
3209 context()->Plug(if_true, if_false);
3213 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3214 ZoneList<Expression*>* args = expr->arguments();
3215 ASSERT(args->length() == 2);
3217 // Load the two objects into registers and perform the comparison.
3218 VisitForStackValue(args->at(0));
3219 VisitForAccumulatorValue(args->at(1));
3221 Label materialize_true, materialize_false;
3222 Label* if_true = NULL;
3223 Label* if_false = NULL;
3224 Label* fall_through = NULL;
3225 context()->PrepareTest(&materialize_true, &materialize_false,
3226 &if_true, &if_false, &fall_through);
3230 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3231 Split(eq, if_true, if_false, fall_through);
3233 context()->Plug(if_true, if_false);
3237 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3238 ZoneList<Expression*>* args = expr->arguments();
3239 ASSERT(args->length() == 1);
3241   // ArgumentsAccessStub expects the key in r1 and the formal
3242   // parameter count in r0.
3243 VisitForAccumulatorValue(args->at(0));
3245 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3246 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3248 context()->Plug(r0);
3252 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3253 ASSERT(expr->arguments()->length() == 0);
3255 // Get the number of formal parameters.
3256 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3258 // Check if the calling frame is an arguments adaptor frame.
3259 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3260 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3261 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3263 // Arguments adaptor case: Read the arguments length from the
3265 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3267 context()->Plug(r0);
3271 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3272 ZoneList<Expression*>* args = expr->arguments();
3273 ASSERT(args->length() == 1);
3274 Label done, null, function, non_function_constructor;
3276 VisitForAccumulatorValue(args->at(0));
3278 // If the object is a smi, we return null.
3279 __ JumpIfSmi(r0, &null);
3281 // Check that the object is a JS object but take special care of JS
3282 // functions to make sure they have 'Function' as their class.
3283 // Assume that there are only two callable types, and one of them is at
3284 // either end of the type range for JS object types. Saves extra comparisons.
3285 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3286 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3287 // Map is now in r0.
3289 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3290 FIRST_SPEC_OBJECT_TYPE + 1);
3291 __ b(eq, &function);
3293 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3294 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3295 LAST_SPEC_OBJECT_TYPE - 1);
3296 __ b(eq, &function);
3297 // Assume that there is no larger type.
3298 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3300 // Check if the constructor in the map is a JS function.
3301 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3302 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3303 __ b(ne, &non_function_constructor);
3305 // r0 now contains the constructor function. Grab the
3306 // instance class name from there.
3307 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3308 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3311 // Functions have class 'Function'.
3313 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
3316 // Objects with a non-function constructor have class 'Object'.
3317 __ bind(&non_function_constructor);
3318 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3321 // Non-JS objects have class null.
3323 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3328 context()->Plug(r0);
3332 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3333 // Conditionally generate a log call.
3335 // 0 (literal string): The type of logging (corresponds to the flags).
3336 // This is used to determine whether or not to generate the log call.
3337 // 1 (string): Format string. Access the string at argument index 2
3338 // with '%2s' (see Logger::LogRuntime for all the formats).
3339 // 2 (array): Arguments to the format string.
3340 ZoneList<Expression*>* args = expr->arguments();
3341 ASSERT_EQ(args->length(), 3);
3342 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3343 VisitForStackValue(args->at(1));
3344 VisitForStackValue(args->at(2));
3345 __ CallRuntime(Runtime::kHiddenLog, 2);
3348 // Finally, we're expected to leave a value on the top of the stack.
3349 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3350 context()->Plug(r0);
3354 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3355 // Load the arguments on the stack and call the stub.
3356 SubStringStub stub(isolate());
3357 ZoneList<Expression*>* args = expr->arguments();
3358 ASSERT(args->length() == 3);
3359 VisitForStackValue(args->at(0));
3360 VisitForStackValue(args->at(1));
3361 VisitForStackValue(args->at(2));
3363 context()->Plug(r0);
3367 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3368 // Load the arguments on the stack and call the stub.
3369 RegExpExecStub stub(isolate());
3370 ZoneList<Expression*>* args = expr->arguments();
3371 ASSERT(args->length() == 4);
3372 VisitForStackValue(args->at(0));
3373 VisitForStackValue(args->at(1));
3374 VisitForStackValue(args->at(2));
3375 VisitForStackValue(args->at(3));
3377 context()->Plug(r0);
3381 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3382 ZoneList<Expression*>* args = expr->arguments();
3383 ASSERT(args->length() == 1);
3384 VisitForAccumulatorValue(args->at(0)); // Load the object.
3387 // If the object is a smi return the object.
3388 __ JumpIfSmi(r0, &done);
3389 // If the object is not a value type, return the object.
3390 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3391 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3394 context()->Plug(r0);
3398 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3399 ZoneList<Expression*>* args = expr->arguments();
3400 ASSERT(args->length() == 2);
3401 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3402 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3404 VisitForAccumulatorValue(args->at(0)); // Load the object.
3406 Label runtime, done, not_date_object;
3407 Register object = r0;
3408 Register result = r0;
3409 Register scratch0 = r9;
3410 Register scratch1 = r1;
3412   __ JumpIfSmi(object, &not_date_object);
3413 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3414   __ b(ne, &not_date_object);
3416 if (index->value() == 0) {
3417 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3420 if (index->value() < JSDate::kFirstUncachedField) {
3421 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3422 __ mov(scratch1, Operand(stamp));
3423 __ ldr(scratch1, MemOperand(scratch1));
3424 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3425 __ cmp(scratch1, scratch0);
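// If the stamps match, the cached date field is still valid and can be
// loaded directly; otherwise the C helper below recomputes it.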
3427 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3428 kPointerSize * index->value()));
3432 __ PrepareCallCFunction(2, scratch1);
3433 __ mov(r1, Operand(index));
3434 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3438   __ bind(&not_date_object);
3439 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3441 context()->Plug(r0);
3445 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3446 ZoneList<Expression*>* args = expr->arguments();
3447 ASSERT_EQ(3, args->length());
3449 Register string = r0;
3450 Register index = r1;
3451 Register value = r2;
3453 VisitForStackValue(args->at(1)); // index
3454 VisitForStackValue(args->at(2)); // value
3455 VisitForAccumulatorValue(args->at(0)); // string
3456 __ Pop(index, value);
3458 if (FLAG_debug_code) {
3460 __ Check(eq, kNonSmiValue);
3462 __ Check(eq, kNonSmiIndex);
3463 __ SmiUntag(index, index);
3464 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3465 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3466 __ SmiTag(index, index);
3469 __ SmiUntag(value, value);
3472 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
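// The index is still a smi here; the LSR by kSmiTagSize in the store
// below untags it into a byte offset on the fly.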
3473 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3474 context()->Plug(string);
3478 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3479 ZoneList<Expression*>* args = expr->arguments();
3480 ASSERT_EQ(3, args->length());
3482 Register string = r0;
3483 Register index = r1;
3484 Register value = r2;
3486 VisitForStackValue(args->at(1)); // index
3487 VisitForStackValue(args->at(2)); // value
3488 VisitForAccumulatorValue(args->at(0)); // string
3489 __ Pop(index, value);
3491 if (FLAG_debug_code) {
3493 __ Check(eq, kNonSmiValue);
3495 __ Check(eq, kNonSmiIndex);
3496 __ SmiUntag(index, index);
3497 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3498 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3499 __ SmiTag(index, index);
3502 __ SmiUntag(value, value);
3505 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3506 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3507 __ strh(value, MemOperand(ip, index));
3508 context()->Plug(string);
3513 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3514 // Load the arguments on the stack and call the runtime function.
3515 ZoneList<Expression*>* args = expr->arguments();
3516 ASSERT(args->length() == 2);
3517 VisitForStackValue(args->at(0));
3518 VisitForStackValue(args->at(1));
3519 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3521 context()->Plug(r0);
3525 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3526 ZoneList<Expression*>* args = expr->arguments();
3527 ASSERT(args->length() == 2);
3528 VisitForStackValue(args->at(0)); // Load the object.
3529 VisitForAccumulatorValue(args->at(1)); // Load the value.
3530 __ pop(r1); // r0 = value. r1 = object.
3533 // If the object is a smi, return the value.
3534 __ JumpIfSmi(r1, &done);
3536 // If the object is not a value type, return the value.
3537 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3541 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3542 // Update the write barrier. Save the value as it will be
3543 // overwritten by the write barrier code and is needed afterward.
3545 __ RecordWriteField(
3546 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3549 context()->Plug(r0);
3553 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3554 ZoneList<Expression*>* args = expr->arguments();
3555 ASSERT_EQ(args->length(), 1);
3556 // Load the argument into r0 and call the stub.
3557 VisitForAccumulatorValue(args->at(0));
3559 NumberToStringStub stub(isolate());
3561 context()->Plug(r0);
3565 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3566 ZoneList<Expression*>* args = expr->arguments();
3567 ASSERT(args->length() == 1);
3568 VisitForAccumulatorValue(args->at(0));
3571 StringCharFromCodeGenerator generator(r0, r1);
3572 generator.GenerateFast(masm_);
3575 NopRuntimeCallHelper call_helper;
3576 generator.GenerateSlow(masm_, call_helper);
3579 context()->Plug(r1);
3583 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3584 ZoneList<Expression*>* args = expr->arguments();
3585 ASSERT(args->length() == 2);
3586 VisitForStackValue(args->at(0));
3587 VisitForAccumulatorValue(args->at(1));
3589 Register object = r1;
3590 Register index = r0;
3591 Register result = r3;
3595 Label need_conversion;
3596 Label index_out_of_range;
3598 StringCharCodeAtGenerator generator(object,
3603 &index_out_of_range,
3604 STRING_INDEX_IS_NUMBER);
3605 generator.GenerateFast(masm_);
3608 __ bind(&index_out_of_range);
3609   // When the index is out of range, the spec requires us to return NaN.
3611 __ LoadRoot(result, Heap::kNanValueRootIndex);
3614 __ bind(&need_conversion);
3615 // Load the undefined value into the result register, which will
3616 // trigger conversion.
3617 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3620 NopRuntimeCallHelper call_helper;
3621 generator.GenerateSlow(masm_, call_helper);
3624 context()->Plug(result);
3628 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3629 ZoneList<Expression*>* args = expr->arguments();
3630 ASSERT(args->length() == 2);
3631 VisitForStackValue(args->at(0));
3632 VisitForAccumulatorValue(args->at(1));
3634 Register object = r1;
3635 Register index = r0;
3636 Register scratch = r3;
3637 Register result = r0;
3641 Label need_conversion;
3642 Label index_out_of_range;
3644 StringCharAtGenerator generator(object,
3650 &index_out_of_range,
3651 STRING_INDEX_IS_NUMBER);
3652 generator.GenerateFast(masm_);
3655 __ bind(&index_out_of_range);
3656 // When the index is out of range, the spec requires us to return
3657 // the empty string.
3658 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3661 __ bind(&need_conversion);
3662   // Move smi zero into the result register, which will trigger conversion.
3664 __ mov(result, Operand(Smi::FromInt(0)));
3667 NopRuntimeCallHelper call_helper;
3668 generator.GenerateSlow(masm_, call_helper);
3671 context()->Plug(result);
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments();
3677 ASSERT_EQ(2, args->length());
3678 VisitForStackValue(args->at(0));
3679 VisitForAccumulatorValue(args->at(1));
3682 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3684 context()->Plug(r0);
3688 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 ASSERT_EQ(2, args->length());
3691 VisitForStackValue(args->at(0));
3692 VisitForStackValue(args->at(1));
3694 StringCompareStub stub(isolate());
3696 context()->Plug(r0);
3700 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3701 ZoneList<Expression*>* args = expr->arguments();
3702 ASSERT(args->length() >= 2);
3704 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3705 for (int i = 0; i < arg_count + 1; i++) {
3706 VisitForStackValue(args->at(i));
3708 VisitForAccumulatorValue(args->last()); // Function.
3710 Label runtime, done;
3711 // Check for non-function argument (including proxy).
3712 __ JumpIfSmi(r0, &runtime);
3713 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3716 // InvokeFunction requires the function in r1. Move it in there.
3717 __ mov(r1, result_register());
3718 ParameterCount count(arg_count);
3719 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
3720 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3725 __ CallRuntime(Runtime::kCall, args->length());
3728 context()->Plug(r0);
3732 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3733 RegExpConstructResultStub stub(isolate());
3734 ZoneList<Expression*>* args = expr->arguments();
3735 ASSERT(args->length() == 3);
3736 VisitForStackValue(args->at(0));
3737 VisitForStackValue(args->at(1));
3738 VisitForAccumulatorValue(args->at(2));
3742 context()->Plug(r0);
3746 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3747 ZoneList<Expression*>* args = expr->arguments();
3748 ASSERT_EQ(2, args->length());
3749 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3750 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3752 Handle<FixedArray> jsfunction_result_caches(
3753 isolate()->native_context()->jsfunction_result_caches());
3754 if (jsfunction_result_caches->length() <= cache_id) {
3755 __ Abort(kAttemptToUseUndefinedCache);
3756 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
context()->Plug(r0);
return;
}
3761 VisitForAccumulatorValue(args->at(1));
Register key = r0;
Register cache = r1;
3765 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3766 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3767 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ ldr(cache,
FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3772 Label done, not_found;
3773 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3774 // r2 now holds finger offset as a smi.
3775 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3776 // r3 now points to the start of fixed array elements.
3777 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3778 // Note side effect of PreIndex: r3 now points to the key of the pair.
__ cmp(key, r2);
__ b(ne, &not_found);

__ ldr(r0, MemOperand(r3, kPointerSize));
__ b(&done);

__ bind(&not_found);
3786 // Call runtime to perform the lookup.
3787 __ Push(cache, key);
__ CallRuntime(Runtime::kHiddenGetFromCache, 2);

__ bind(&done);
context()->Plug(r0);
}
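
// A sketch of the JSFunctionResultCache probe above (layout inferred from
// the offsets used here; see the JSFunctionResultCache class for the
// authoritative definition):
//
//   cache[kFingerOffset]  : smi offset of the most recently used entry
//   cache[finger]         : key, compared against the requested key
//   cache[finger + 1]     : value, returned on a hit
//
// A miss falls back to Runtime::kHiddenGetFromCache, which does the full
// lookup and maintains the cache.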
3795 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3796 ZoneList<Expression*>* args = expr->arguments();
3797 VisitForAccumulatorValue(args->at(0));
3799 Label materialize_true, materialize_false;
3800 Label* if_true = NULL;
3801 Label* if_false = NULL;
3802 Label* fall_through = NULL;
3803 context()->PrepareTest(&materialize_true, &materialize_false,
3804 &if_true, &if_false, &fall_through);
3806 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3807 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3808 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3809 Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
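
// Schematically, the test above computes (a sketch, not build code):
//
//   bool has_cached_index =
//       (string->hash_field() & String::kContainsCachedArrayIndexMask) == 0;
//
// i.e. a string carries a cached array index exactly when no bit of the
// mask is set in its hash field, which is why eq selects if_true.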
3815 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3816 ZoneList<Expression*>* args = expr->arguments();
3817 ASSERT(args->length() == 1);
3818 VisitForAccumulatorValue(args->at(0));
3820 __ AssertString(r0);
3822 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3823 __ IndexFromHash(r0, r0);
3825 context()->Plug(r0);
3829 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3830 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3831 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3832 one_char_separator_loop_entry, long_separator_loop;
3833 ZoneList<Expression*>* args = expr->arguments();
3834 ASSERT(args->length() == 2);
3835 VisitForStackValue(args->at(1));
3836 VisitForAccumulatorValue(args->at(0));
3838 // All aliases of the same register have disjoint lifetimes.
3839 Register array = r0;
3840 Register elements = no_reg; // Will be r0.
3841 Register result = no_reg; // Will be r0.
3842 Register separator = r1;
3843 Register array_length = r2;
Register result_pos = no_reg; // Will be r2.
3845 Register string_length = r3;
3846 Register string = r4;
3847 Register element = r5;
3848 Register elements_end = r6;
3849 Register scratch = r9;
// Separator operand is on the stack.
__ pop(separator);
3854 // Check that the array is a JSArray.
3855 __ JumpIfSmi(array, &bailout);
__ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
__ b(ne, &bailout);
3859 // Check that the array has fast elements.
3860 __ CheckFastElements(scratch, array_length, &bailout);
3862 // If the array has length zero, return the empty string.
3863 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3864 __ SmiUntag(array_length, SetCC);
3865 __ b(ne, &non_trivial_array);
__ LoadRoot(r0, Heap::kempty_stringRootIndex);
__ b(&done);
3869 __ bind(&non_trivial_array);
3871 // Get the FixedArray containing array's elements.
elements = array;
__ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3874 array = no_reg; // End of array's live range.
3876 // Check that all array elements are sequential ASCII strings, and
3877 // accumulate the sum of their lengths, as a smi-encoded value.
3878 __ mov(string_length, Operand::Zero());
__ add(element,
elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3881 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3882 // Loop condition: while (element < elements_end).
3883 // Live values in registers:
3884 // elements: Fixed array of strings.
3885 // array_length: Length of the fixed array of strings (not smi)
3886 // separator: Separator string
3887 // string_length: Accumulated sum of string lengths (smi).
3888 // element: Current array element.
3889 // elements_end: Array end.
3890 if (generate_debug_code_) {
3891 __ cmp(array_length, Operand::Zero());
3892 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
}
__ bind(&loop);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3896 __ JumpIfSmi(string, &bailout);
3897 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
3898 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3899 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
3900 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3901 __ add(string_length, string_length, Operand(scratch), SetCC);
__ b(vs, &bailout);
__ cmp(element, elements_end);
__ b(lt, &loop);
3906 // If array_length is 1, return elements[0], a string.
3907 __ cmp(array_length, Operand(1));
__ b(ne, &not_size_one_array);
__ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ b(&done);

__ bind(&not_size_one_array);
3914 // Live values in registers:
3915 // separator: Separator string
3916 // array_length: Length of the array.
3917 // string_length: Sum of string lengths (smi).
3918 // elements: FixedArray of strings.
3920 // Check that the separator is a flat ASCII string.
3921 __ JumpIfSmi(separator, &bailout);
3922 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
3923 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3924 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
// Add (separator length times array_length) - separator length to the
// string_length to get the length of the result string. array_length is not
// smi but the other values are, so the result is a smi.
__ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
__ sub(string_length, string_length, Operand(scratch));
__ smull(scratch, ip, array_length, scratch);
// Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
// zero.
__ cmp(ip, Operand::Zero());
__ b(ne, &bailout);
__ tst(scratch, Operand(0x80000000));
__ b(ne, &bailout);
__ add(string_length, string_length, Operand(scratch), SetCC);
__ b(vs, &bailout);
__ SmiUntag(string_length);
// Get first element in the array to free up the elements register to be used
// for the result.
__ add(element,
elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
result = elements; // End of live range for elements.
elements = no_reg;
3948 // Live values in registers:
3949 // element: First array element
3950 // separator: Separator string
3951 // string_length: Length of result string (not smi)
3952 // array_length: Length of the array.
__ AllocateAsciiString(result,
string_length,
scratch,
string, // used as scratch
elements_end, // used as scratch
&bailout);
3959 // Prepare for looping. Set up elements_end to end of the array. Set
// result_pos to the position of the result where to write the first
// character.
3962 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3963 result_pos = array_length; // End of live range for array_length.
3964 array_length = no_reg;
__ add(result_pos,
result,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3969 // Check the length of the separator.
3970 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3971 __ cmp(scratch, Operand(Smi::FromInt(1)));
3972 __ b(eq, &one_char_separator);
3973 __ b(gt, &long_separator);
3975 // Empty separator case
3976 __ bind(&empty_separator_loop);
3977 // Live values in registers:
3978 // result_pos: the position to which we are currently copying characters.
3979 // element: Current array element.
3980 // elements_end: Array end.
3982 // Copy next array element to the result.
3983 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3984 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3985 __ SmiUntag(string_length);
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3989 __ CopyBytes(string, result_pos, string_length, scratch);
3990 __ cmp(element, elements_end);
3991 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
__ b(&done);
3995 // One-character separator case
3996 __ bind(&one_char_separator);
3997 // Replace separator with its ASCII character value.
3998 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3999 // Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator.
4001 __ jmp(&one_char_separator_loop_entry);
4003 __ bind(&one_char_separator_loop);
4004 // Live values in registers:
4005 // result_pos: the position to which we are currently copying characters.
4006 // element: Current array element.
4007 // elements_end: Array end.
4008 // separator: Single separator ASCII char (in lower byte).
4010 // Copy the separator character to the result.
4011 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4013 // Copy next array element to the result.
4014 __ bind(&one_char_separator_loop_entry);
4015 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4016 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4017 __ SmiUntag(string_length);
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4021 __ CopyBytes(string, result_pos, string_length, scratch);
4022 __ cmp(element, elements_end);
4023 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
__ b(&done);
4027 // Long separator case (separator is more than one character). Entry is at the
4028 // label long_separator below.
4029 __ bind(&long_separator_loop);
4030 // Live values in registers:
4031 // result_pos: the position to which we are currently copying characters.
4032 // element: Current array element.
4033 // elements_end: Array end.
4034 // separator: Separator string.
4036 // Copy the separator to the result.
4037 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4038 __ SmiUntag(string_length);
__ add(string,
separator,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4042 __ CopyBytes(string, result_pos, string_length, scratch);
4044 __ bind(&long_separator);
4045 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4046 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4047 __ SmiUntag(string_length);
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4051 __ CopyBytes(string, result_pos, string_length, scratch);
4052 __ cmp(element, elements_end);
4053 __ b(lt, &long_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
__ b(&done);

__ bind(&bailout);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ bind(&done);
context()->Plug(r0);
}
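
// A high-level sketch of the fast path above (illustrative pseudo-code;
// the three copy loops share this shape):
//
//   n = array.length
//   total = sum of element lengths          // first loop, smi arithmetic
//   total += separator.length * (n - 1)     // overflow-checked via smull
//   result = AllocateAsciiString(total)
//   for each element:
//     copy separator (except before the first element), then copy element
//
// Any non-flat or non-ASCII input, or a length overflow, branches to
// bailout, which returns undefined and leaves the general case to the
// caller's runtime path.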
4064 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4065 if (expr->function() != NULL &&
4066 expr->function()->intrinsic_type == Runtime::INLINE) {
4067 Comment cmnt(masm_, "[ InlineRuntimeCall");
EmitInlineRuntimeCall(expr);
return;
}
4072 Comment cmnt(masm_, "[ CallRuntime");
4073 ZoneList<Expression*>* args = expr->arguments();
4074 int arg_count = args->length();
4076 if (expr->is_jsruntime()) {
4077 // Push the builtins object as the receiver.
4078 __ ldr(r0, GlobalObjectOperand());
__ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
__ push(r0);
4082 // Load the function from the receiver.
4083 __ mov(r2, Operand(expr->name()));
4084 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4086 // Push the target function under the receiver.
__ ldr(ip, MemOperand(sp, 0));
__ push(ip);
4089 __ str(r0, MemOperand(sp, kPointerSize));
4091 // Push the arguments ("left-to-right").
4093 for (int i = 0; i < arg_count; i++) {
4094 VisitForStackValue(args->at(i));
4097 // Record source position of the IC call.
4098 SetSourcePosition(expr->position());
4099 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
4103 // Restore context register.
4104 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4106 context()->DropAndPlug(1, r0);
} else {
// Push the arguments ("left-to-right").
4109 for (int i = 0; i < arg_count; i++) {
4110 VisitForStackValue(args->at(i));
4113 // Call the C runtime function.
4114 __ CallRuntime(expr->function(), arg_count);
context()->Plug(r0);
}
}
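
// For a JS runtime call (expr->is_jsruntime()) the code above builds the
// following stack layout before CallFunctionStub (a sketch of the pushes
// and stores above, bottom to top):
//
//   builtins object       <- the receiver
//   target function       <- stored under the receiver copy
//   arg_0 ... arg_{n-1}   <- pushed left to right
//
// which is why the callee is reloaded from sp + (arg_count + 1) words.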
4120 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4121 switch (expr->op()) {
4122 case Token::DELETE: {
4123 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4124 Property* property = expr->expression()->AsProperty();
4125 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4127 if (property != NULL) {
4128 VisitForStackValue(property->obj());
4129 VisitForStackValue(property->key());
__ mov(r1, Operand(Smi::FromInt(strict_mode())));
__ push(r1);
4132 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4133 context()->Plug(r0);
4134 } else if (proxy != NULL) {
4135 Variable* var = proxy->var();
4136 // Delete of an unqualified identifier is disallowed in strict mode
4137 // but "delete this" is allowed.
4138 ASSERT(strict_mode() == SLOPPY || var->is_this());
4139 if (var->IsUnallocated()) {
4140 __ ldr(r2, GlobalObjectOperand());
4141 __ mov(r1, Operand(var->name()));
4142 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4143 __ Push(r2, r1, r0);
4144 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4145 context()->Plug(r0);
4146 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4147 // Result of deleting non-global, non-dynamic variables is false.
4148 // The subexpression does not have side effects.
context()->Plug(var->is_this());
} else {
4151 // Non-global variable. Call the runtime to try to delete from the
4152 // context where the variable was introduced.
4153 ASSERT(!context_register().is(r2));
4154 __ mov(r2, Operand(var->name()));
4155 __ Push(context_register(), r2);
4156 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
context()->Plug(r0);
}
} else {
4160 // Result of deleting non-property, non-variable reference is true.
4161 // The subexpression may have side effects.
4162 VisitForEffect(expr->expression());
context()->Plug(true);
}
break;
}

case Token::VOID: {
4169 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4170 VisitForEffect(expr->expression());
context()->Plug(Heap::kUndefinedValueRootIndex);
break;
}

case Token::NOT: {
4176 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4177 if (context()->IsEffect()) {
4178 // Unary NOT has no side effects so it's only necessary to visit the
4179 // subexpression. Match the optimizing compiler by not branching.
4180 VisitForEffect(expr->expression());
4181 } else if (context()->IsTest()) {
4182 const TestContext* test = TestContext::cast(context());
4183 // The labels are swapped for the recursive call.
4184 VisitForControl(expr->expression(),
test->false_label(),
test->true_label(),
4187 test->fall_through());
4188 context()->Plug(test->true_label(), test->false_label());
4190 // We handle value contexts explicitly rather than simply visiting
4191 // for control and plugging the control flow into the context,
4192 // because we need to prepare a pair of extra administrative AST ids
4193 // for the optimizing compiler.
4194 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4195 Label materialize_true, materialize_false, done;
VisitForControl(expr->expression(),
&materialize_false,
&materialize_true,
&materialize_true);
4200 __ bind(&materialize_true);
4201 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4202 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
if (context()->IsStackValue()) __ push(r0);
__ jmp(&done);
4205 __ bind(&materialize_false);
4206 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4207 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
if (context()->IsStackValue()) __ push(r0);
__ bind(&done);
}
break;
}
4214 case Token::TYPEOF: {
4215 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4216 { StackValueContext context(this);
VisitForTypeofValue(expr->expression());
}
4219 __ CallRuntime(Runtime::kTypeof, 1);
context()->Plug(r0);
break;
}

default:
UNREACHABLE();
}
}
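
// Roughly, the TYPEOF case above evaluates (a sketch):
//
//   value = <typeof-safe load of the operand>   // no ReferenceError for
//                                               // unresolvable identifiers
//   r0 = Runtime::kTypeof(value)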
4230 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4231 ASSERT(expr->expression()->IsValidReferenceExpression());
4233 Comment cmnt(masm_, "[ CountOperation");
4234 SetSourcePosition(expr->position());
// Expression can only be a property, a global or a (parameter or local)
// slot.
4238 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4239 LhsKind assign_type = VARIABLE;
4240 Property* prop = expr->expression()->AsProperty();
4241 // In case of a property we use the uninitialized expression context
4242 // of the key to detect a named property.
if (prop != NULL) {
assign_type =
(prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
}
4248 // Evaluate expression and get value.
4249 if (assign_type == VARIABLE) {
4250 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4251 AccumulatorValueContext context(this);
EmitVariableLoad(expr->expression()->AsVariableProxy());
} else {
4254 // Reserve space for result of postfix operation.
4255 if (expr->is_postfix() && !context()->IsEffect()) {
__ mov(ip, Operand(Smi::FromInt(0)));
__ push(ip);
}
4259 if (assign_type == NAMED_PROPERTY) {
4260 // Put the object both on the stack and in the accumulator.
VisitForAccumulatorValue(prop->obj());
__ push(r0);
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
4266 VisitForAccumulatorValue(prop->key());
4267 __ ldr(r1, MemOperand(sp, 0));
__ push(r0);
EmitKeyedPropertyLoad(prop);
}
}
4273 // We need a second deoptimization point after loading the value
// in case evaluating the property load may have a side effect.
4275 if (assign_type == VARIABLE) {
4276 PrepareForBailout(expr->expression(), TOS_REG);
4278 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4281 // Inline smi case if we are in a loop.
4282 Label stub_call, done;
4283 JumpPatchSite patch_site(masm_);
4285 int count_value = expr->op() == Token::INC ? 1 : -1;
4286 if (ShouldInlineSmiCase(expr->op())) {
Label slow;
patch_site.EmitJumpIfNotSmi(r0, &slow);
4290 // Save result for postfix expressions.
4291 if (expr->is_postfix()) {
4292 if (!context()->IsEffect()) {
// Save the result on the stack. If we have a named or keyed property
// we store the result under the receiver that is currently on top
// of the stack.
switch (assign_type) {
case VARIABLE:
__ push(r0);
break;
case NAMED_PROPERTY:
__ str(r0, MemOperand(sp, kPointerSize));
break;
case KEYED_PROPERTY:
__ str(r0, MemOperand(sp, 2 * kPointerSize));
break;
}
}
}
__ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
__ b(vc, &done);
// Call stub. Undo operation first.
__ sub(r0, r0, Operand(Smi::FromInt(count_value)));
__ jmp(&stub_call);
__ bind(&slow);
}
4317 ToNumberStub convert_stub(isolate());
4318 __ CallStub(&convert_stub);
4320 // Save result for postfix expressions.
4321 if (expr->is_postfix()) {
4322 if (!context()->IsEffect()) {
// Save the result on the stack. If we have a named or keyed property
// we store the result under the receiver that is currently on top
// of the stack.
switch (assign_type) {
case VARIABLE:
__ push(r0);
break;
case NAMED_PROPERTY:
__ str(r0, MemOperand(sp, kPointerSize));
break;
case KEYED_PROPERTY:
__ str(r0, MemOperand(sp, 2 * kPointerSize));
break;
}
}
}
4341 __ bind(&stub_call);
__ mov(r1, r0);
__ mov(r0, Operand(Smi::FromInt(count_value)));
4345 // Record position before stub call.
4346 SetSourcePosition(expr->position());
4348 BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
4349 CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
4353 // Store the value returned in r0.
switch (assign_type) {
case VARIABLE:
4356 if (expr->is_postfix()) {
4357 { EffectContext context(this);
4358 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context.Plug(r0);
}
// For all contexts except EffectContext we have the result on
4364 // top of the stack.
4365 if (!context()->IsEffect()) {
4366 context()->PlugTOS();
}
} else {
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
break;
4375 case NAMED_PROPERTY: {
__ mov(r2, Operand(prop->key()->AsLiteral()->value()));
__ pop(r1);
4378 CallStoreIC(expr->CountStoreFeedbackId());
4379 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4380 if (expr->is_postfix()) {
4381 if (!context()->IsEffect()) {
4382 context()->PlugTOS();
}
} else {
context()->Plug(r0);
}
break;
}
4389 case KEYED_PROPERTY: {
4390 __ Pop(r2, r1); // r1 = key. r2 = receiver.
4391 Handle<Code> ic = strict_mode() == SLOPPY
4392 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4393 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4394 CallIC(ic, expr->CountStoreFeedbackId());
4395 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4396 if (expr->is_postfix()) {
4397 if (!context()->IsEffect()) {
4398 context()->PlugTOS();
}
} else {
context()->Plug(r0);
}
break;
}
}
}
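
// A sketch of the postfix protocol implemented above for a keyed property,
// e.g. obj[key]++ in a value context (illustrative only):
//
//   push smi 0                   // reserved slot for the old value
//   push obj; push key           // receiver and key for the store IC
//   old = load obj[key]
//   sp[2 * kPointerSize] = old   // patch the reserved slot
//   store obj[key] = old + 1     // via the keyed store IC
//   result = PlugTOS()           // the saved old value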
4409 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4410 ASSERT(!context()->IsEffect());
4411 ASSERT(!context()->IsTest());
4412 VariableProxy* proxy = expr->AsVariableProxy();
4413 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4414 Comment cmnt(masm_, "[ Global variable");
4415 __ ldr(r0, GlobalObjectOperand());
4416 __ mov(r2, Operand(proxy->name()));
// Use a regular load, not a contextual load, to avoid a reference
// error.
4419 CallLoadIC(NOT_CONTEXTUAL);
4420 PrepareForBailout(expr, TOS_REG);
4421 context()->Plug(r0);
4422 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
Comment cmnt(masm_, "[ Lookup slot");
Label done, slow;
4426 // Generate code for loading from variables potentially shadowed
4427 // by eval-introduced variables.
4428 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
__ mov(r0, Operand(proxy->name()));
__ Push(cp, r0);
__ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
PrepareForBailout(expr, TOS_REG);
__ bind(&done);
context()->Plug(r0);
} else {
4439 // This expression cannot throw a reference error at the top level.
VisitInDuplicateContext(expr);
}
}
4445 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4446 Expression* sub_expr,
4447 Handle<String> check) {
4448 Label materialize_true, materialize_false;
4449 Label* if_true = NULL;
4450 Label* if_false = NULL;
4451 Label* fall_through = NULL;
4452 context()->PrepareTest(&materialize_true, &materialize_false,
4453 &if_true, &if_false, &fall_through);
4455 { AccumulatorValueContext context(this);
4456 VisitForTypeofValue(sub_expr);
4458 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4460 Factory* factory = isolate()->factory();
4461 if (String::Equals(check, factory->number_string())) {
4462 __ JumpIfSmi(r0, if_true);
4463 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(r0, ip);
4466 Split(eq, if_true, if_false, fall_through);
4467 } else if (String::Equals(check, factory->float32x4_string())) {
4468 __ JumpIfSmi(r0, if_false);
4469 __ CompareObjectType(r0, r0, r1, FLOAT32x4_TYPE);
4470 Split(eq, if_true, if_false, fall_through);
4471 } else if (String::Equals(check, factory->float64x2_string())) {
4472 __ JumpIfSmi(r0, if_false);
4473 __ CompareObjectType(r0, r0, r1, FLOAT64x2_TYPE);
4474 Split(eq, if_true, if_false, fall_through);
4475 } else if (String::Equals(check, factory->int32x4_string())) {
4476 __ JumpIfSmi(r0, if_false);
4477 __ CompareObjectType(r0, r0, r1, INT32x4_TYPE);
4478 Split(eq, if_true, if_false, fall_through);
4479 } else if (String::Equals(check, factory->string_string())) {
4480 __ JumpIfSmi(r0, if_false);
4481 // Check for undetectable objects => false.
__ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
__ b(ge, if_false);
4484 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4485 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4486 Split(eq, if_true, if_false, fall_through);
4487 } else if (String::Equals(check, factory->symbol_string())) {
4488 __ JumpIfSmi(r0, if_false);
4489 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4490 Split(eq, if_true, if_false, fall_through);
4491 } else if (String::Equals(check, factory->boolean_string())) {
__ CompareRoot(r0, Heap::kTrueValueRootIndex);
__ b(eq, if_true);
4494 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4495 Split(eq, if_true, if_false, fall_through);
4496 } else if (FLAG_harmony_typeof &&
4497 String::Equals(check, factory->null_string())) {
4498 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4499 Split(eq, if_true, if_false, fall_through);
4500 } else if (String::Equals(check, factory->undefined_string())) {
__ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
__ b(eq, if_true);
4503 __ JumpIfSmi(r0, if_false);
4504 // Check for undetectable objects => true.
4505 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4506 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4507 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4508 Split(ne, if_true, if_false, fall_through);
4510 } else if (String::Equals(check, factory->function_string())) {
4511 __ JumpIfSmi(r0, if_false);
4512 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
__ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
__ b(eq, if_true);
4515 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4516 Split(eq, if_true, if_false, fall_through);
4517 } else if (String::Equals(check, factory->object_string())) {
4518 __ JumpIfSmi(r0, if_false);
4519 if (!FLAG_harmony_typeof) {
__ CompareRoot(r0, Heap::kNullValueRootIndex);
__ b(eq, if_true);
}
4523 // Check for JS objects => true.
__ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
__ b(lt, if_false);
__ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
__ b(gt, if_false);
4528 // Check for undetectable objects => false.
4529 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4530 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4531 Split(eq, if_true, if_false, fall_through);
} else {
if (if_false != fall_through) __ jmp(if_false);
}
context()->Plug(if_true, if_false);
}
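
// Summary of the inline typeof checks above (a sketch; each line names the
// test that selects if_true):
//
//   "number"    : smi, or map == HeapNumber map
//   "float32x4"/"float64x2"/"int32x4" : the corresponding SIMD value type
//   "string"    : instance type < FIRST_NONSTRING_TYPE, not undetectable
//   "symbol"    : SYMBOL_TYPE
//   "boolean"   : true or false root
//   "undefined" : undefined root, or an undetectable object
//   "function"  : JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//   "object"    : null (without --harmony-typeof) or a non-callable JS
//                 object that is not undetectable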
4539 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4540 Comment cmnt(masm_, "[ CompareOperation");
4541 SetSourcePosition(expr->position());
4543 // First we try a fast inlined version of the compare when one of
4544 // the operands is a literal.
4545 if (TryLiteralCompare(expr)) return;
4547 // Always perform the comparison for its control flow. Pack the result
4548 // into the expression's context after the comparison is performed.
4549 Label materialize_true, materialize_false;
4550 Label* if_true = NULL;
4551 Label* if_false = NULL;
4552 Label* fall_through = NULL;
4553 context()->PrepareTest(&materialize_true, &materialize_false,
4554 &if_true, &if_false, &fall_through);
4556 Token::Value op = expr->op();
4557 VisitForStackValue(expr->left());
switch (op) {
case Token::IN:
VisitForStackValue(expr->right());
4561 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4562 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4563 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
Split(eq, if_true, if_false, fall_through);
break;
4568 case Token::INSTANCEOF: {
4569 VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
__ tst(r0, r0);
Split(eq, if_true, if_false, fall_through);
break;
}
default: {
VisitForAccumulatorValue(expr->right());
4581 Condition cond = CompareIC::ComputeCondition(op);
4584 bool inline_smi_code = ShouldInlineSmiCase(op);
4585 JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r0, Operand(r1));
patch_site.EmitJumpIfNotSmi(r2, &slow_case);
__ cmp(r1, r0);
Split(cond, if_true, if_false, NULL);
__ bind(&slow_case);
}
4595 // Record position and call the compare IC.
4596 SetSourcePosition(expr->position());
4597 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4598 CallIC(ic, expr->CompareOperationFeedbackId());
4599 patch_site.EmitPatchInfo();
4600 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4601 __ cmp(r0, Operand::Zero());
Split(cond, if_true, if_false, fall_through);
}
}
4606 // Convert the result of the comparison into one expected for this
4607 // expression's context.
4608 context()->Plug(if_true, if_false);
4612 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
Expression* sub_expr,
NilValue nil) {
4615 Label materialize_true, materialize_false;
4616 Label* if_true = NULL;
4617 Label* if_false = NULL;
4618 Label* fall_through = NULL;
4619 context()->PrepareTest(&materialize_true, &materialize_false,
4620 &if_true, &if_false, &fall_through);
4622 VisitForAccumulatorValue(sub_expr);
4623 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4624 if (expr->op() == Token::EQ_STRICT) {
4625 Heap::RootListIndex nil_value = nil == kNullValue ?
4626 Heap::kNullValueRootIndex :
4627 Heap::kUndefinedValueRootIndex;
__ LoadRoot(r1, nil_value);
__ cmp(r0, r1);
Split(eq, if_true, if_false, fall_through);
} else {
Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4633 CallIC(ic, expr->CompareOperationFeedbackId());
4634 __ cmp(r0, Operand(0));
4635 Split(ne, if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
}
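
// Schematically (a sketch of the two paths above):
//
//   if (op == EQ_STRICT) select (value == root(nil));
//   else                 select (CompareNilIC(value) != 0);  // also accepts
//                        // the other nil value and undetectable objects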
4641 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4642 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4643 context()->Plug(r0);
Register FullCodeGenerator::result_register() {
return r0;
}
Register FullCodeGenerator::context_register() {
return cp;
}
4657 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4658 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4659 __ str(value, MemOperand(fp, frame_offset));
4663 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4664 __ ldr(dst, ContextOperand(cp, context_index));
4668 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4669 Scope* declaration_scope = scope()->DeclarationScope();
4670 if (declaration_scope->is_global_scope() ||
4671 declaration_scope->is_module_scope()) {
4672 // Contexts nested in the native context have a canonical empty function
4673 // as their closure, not the anonymous closure containing the global
4674 // code. Pass a smi sentinel and let the runtime look up the empty
// function.
__ mov(ip, Operand(Smi::FromInt(0)));
4677 } else if (declaration_scope->is_eval_scope()) {
4678 // Contexts created by a call to eval have the same closure as the
4679 // context calling eval, not the anonymous closure containing the eval
4680 // code. Fetch it from the context.
4681 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
} else {
ASSERT(declaration_scope->is_function_scope());
__ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
__ push(ip);
}
4690 // ----------------------------------------------------------------------------
4691 // Non-local control flow support.
4693 void FullCodeGenerator::EnterFinallyBlock() {
4694 ASSERT(!result_register().is(r1));
4695 // Store result register while executing finally block.
4696 __ push(result_register());
// Cook the return address in the link register to the stack (smi-encoded
// Code* delta).
__ sub(r1, lr, Operand(masm_->CodeObject()));
__ SmiTag(r1);

// Store the cooked return address while executing the finally block.
__ push(r1);
4704 // Store pending message while executing finally block.
4705 ExternalReference pending_message_obj =
4706 ExternalReference::address_of_pending_message_obj(isolate());
4707 __ mov(ip, Operand(pending_message_obj));
__ ldr(r1, MemOperand(ip));
__ push(r1);
4711 ExternalReference has_pending_message =
4712 ExternalReference::address_of_has_pending_message(isolate());
4713 __ mov(ip, Operand(has_pending_message));
4714 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
__ ldrb(r1, MemOperand(ip));
__ SmiTag(r1);
__ push(r1);
4719 ExternalReference pending_message_script =
4720 ExternalReference::address_of_pending_message_script(isolate());
4721 __ mov(ip, Operand(pending_message_script));
__ ldr(r1, MemOperand(ip));
__ push(r1);
}
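
// On entry to the finally block the pushes above leave, from bottom to top
// (a sketch):
//
//   result register
//   cooked return address     (lr - CodeObject(), smi-encoded)
//   pending message object
//   has-pending-message flag  (smi)
//   pending message script
//
// ExitFinallyBlock below pops the same five slots in reverse order.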
4727 void FullCodeGenerator::ExitFinallyBlock() {
4728 ASSERT(!result_register().is(r1));
// Restore pending message from stack.
__ pop(r1);
4731 ExternalReference pending_message_script =
4732 ExternalReference::address_of_pending_message_script(isolate());
4733 __ mov(ip, Operand(pending_message_script));
__ str(r1, MemOperand(ip));

__ pop(r1);
__ SmiUntag(r1);
4738 ExternalReference has_pending_message =
4739 ExternalReference::address_of_has_pending_message(isolate());
4740 __ mov(ip, Operand(has_pending_message));
4741 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
__ strb(r1, MemOperand(ip));

__ pop(r1);
4745 ExternalReference pending_message_obj =
4746 ExternalReference::address_of_pending_message_obj(isolate());
4747 __ mov(ip, Operand(pending_message_obj));
4748 __ str(r1, MemOperand(ip));
// Restore the cooked return address from the stack.
__ pop(r1);

// Restore the result register from the stack.
__ pop(result_register());

// Uncook the return address and return.
__ SmiUntag(r1);
__ add(pc, r1, Operand(masm_->CodeObject()));
}
4762 #define __ ACCESS_MASM(masm())
4764 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
int* stack_depth,
int* context_length) {
4767 // The macros used here must preserve the result register.
4769 // Because the handler block contains the context of the finally
4770 // code, we can restore it directly from there for the finally code
// rather than iteratively unwinding contexts via their previous
// links.
4773 __ Drop(*stack_depth); // Down to the handler block.
4774 if (*context_length > 0) {
4775 // Restore the context to its dedicated register and the stack.
4776 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
__ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
__ bl(finally_entry_);

*stack_depth = 0;
*context_length = 0;
return previous_;
}
4791 static Address GetInterruptImmediateLoadAddress(Address pc) {
4792 Address load_address = pc - 2 * Assembler::kInstrSize;
4793 if (!FLAG_enable_ool_constant_pool) {
4794 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4795 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
4796 load_address -= Assembler::kInstrSize;
4797 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
} else {
ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
}
return load_address;
}
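
// The two code shapes recognized above are, schematically:
//
//   ldr ip, [pc, #<offset>]   ; embedded constant pool
//   ldr ip, [pp, #<offset>]   ; out-of-line constant pool, or
//   movw ip, #<immed low>     ; out-of-line pool with a movw/movt pair,
//   movt ip, #<immed high>    ; in which case the movw is returned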
4805 void BackEdgeTable::PatchAt(Code* unoptimized_code,
Address pc,
BackEdgeState target_state,
4808 Code* replacement_code) {
4809 static const int kInstrSize = Assembler::kInstrSize;
4810 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4811 Address branch_address = pc_immediate_load_address - kInstrSize;
4812 CodePatcher patcher(branch_address, 1);
switch (target_state) {
case INTERRUPT: {
// <decrement profiling counter>
// bpl ok
// ; load interrupt stub address into ip - either of:
// ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low>
// | movt ip, <immed high>
// blx ip
// ok-label

// Calculate the branch offset to the ok-label - this is the difference
// between the branch address and |pc| (which points at <blx ip>) plus one
// instruction.
int branch_offset = pc + kInstrSize - branch_address;
patcher.masm()->b(branch_offset, pl);
break;
}
4830 case ON_STACK_REPLACEMENT:
4831 case OSR_AFTER_STACK_CHECK:
// <decrement profiling counter>
// mov r0, r0 (NOP)
// ; load on-stack replacement address into ip - either of:
// ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low>
// | movt ip, <immed high>
// blx ip
// ok-label
patcher.masm()->nop();
break;
}
4843 // Replace the call address.
4844 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
4845 replacement_code->entry());
4847 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4848 unoptimized_code, pc_immediate_load_address, replacement_code);
4852 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
Isolate* isolate,
Code* unoptimized_code,
Address pc) {
4856 static const int kInstrSize = Assembler::kInstrSize;
4857 ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);
4859 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4860 Address branch_address = pc_immediate_load_address - kInstrSize;
4861 Address interrupt_address = Assembler::target_address_at(
4862 pc_immediate_load_address, unoptimized_code);
4864 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4865 ASSERT(interrupt_address ==
isolate->builtins()->InterruptCheck()->entry());
return INTERRUPT;
}
4870 ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));
4872 if (interrupt_address ==
4873 isolate->builtins()->OnStackReplacement()->entry()) {
return ON_STACK_REPLACEMENT;
}
4877 ASSERT(interrupt_address ==
4878 isolate->builtins()->OsrAfterStackCheck()->entry());
return OSR_AFTER_STACK_CHECK;
}
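
// Decoding table for the checks above (a sketch):
//
//   b pl <ok> + InterruptCheck entry       -> INTERRUPT
//   nop       + OnStackReplacement entry   -> ON_STACK_REPLACEMENT
//   nop       + OsrAfterStackCheck entry   -> OSR_AFTER_STACK_CHECK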
4883 } } // namespace v8::internal
4885 #endif // V8_TARGET_ARCH_ARM