1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/codegen.h"
10 #include "src/compiler.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parser.h"
15 #include "src/scopes.h"
17 #include "src/arm/code-stubs-arm.h"
18 #include "src/arm/macro-assembler-arm.h"
23 #define __ ACCESS_MASM(masm_)
// A patch site is a location in the code that can be patched. This class has
// a number of methods for emitting the patchable code, and a method,
// EmitPatchInfo, to record a marker back to the patchable code. The marker is
// a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (using the raw
// 12-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
32 class JumpPatchSite BASE_EMBEDDED {
34 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
36 info_emitted_ = false;
41 DCHECK(patch_site_.is_bound() == info_emitted_);
// When initially emitting this code, ensure that a jump is always generated
// to skip the inlined smi code.
46 void EmitJumpIfNotSmi(Register reg, Label* target) {
47 DCHECK(!patch_site_.is_bound() && !info_emitted_);
48 Assembler::BlockConstPoolScope block_const_pool(masm_);
49 __ bind(&patch_site_);
50 __ cmp(reg, Operand(reg));
51 __ b(eq, target); // Always taken before patched.
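    // Note: the cmp above compares reg with itself, so until the IC machinery
    // patches this site (presumably into a real smi test such as
    // tst reg, #kSmiTagMask) the branch is always taken.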
// When initially emitting this code, ensure that a jump is never generated
// to skip the inlined smi code.
56 void EmitJumpIfSmi(Register reg, Label* target) {
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 Assembler::BlockConstPoolScope block_const_pool(masm_);
59 __ bind(&patch_site_);
60 __ cmp(reg, Operand(reg));
61 __ b(ne, target); // Never taken before patched.
64 void EmitPatchInfo() {
65 // Block literal pool emission whilst recording patch site information.
66 Assembler::BlockConstPoolScope block_const_pool(masm_);
67 if (patch_site_.is_bound()) {
68 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
71 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
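      // The delta is encoded in the cmp instruction itself: the register code
      // carries the high part of the delta and the raw 12-bit immediate the
      // low part.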
76 __ nop(); // Signals no inlined code.
81 MacroAssembler* masm_;
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the function.
94 // The live registers are:
95 // o r1: the JS function object being called (i.e., ourselves)
97 // o pp: our caller's constant pool pointer (if enabled)
98 // o fp: our caller's frame pointer
99 // o sp: stack pointer
100 // o lr: return address
102 // The function builds a JS frame. Please see JavaScriptFrameConstants in
103 // frames-arm.h for its layout.
104 void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(literal());
109 Comment cmnt(masm_, "[ function compiled by full code generator");
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");  // Break into the debugger when --stop_at matches.
  }
  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
123 if (info->MustReplaceUndefinedReceiverWithGlobalProxy()) {
125 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
126 __ ldr(r2, MemOperand(sp, receiver_offset));
127 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
130 __ ldr(r2, GlobalObjectOperand());
131 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
133 __ str(r2, MemOperand(sp, receiver_offset));
138 // Open a frame scope to indicate that there is a frame on the stack. The
139 // MANUAL indicates that the scope shouldn't actually generate code to set up
140 // the frame (that is done below).
141 FrameScope frame_scope(masm_, StackFrame::MANUAL);
143 info->set_prologue_offset(masm_->pc_offset());
144 __ Prologue(info->IsCodePreAgingActive());
145 info->AddNoFrameRange(0, masm_->pc_offset());
147 { Comment cmnt(masm_, "[ Allocate locals");
148 int locals_count = info->scope()->num_stack_slots();
149 // Generators allocate locals, if any, in context slots.
150 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
151 if (locals_count > 0) {
152 if (locals_count >= 128) {
154 __ sub(r9, sp, Operand(locals_count * kPointerSize));
155 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
156 __ cmp(r9, Operand(r2));
158 __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
161 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
162 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
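      // Locals are initialized to undefined in batches of kMaxPushes pushes,
      // trading code size against loop overhead.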
163 if (locals_count >= kMaxPushes) {
164 int loop_iterations = locals_count / kMaxPushes;
165 __ mov(r2, Operand(loop_iterations));
167 __ bind(&loop_header);
169 for (int i = 0; i < kMaxPushes; i++) {
172 // Continue loop if not done.
173 __ sub(r2, r2, Operand(1), SetCC);
174 __ b(&loop_header, ne);
176 int remaining = locals_count % kMaxPushes;
177 // Emit the remaining pushes.
178 for (int i = 0; i < remaining; i++) {
184 bool function_in_register_r1 = true;
186 // Possibly allocate a local context.
187 if (info->scope()->num_heap_slots() > 0) {
188 // Argument to NewContext is the function, which is still in r1.
189 Comment cmnt(masm_, "[ Allocate context");
190 bool need_write_barrier = true;
191 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
192 if (info->scope()->is_script_scope()) {
194 __ Push(info->scope()->GetScopeInfo(info->isolate()));
195 __ CallRuntime(Runtime::kNewScriptContext, 2);
196 } else if (slots <= FastNewContextStub::kMaximumSlots) {
197 FastNewContextStub stub(isolate(), slots);
199 // Result of FastNewContextStub is always in new space.
200 need_write_barrier = false;
203 __ CallRuntime(Runtime::kNewFunctionContext, 1);
205 function_in_register_r1 = false;
206 // Context is returned in r0. It replaces the context passed to us.
207 // It's saved in the stack and kept live in cp.
209 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
210 // Copy any necessary parameters into the context.
211 int num_parameters = info->scope()->num_parameters();
212 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
213 for (int i = first_parameter; i < num_parameters; i++) {
214 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
215 if (var->IsContextSlot()) {
216 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
217 (num_parameters - 1 - i) * kPointerSize;
218 // Load parameter from stack.
219 __ ldr(r0, MemOperand(fp, parameter_offset));
220 // Store it in the context.
221 MemOperand target = ContextOperand(cp, var->index());
224 // Update the write barrier.
225 if (need_write_barrier) {
226 __ RecordWriteContextSlot(
227 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
228 } else if (FLAG_debug_code) {
230 __ JumpIfInNewSpace(cp, r0, &done);
231 __ Abort(kExpectedNewSpaceObject);
238 PrepareForBailoutForId(BailoutId::Prologue(), NO_REGISTERS);
  // The function register is trashed in case we bail out here. But since that
  // can only happen when a context was allocated, the value of
  // |function_in_register_r1| has already been updated and is still correct.
  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
245 Variable* this_function_var = scope()->this_function_var();
246 if (this_function_var != nullptr) {
247 Comment cmnt(masm_, "[ This function");
248 if (!function_in_register_r1) {
249 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
252 SetVar(this_function_var, r1, r0, r2);
255 Variable* new_target_var = scope()->new_target_var();
256 if (new_target_var != nullptr) {
257 Comment cmnt(masm_, "[ new.target");
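    // Walk up to the caller frame, skipping an arguments adaptor frame if
    // present. If the caller is a construct frame, new.target is the original
    // constructor stored in that frame; otherwise it is undefined.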
259 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
260 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
261 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
262 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
263 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
264 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
265 Label non_construct_frame, done;
266 function_in_register_r1 = false;
268 __ b(ne, &non_construct_frame);
  __ ldr(r0,
         MemOperand(r2, ConstructFrameConstants::kOriginalConstructorOffset));
273 __ bind(&non_construct_frame);
274 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
277 SetVar(new_target_var, r0, r2, r3);
280 Variable* arguments = scope()->arguments();
281 if (arguments != NULL) {
282 // Function uses arguments object.
283 Comment cmnt(masm_, "[ Allocate arguments object");
284 if (!function_in_register_r1) {
285 // Load this again, if it's used by the local context below.
286 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
290 // Receiver is just before the parameters on the caller's stack.
291 int num_parameters = info->scope()->num_parameters();
292 int offset = num_parameters * kPointerSize;
294 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
295 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
298 // Arguments to ArgumentsAccessStub:
299 // function, receiver address, parameter count.
300 // The stub will rewrite receiver and parameter count if the previous
301 // stack frame was an arguments adapter frame.
302 ArgumentsAccessStub::Type type;
303 if (is_strict(language_mode()) || !has_simple_parameters()) {
304 type = ArgumentsAccessStub::NEW_STRICT;
305 } else if (literal()->has_duplicate_parameters()) {
306 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
308 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
310 ArgumentsAccessStub stub(isolate(), type);
313 SetVar(arguments, r0, r1, r2);
317 __ CallRuntime(Runtime::kTraceEnter, 0);
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
322 if (scope()->HasIllegalRedeclaration()) {
323 Comment cmnt(masm_, "[ Declarations");
324 VisitForEffect(scope()->GetIllegalRedeclaration());
327 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
328 { Comment cmnt(masm_, "[ Declarations");
329 VisitDeclarations(scope()->declarations());
    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
335 DCHECK_EQ(0, ic_total_count_);
337 { Comment cmnt(masm_, "[ Stack check");
338 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
340 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
341 __ cmp(sp, Operand(ip));
343 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
344 PredictableCodeSizeScope predictable(masm_);
345 predictable.ExpectSize(
346 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
347 __ Call(stack_check, RelocInfo::CODE_TARGET);
351 { Comment cmnt(masm_, "[ Body");
352 DCHECK(loop_depth() == 0);
353 VisitStatements(literal()->body());
354 DCHECK(loop_depth() == 0);
  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
360 { Comment cmnt(masm_, "[ return <undefined>;");
361 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
363 EmitReturnSequence();
365 // Force emit the constant pool, so it doesn't get emitted in the middle
366 // of the back edge table.
367 masm()->CheckConstPool(true, false);
371 void FullCodeGenerator::ClearAccumulator() {
372 __ mov(r0, Operand(Smi::FromInt(0)));
376 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
377 __ mov(r2, Operand(profiling_counter_));
378 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
379 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
380 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
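  // The counter is a Smi stored in a cell; SetCC on the subtraction leaves
  // the flags set so callers can branch on whether the budget is exhausted.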
#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
391 void FullCodeGenerator::EmitProfilingCounterReset() {
392 Assembler::BlockConstPoolScope block_const_pool(masm_);
393 PredictableCodeSizeScope predictable_code_size_scope(
394 masm_, kProfileCounterResetSequenceLength);
397 int reset_value = FLAG_interrupt_budget;
398 __ mov(r2, Operand(profiling_counter_));
399 // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
400 // instructions (for ARMv6) depending upon whether it is an extended constant
401 // pool - insert nop to compensate.
402 int expected_instr_count =
403 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
404 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
408 __ mov(r3, Operand(Smi::FromInt(reset_value)));
409 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
413 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
414 Label* back_edge_target) {
415 Comment cmnt(masm_, "[ Back edge bookkeeping");
416 // Block literal pools whilst emitting back edge code.
417 Assembler::BlockConstPoolScope block_const_pool(masm_);
420 DCHECK(back_edge_target->is_bound());
421 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
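  // The decrement weight grows with the amount of code generated since the
  // back edge target (a proxy for loop size), capped at kMaxBackEdgeWeight.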
422 int weight = Min(kMaxBackEdgeWeight,
423 Max(1, distance / kCodeSizeMultiplier));
424 EmitProfilingCounterDecrement(weight);
426 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
428 // Record a mapping of this PC offset to the OSR id. This is used to find
429 // the AST id from the unoptimized code in order to use it as a key into
430 // the deoptimization input data found in the optimized code.
431 RecordBackEdge(stmt->OsrEntryId());
433 EmitProfilingCounterReset();
436 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
437 // Record a mapping of the OSR id to this PC. This is used if the OSR
438 // entry becomes the target of a bailout. We don't expect it to be, but
439 // we want it to work if it is.
440 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
444 void FullCodeGenerator::EmitReturnSequence() {
445 Comment cmnt(masm_, "[ Return sequence");
446 if (return_label_.is_bound()) {
447 __ b(&return_label_);
449 __ bind(&return_label_);
451 // Push the return value on the stack as the parameter.
452 // Runtime::TraceExit returns its parameter in r0.
454 __ CallRuntime(Runtime::kTraceExit, 1);
456 // Pretend that the exit is a backwards jump to the entry.
458 if (info_->ShouldSelfOptimize()) {
459 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
461 int distance = masm_->pc_offset();
462 weight = Min(kMaxBackEdgeWeight,
463 Max(1, distance / kCodeSizeMultiplier));
465 EmitProfilingCounterDecrement(weight);
469 __ Call(isolate()->builtins()->InterruptCheck(),
470 RelocInfo::CODE_TARGET);
472 EmitProfilingCounterReset();
  // Make sure that the constant pool is not emitted inside of the return
  // sequence.
477 { Assembler::BlockConstPoolScope block_const_pool(masm_);
478 int32_t arg_count = info_->scope()->num_parameters() + 1;
479 int32_t sp_delta = arg_count * kPointerSize;
480 SetReturnPosition(literal());
481 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
482 PredictableCodeSizeScope predictable(masm_, -1);
483 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
484 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
485 __ add(sp, sp, Operand(sp_delta));
487 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
494 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
496 codegen()->GetVar(result_register(), var);
497 __ push(result_register());
501 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
505 void FullCodeGenerator::AccumulatorValueContext::Plug(
506 Heap::RootListIndex index) const {
507 __ LoadRoot(result_register(), index);
511 void FullCodeGenerator::StackValueContext::Plug(
512 Heap::RootListIndex index) const {
513 __ LoadRoot(result_register(), index);
514 __ push(result_register());
518 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
519 codegen()->PrepareForBailoutBeforeSplit(condition(),
523 if (index == Heap::kUndefinedValueRootIndex ||
524 index == Heap::kNullValueRootIndex ||
525 index == Heap::kFalseValueRootIndex) {
526 if (false_label_ != fall_through_) __ b(false_label_);
527 } else if (index == Heap::kTrueValueRootIndex) {
528 if (true_label_ != fall_through_) __ b(true_label_);
530 __ LoadRoot(result_register(), index);
531 codegen()->DoTest(this);
536 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
540 void FullCodeGenerator::AccumulatorValueContext::Plug(
541 Handle<Object> lit) const {
542 __ mov(result_register(), Operand(lit));
546 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
547 // Immediates cannot be pushed directly.
548 __ mov(result_register(), Operand(lit));
549 __ push(result_register());
553 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
554 codegen()->PrepareForBailoutBeforeSplit(condition(),
558 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
559 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
560 if (false_label_ != fall_through_) __ b(false_label_);
561 } else if (lit->IsTrue() || lit->IsJSObject()) {
562 if (true_label_ != fall_through_) __ b(true_label_);
563 } else if (lit->IsString()) {
564 if (String::cast(*lit)->length() == 0) {
565 if (false_label_ != fall_through_) __ b(false_label_);
567 if (true_label_ != fall_through_) __ b(true_label_);
569 } else if (lit->IsSmi()) {
570 if (Smi::cast(*lit)->value() == 0) {
571 if (false_label_ != fall_through_) __ b(false_label_);
573 if (true_label_ != fall_through_) __ b(true_label_);
576 // For simplicity we always test the accumulator register.
577 __ mov(result_register(), Operand(lit));
578 codegen()->DoTest(this);
583 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
584 Register reg) const {
590 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
592 Register reg) const {
595 __ Move(result_register(), reg);
599 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
600 Register reg) const {
602 if (count > 1) __ Drop(count - 1);
603 __ str(reg, MemOperand(sp, 0));
607 void FullCodeGenerator::TestContext::DropAndPlug(int count,
608 Register reg) const {
610 // For simplicity we always test the accumulator register.
612 __ Move(result_register(), reg);
613 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
614 codegen()->DoTest(this);
618 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
619 Label* materialize_false) const {
620 DCHECK(materialize_true == materialize_false);
621 __ bind(materialize_true);
625 void FullCodeGenerator::AccumulatorValueContext::Plug(
626 Label* materialize_true,
627 Label* materialize_false) const {
629 __ bind(materialize_true);
630 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
632 __ bind(materialize_false);
633 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
638 void FullCodeGenerator::StackValueContext::Plug(
639 Label* materialize_true,
640 Label* materialize_false) const {
642 __ bind(materialize_true);
643 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
645 __ bind(materialize_false);
646 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
652 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
653 Label* materialize_false) const {
654 DCHECK(materialize_true == true_label_);
655 DCHECK(materialize_false == false_label_);
659 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
660 Heap::RootListIndex value_root_index =
661 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
662 __ LoadRoot(result_register(), value_root_index);
666 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
667 Heap::RootListIndex value_root_index =
668 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
669 __ LoadRoot(ip, value_root_index);
674 void FullCodeGenerator::TestContext::Plug(bool flag) const {
675 codegen()->PrepareForBailoutBeforeSplit(condition(),
680 if (true_label_ != fall_through_) __ b(true_label_);
682 if (false_label_ != fall_through_) __ b(false_label_);
687 void FullCodeGenerator::DoTest(Expression* condition,
690 Label* fall_through) {
691 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
692 CallIC(ic, condition->test_id());
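  // The ToBoolean stub is expected to leave a nonzero value in the result
  // register exactly when the condition evaluates to true.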
693 __ tst(result_register(), result_register());
694 Split(ne, if_true, if_false, fall_through);
698 void FullCodeGenerator::Split(Condition cond,
701 Label* fall_through) {
702 if (if_false == fall_through) {
704 } else if (if_true == fall_through) {
705 __ b(NegateCondition(cond), if_false);
713 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
714 DCHECK(var->IsStackAllocated());
715 // Offset is negative because higher indexes are at lower addresses.
716 int offset = -var->index() * kPointerSize;
717 // Adjust by a (parameter or local) base offset.
718 if (var->IsParameter()) {
719 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
721 offset += JavaScriptFrameConstants::kLocal0Offset;
723 return MemOperand(fp, offset);
727 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
728 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
729 if (var->IsContextSlot()) {
730 int context_chain_length = scope()->ContextChainLength(var->scope());
731 __ LoadContext(scratch, context_chain_length);
732 return ContextOperand(scratch, var->index());
734 return StackOperand(var);
739 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
740 // Use destination as scratch.
741 MemOperand location = VarOperand(var, dest);
742 __ ldr(dest, location);
746 void FullCodeGenerator::SetVar(Variable* var,
750 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
751 DCHECK(!scratch0.is(src));
752 DCHECK(!scratch0.is(scratch1));
753 DCHECK(!scratch1.is(src));
754 MemOperand location = VarOperand(var, scratch0);
755 __ str(src, location);
757 // Emit the write barrier code if the location is in the heap.
758 if (var->IsContextSlot()) {
759 __ RecordWriteContextSlot(scratch0,
769 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
770 bool should_normalize,
773 // Only prepare for bailouts before splits if we're in a test
774 // context. Otherwise, we let the Visit function deal with the
775 // preparation to avoid preparing with the same AST id twice.
776 if (!context()->IsTest()) return;
779 if (should_normalize) __ b(&skip);
780 PrepareForBailout(expr, TOS_REG);
781 if (should_normalize) {
782 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
784 Split(eq, if_true, if_false, NULL);
790 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
793 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
794 if (generate_debug_code_) {
795 // Check that we're not inside a with or catch context.
796 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
797 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
798 __ Check(ne, kDeclarationInWithContext);
799 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
800 __ Check(ne, kDeclarationInCatchContext);
805 void FullCodeGenerator::VisitVariableDeclaration(
806 VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists when the
  // code runs.
810 VariableProxy* proxy = declaration->proxy();
811 VariableMode mode = declaration->mode();
812 Variable* variable = proxy->var();
813 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
814 switch (variable->location()) {
815 case VariableLocation::GLOBAL:
816 case VariableLocation::UNALLOCATED:
817 globals_->Add(variable->name(), zone());
818 globals_->Add(variable->binding_needs_init()
819 ? isolate()->factory()->the_hole_value()
820 : isolate()->factory()->undefined_value(),
824 case VariableLocation::PARAMETER:
825 case VariableLocation::LOCAL:
827 Comment cmnt(masm_, "[ VariableDeclaration");
828 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
829 __ str(r0, StackOperand(variable));
833 case VariableLocation::CONTEXT:
835 Comment cmnt(masm_, "[ VariableDeclaration");
836 EmitDebugCheckDeclarationContext(variable);
837 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
838 __ str(r0, ContextOperand(cp, variable->index()));
839 // No write barrier since the_hole_value is in old space.
840 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
844 case VariableLocation::LOOKUP: {
845 Comment cmnt(masm_, "[ VariableDeclaration");
846 __ mov(r2, Operand(variable->name()));
847 // Declaration nodes are always introduced in one of four modes.
848 DCHECK(IsDeclaredVariableMode(mode));
849 // Push initial value, if any.
850 // Note: For variables we must not push an initial value (such as
851 // 'undefined') because we may have a (legal) redeclaration and we
852 // must not destroy the current value.
854 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
856 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
859 __ CallRuntime(IsImmutableVariableMode(mode)
860 ? Runtime::kDeclareReadOnlyLookupSlot
861 : Runtime::kDeclareLookupSlot,
869 void FullCodeGenerator::VisitFunctionDeclaration(
870 FunctionDeclaration* declaration) {
871 VariableProxy* proxy = declaration->proxy();
872 Variable* variable = proxy->var();
873 switch (variable->location()) {
874 case VariableLocation::GLOBAL:
875 case VariableLocation::UNALLOCATED: {
876 globals_->Add(variable->name(), zone());
877 Handle<SharedFunctionInfo> function =
878 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
879 // Check for stack-overflow exception.
880 if (function.is_null()) return SetStackOverflow();
881 globals_->Add(function, zone());
885 case VariableLocation::PARAMETER:
886 case VariableLocation::LOCAL: {
887 Comment cmnt(masm_, "[ FunctionDeclaration");
888 VisitForAccumulatorValue(declaration->fun());
889 __ str(result_register(), StackOperand(variable));
893 case VariableLocation::CONTEXT: {
894 Comment cmnt(masm_, "[ FunctionDeclaration");
895 EmitDebugCheckDeclarationContext(variable);
896 VisitForAccumulatorValue(declaration->fun());
897 __ str(result_register(), ContextOperand(cp, variable->index()));
898 int offset = Context::SlotOffset(variable->index());
899 // We know that we have written a function, which is not a smi.
900 __ RecordWriteContextSlot(cp,
908 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
912 case VariableLocation::LOOKUP: {
913 Comment cmnt(masm_, "[ FunctionDeclaration");
914 __ mov(r2, Operand(variable->name()));
916 // Push initial value for function declaration.
917 VisitForStackValue(declaration->fun());
918 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
925 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
926 // Call the runtime to declare the globals.
927 __ mov(r1, Operand(pairs));
928 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
930 __ CallRuntime(Runtime::kDeclareGlobals, 2);
931 // Return value is ignored.
935 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
936 // Call the runtime to declare the modules.
937 __ Push(descriptions);
938 __ CallRuntime(Runtime::kDeclareModules, 1);
939 // Return value is ignored.
943 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
944 Comment cmnt(masm_, "[ SwitchStatement");
945 Breakable nested_statement(this, stmt);
946 SetStatementPosition(stmt);
948 // Keep the switch value on the stack until a case matches.
949 VisitForStackValue(stmt->tag());
950 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
952 ZoneList<CaseClause*>* clauses = stmt->cases();
953 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
955 Label next_test; // Recycled for each test.
956 // Compile all the tests with branches to their bodies.
957 for (int i = 0; i < clauses->length(); i++) {
958 CaseClause* clause = clauses->at(i);
959 clause->body_target()->Unuse();
    // The default is not a test, but remember it as the final fall-through.
962 if (clause->is_default()) {
963 default_clause = clause;
967 Comment cmnt(masm_, "[ Case comparison");
971 // Compile the label expression.
972 VisitForAccumulatorValue(clause->label());
974 // Perform the comparison as if via '==='.
975 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
976 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
977 JumpPatchSite patch_site(masm_);
978 if (inline_smi_code) {
981 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
984 __ b(ne, &next_test);
985 __ Drop(1); // Switch value is no longer needed.
986 __ b(clause->body_target());
990 // Record position before stub call for type feedback.
991 SetExpressionPosition(clause);
992 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
993 strength(language_mode())).code();
994 CallIC(ic, clause->CompareId());
995 patch_site.EmitPatchInfo();
999 PrepareForBailout(clause, TOS_REG);
1000 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1002 __ b(ne, &next_test);
1004 __ jmp(clause->body_target());
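      // The CompareIC leaves its result in r0; a value of zero means the
      // switch value strictly equals the clause label.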
1007 __ cmp(r0, Operand::Zero());
1008 __ b(ne, &next_test);
1009 __ Drop(1); // Switch value is no longer needed.
1010 __ b(clause->body_target());
1013 // Discard the test value and jump to the default if present, otherwise to
1014 // the end of the statement.
1015 __ bind(&next_test);
1016 __ Drop(1); // Switch value is no longer needed.
1017 if (default_clause == NULL) {
1018 __ b(nested_statement.break_label());
1020 __ b(default_clause->body_target());
1023 // Compile all the case bodies.
1024 for (int i = 0; i < clauses->length(); i++) {
1025 Comment cmnt(masm_, "[ Case body");
1026 CaseClause* clause = clauses->at(i);
1027 __ bind(clause->body_target());
1028 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1029 VisitStatements(clause->statements());
1032 __ bind(nested_statement.break_label());
1033 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1037 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1038 Comment cmnt(masm_, "[ ForInStatement");
1039 SetStatementPosition(stmt, SKIP_BREAK);
1041 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1044 ForIn loop_statement(this, stmt);
1045 increment_loop_depth();
1047 // Get the object to enumerate over. If the object is null or undefined, skip
1048 // over the loop. See ECMA-262 version 5, section 12.6.4.
1049 SetExpressionAsStatementPosition(stmt->enumerable());
1050 VisitForAccumulatorValue(stmt->enumerable());
1051 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1054 Register null_value = r5;
1055 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1056 __ cmp(r0, null_value);
1059 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1061 // Convert the object to a JS object.
1062 Label convert, done_convert;
1063 __ JumpIfSmi(r0, &convert);
1064 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1065 __ b(ge, &done_convert);
1067 ToObjectStub stub(isolate());
1069 __ bind(&done_convert);
1070 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1073 // Check for proxies.
1075 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1076 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1077 __ b(le, &call_runtime);
1079 // Check cache validity in generated code. This is a fast case for
1080 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1081 // guarantee cache validity, call the runtime system to check cache
1082 // validity or get the property names in a fixed array.
1083 __ CheckEnumCache(null_value, &call_runtime);
1085 // The enum cache is valid. Load the map of the object being
1086 // iterated over and use the cache for the iteration.
1088 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1091 // Get the set of properties to enumerate.
1092 __ bind(&call_runtime);
1093 __ push(r0); // Duplicate the enumerable object on the stack.
1094 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1095 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1097 // If we got a map from the runtime call, we can do a fast
1098 // modification check. Otherwise, we got a fixed array, and we have
1099 // to do a slow check.
1101 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1102 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1104 __ b(ne, &fixed_array);
1106 // We got a map in register r0. Get the enumeration cache from it.
1107 Label no_descriptors;
1108 __ bind(&use_cache);
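  // An enum cache length of zero means there are no enumerable own
  // properties, so there is nothing to iterate over.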
1110 __ EnumLength(r1, r0);
1111 __ cmp(r1, Operand(Smi::FromInt(0)));
1112 __ b(eq, &no_descriptors);
1114 __ LoadInstanceDescriptors(r0, r2);
1115 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1116 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1118 // Set up the four remaining stack slots.
1119 __ push(r0); // Map.
1120 __ mov(r0, Operand(Smi::FromInt(0)));
1121 // Push enumeration cache, enumeration cache length (as smi) and zero.
1122 __ Push(r2, r1, r0);
1125 __ bind(&no_descriptors);
1129 // We got a fixed array in register r0. Iterate through that.
1131 __ bind(&fixed_array);
1133 __ Move(r1, FeedbackVector());
1134 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1135 int vector_index = FeedbackVector()->GetIndex(slot);
1136 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
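  // We are taking the generic (fixed array) path, so mark the for-in
  // feedback slot megamorphic.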
1138 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1139 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1140 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1141 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1142 __ b(gt, &non_proxy);
1143 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1144 __ bind(&non_proxy);
1145 __ Push(r1, r0); // Smi and array
1146 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1147 __ mov(r0, Operand(Smi::FromInt(0)));
1148 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1150 // Generate code for doing the condition check.
1151 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1153 SetExpressionAsStatementPosition(stmt->each());
1155 // Load the current count to r0, load the length to r1.
1156 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1157 __ cmp(r0, r1); // Compare to the array length.
1158 __ b(hs, loop_statement.break_label());
1160 // Get the current entry of the array into register r3.
1161 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1162 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1163 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
1165 // Get the expected map from the stack or a smi in the
1166 // permanent slow case into register r2.
1167 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1169 // Check if the expected map still matches that of the enumerable.
1170 // If not, we may have to filter the key.
1172 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1173 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1174 __ cmp(r4, Operand(r2));
1175 __ b(eq, &update_each);
1177 // For proxies, no filtering is done.
1178 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1179 __ cmp(r2, Operand(Smi::FromInt(0)));
1180 __ b(eq, &update_each);
  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
1185 __ push(r1); // Enumerable.
1186 __ push(r3); // Current entry.
1187 __ CallRuntime(Runtime::kForInFilter, 2);
1188 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1189 __ mov(r3, Operand(r0));
1190 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1192 __ b(eq, loop_statement.continue_label());
1194 // Update the 'each' property or variable from the possibly filtered
1195 // entry in register r3.
1196 __ bind(&update_each);
1197 __ mov(result_register(), r3);
1198 // Perform the assignment as if via '='.
1199 { EffectContext context(this);
1200 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1201 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1204 // Generate code for the body of the loop.
1205 Visit(stmt->body());
  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
1209 __ bind(loop_statement.continue_label());
1211 __ add(r0, r0, Operand(Smi::FromInt(1)));
1214 EmitBackEdgeBookkeeping(stmt, &loop);
1217 // Remove the pointers stored on the stack.
1218 __ bind(loop_statement.break_label());
1221 // Exit and decrement the loop depth.
1222 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1224 decrement_loop_depth();
1228 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1230 // Use the fast case closure allocation code that allocates in new
1231 // space for nested functions that don't need literals cloning. If
1232 // we're running with the --always-opt or the --prepare-always-opt
1233 // flag, we need to use the runtime function so that the new function
1234 // we are creating here gets a chance to have its code optimized and
1235 // doesn't just get a copy of the existing unoptimized code.
1236 if (!FLAG_always_opt &&
1237 !FLAG_prepare_always_opt &&
1239 scope()->is_function_scope() &&
1240 info->num_literals() == 0) {
1241 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1242 __ mov(r2, Operand(info));
1247 pretenure ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure, 1);
1249 context()->Plug(r0);
1253 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1254 FeedbackVectorICSlot slot) {
1255 DCHECK(NeedsHomeObject(initializer));
1256 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1257 __ mov(StoreDescriptor::NameRegister(),
1258 Operand(isolate()->factory()->home_object_symbol()));
1259 __ ldr(StoreDescriptor::ValueRegister(),
1260 MemOperand(sp, offset * kPointerSize));
1261 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1266 void FullCodeGenerator::EmitSetHomeObjectAccumulator(
1267 Expression* initializer, int offset, FeedbackVectorICSlot slot) {
1268 DCHECK(NeedsHomeObject(initializer));
1269 __ Move(StoreDescriptor::ReceiverRegister(), r0);
1270 __ mov(StoreDescriptor::NameRegister(),
1271 Operand(isolate()->factory()->home_object_symbol()));
1272 __ ldr(StoreDescriptor::ValueRegister(),
1273 MemOperand(sp, offset * kPointerSize));
1274 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1279 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1280 TypeofMode typeof_mode,
1282 Register current = cp;
1288 if (s->num_heap_slots() > 0) {
1289 if (s->calls_sloppy_eval()) {
1290 // Check that extension is NULL.
1291 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1295 // Load next context in chain.
1296 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1297 // Walk the rest of the chain without clobbering cp.
1300 // If no outer scope calls eval, we do not need to check more
1301 // context extensions.
1302 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1303 s = s->outer_scope();
1306 if (s->is_eval_scope()) {
1308 if (!current.is(next)) {
1309 __ Move(next, current);
1312 // Terminate at native context.
1313 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1314 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1317 // Check that extension is NULL.
1318 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1321 // Load next context in chain.
1322 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
  // All extension objects were empty and it is safe to use a normal global
  // load.
1329 EmitGlobalVariableLoad(proxy, typeof_mode);
1333 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1335 DCHECK(var->IsContextSlot());
1336 Register context = cp;
1340 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1341 if (s->num_heap_slots() > 0) {
1342 if (s->calls_sloppy_eval()) {
1343 // Check that extension is NULL.
1344 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1348 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1349 // Walk the rest of the chain without clobbering cp.
1353 // Check that last extension is NULL.
1354 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1361 return ContextOperand(context, var->index());
1365 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1366 TypeofMode typeof_mode,
1367 Label* slow, Label* done) {
1368 // Generate fast-case code for variables that might be shadowed by
1369 // eval-introduced variables. Eval is used a lot without
1370 // introducing variables. In those cases, we do not want to
1371 // perform a runtime call for all variables in the scope
1372 // containing the eval.
1373 Variable* var = proxy->var();
1374 if (var->mode() == DYNAMIC_GLOBAL) {
1375 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1377 } else if (var->mode() == DYNAMIC_LOCAL) {
1378 Variable* local = var->local_if_not_shadowed();
1379 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1380 if (local->mode() == LET || local->mode() == CONST ||
1381 local->mode() == CONST_LEGACY) {
1382 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1383 if (local->mode() == CONST_LEGACY) {
1384 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1385 } else { // LET || CONST
1387 __ mov(r0, Operand(var->name()));
1389 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1397 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1398 TypeofMode typeof_mode) {
1399 Variable* var = proxy->var();
1400 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1401 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1402 if (var->IsGlobalSlot()) {
1403 DCHECK(var->index() > 0);
1404 DCHECK(var->IsStaticGlobalObjectProperty());
1405 const int slot = var->index();
1406 const int depth = scope()->ContextChainLength(var->scope());
1407 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1408 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
1409 LoadGlobalViaContextStub stub(isolate(), depth);
1412 __ Push(Smi::FromInt(slot));
1413 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1416 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1417 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1418 __ mov(LoadDescriptor::SlotRegister(),
1419 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1420 CallLoadIC(typeof_mode);
1425 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1426 TypeofMode typeof_mode) {
1427 // Record position before possible IC call.
1428 SetExpressionPosition(proxy);
1429 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1430 Variable* var = proxy->var();
  // Three cases: global variables, lookup variables, and all other types of
  // variables.
1434 switch (var->location()) {
1435 case VariableLocation::GLOBAL:
1436 case VariableLocation::UNALLOCATED: {
1437 Comment cmnt(masm_, "[ Global variable");
1438 EmitGlobalVariableLoad(proxy, typeof_mode);
1439 context()->Plug(r0);
1443 case VariableLocation::PARAMETER:
1444 case VariableLocation::LOCAL:
1445 case VariableLocation::CONTEXT: {
1446 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1447 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1448 : "[ Stack variable");
1449 if (NeedsHoleCheckForLoad(proxy)) {
1450 // Let and const need a read barrier.
1452 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1453 if (var->mode() == LET || var->mode() == CONST) {
1454 // Throw a reference error when using an uninitialized let/const
1455 // binding in harmony mode.
1458 __ mov(r0, Operand(var->name()));
1460 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1463 // Uninitialized legacy const bindings are unholed.
1464 DCHECK(var->mode() == CONST_LEGACY);
1465 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1467 context()->Plug(r0);
1470 context()->Plug(var);
1474 case VariableLocation::LOOKUP: {
1475 Comment cmnt(masm_, "[ Lookup variable");
1477 // Generate code for loading from variables potentially shadowed
1478 // by eval-introduced variables.
1479 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1481 __ mov(r1, Operand(var->name()));
1482 __ Push(cp, r1); // Context and name.
1483 Runtime::FunctionId function_id =
1484 typeof_mode == NOT_INSIDE_TYPEOF
1485 ? Runtime::kLoadLookupSlot
1486 : Runtime::kLoadLookupSlotNoReferenceError;
1487 __ CallRuntime(function_id, 2);
1489 context()->Plug(r0);
1495 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1496 Comment cmnt(masm_, "[ RegExpLiteral");
1498 // Registers will be used as follows:
1499 // r5 = materialized value (RegExp literal)
1500 // r4 = JS function, literals array
1501 // r3 = literal index
1502 // r2 = RegExp pattern
1503 // r1 = RegExp flags
1504 // r0 = RegExp literal clone
1505 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1506 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1507 int literal_offset =
1508 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1509 __ ldr(r5, FieldMemOperand(r4, literal_offset));
1510 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1512 __ b(ne, &materialized);
1514 // Create regexp literal using runtime function.
1515 // Result will be in r0.
1516 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1517 __ mov(r2, Operand(expr->pattern()));
1518 __ mov(r1, Operand(expr->flags()));
1519 __ Push(r4, r3, r2, r1);
1520 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1523 __ bind(&materialized);
1524 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1525 Label allocated, runtime_allocate;
1526 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1529 __ bind(&runtime_allocate);
1530 __ mov(r0, Operand(Smi::FromInt(size)));
1532 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1535 __ bind(&allocated);
1536 // After this, registers are used as follows:
1537 // r0: Newly allocated regexp.
1538 // r5: Materialized regexp.
1540 __ CopyFields(r0, r5, d0, size / kPointerSize);
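  // The newly allocated object is initialized by copying all fields from the
  // materialized regexp, producing a fresh clone.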
1541 context()->Plug(r0);
1545 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1546 Expression* expression = (property == NULL) ? NULL : property->value();
1547 if (expression == NULL) {
1548 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1551 VisitForStackValue(expression);
1552 if (NeedsHomeObject(expression)) {
1553 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1554 property->kind() == ObjectLiteral::Property::SETTER);
1555 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1556 EmitSetHomeObject(expression, offset, property->GetSlot());
1562 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1563 Comment cmnt(masm_, "[ ObjectLiteral");
1565 Handle<FixedArray> constant_properties = expr->constant_properties();
1566 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1567 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1568 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1569 __ mov(r1, Operand(constant_properties));
1570 int flags = expr->ComputeFlags();
1571 __ mov(r0, Operand(Smi::FromInt(flags)));
1572 if (MustCreateObjectLiteralWithRuntime(expr)) {
1573 __ Push(r3, r2, r1, r0);
1574 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1576 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1579 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1581 // If result_saved is true the result is on top of the stack. If
1582 // result_saved is false the result is in r0.
1583 bool result_saved = false;
1585 AccessorTable accessor_table(zone());
1586 int property_index = 0;
1587 for (; property_index < expr->properties()->length(); property_index++) {
1588 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1589 if (property->is_computed_name()) break;
1590 if (property->IsCompileTimeValue()) continue;
1592 Literal* key = property->key()->AsLiteral();
1593 Expression* value = property->value();
1594 if (!result_saved) {
1595 __ push(r0); // Save result on stack
1596 result_saved = true;
1598 switch (property->kind()) {
1599 case ObjectLiteral::Property::CONSTANT:
1601 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1602 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1604 case ObjectLiteral::Property::COMPUTED:
1605 // It is safe to use [[Put]] here because the boilerplate already
1606 // contains computed properties with an uninitialized value.
1607 if (key->value()->IsInternalizedString()) {
1608 if (property->emit_store()) {
1609 VisitForAccumulatorValue(value);
1610 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1611 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1612 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1613 if (FLAG_vector_stores) {
1614 EmitLoadStoreICSlot(property->GetSlot(0));
1617 CallStoreIC(key->LiteralFeedbackId());
1619 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1621 if (NeedsHomeObject(value)) {
1622 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1625 VisitForEffect(value);
1629 // Duplicate receiver on stack.
1630 __ ldr(r0, MemOperand(sp));
1632 VisitForStackValue(key);
1633 VisitForStackValue(value);
1634 if (property->emit_store()) {
1635 if (NeedsHomeObject(value)) {
1636 EmitSetHomeObject(value, 2, property->GetSlot());
            __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // Language mode.
1640 __ CallRuntime(Runtime::kSetProperty, 4);
1645 case ObjectLiteral::Property::PROTOTYPE:
1646 // Duplicate receiver on stack.
1647 __ ldr(r0, MemOperand(sp));
1649 VisitForStackValue(value);
1650 DCHECK(property->emit_store());
1651 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1654 case ObjectLiteral::Property::GETTER:
1655 if (property->emit_store()) {
1656 accessor_table.lookup(key)->second->getter = property;
1659 case ObjectLiteral::Property::SETTER:
1660 if (property->emit_store()) {
1661 accessor_table.lookup(key)->second->setter = property;
1667 // Emit code to define accessors, using only a single call to the runtime for
1668 // each pair of corresponding getters and setters.
1669 for (AccessorTable::Iterator it = accessor_table.begin();
1670 it != accessor_table.end();
1672 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1674 VisitForStackValue(it->first);
1675 EmitAccessor(it->second->getter);
1676 EmitAccessor(it->second->setter);
1677 __ mov(r0, Operand(Smi::FromInt(NONE)));
1679 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1682 // Object literals have two parts. The "static" part on the left contains no
1683 // computed property names, and so we can compute its map ahead of time; see
1684 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1685 // starts with the first computed property name, and continues with all
1686 // properties to its right. All the code from above initializes the static
1687 // component of the object literal, and arranges for the map of the result to
1688 // reflect the static order in which the keys appear. For the dynamic
1689 // properties, we compile them into a series of "SetOwnProperty" runtime
1690 // calls. This will preserve insertion order.
1691 for (; property_index < expr->properties()->length(); property_index++) {
1692 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1694 Expression* value = property->value();
1695 if (!result_saved) {
1696 __ push(r0); // Save result on the stack
1697 result_saved = true;
1700 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1703 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1704 DCHECK(!property->is_computed_name());
1705 VisitForStackValue(value);
1706 DCHECK(property->emit_store());
1707 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1709 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1710 VisitForStackValue(value);
1711 if (NeedsHomeObject(value)) {
1712 EmitSetHomeObject(value, 2, property->GetSlot());
1715 switch (property->kind()) {
1716 case ObjectLiteral::Property::CONSTANT:
1717 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1718 case ObjectLiteral::Property::COMPUTED:
1719 if (property->emit_store()) {
1720 __ mov(r0, Operand(Smi::FromInt(NONE)));
1722 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1728 case ObjectLiteral::Property::PROTOTYPE:
1732 case ObjectLiteral::Property::GETTER:
1733 __ mov(r0, Operand(Smi::FromInt(NONE)));
1735 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1738 case ObjectLiteral::Property::SETTER:
1739 __ mov(r0, Operand(Smi::FromInt(NONE)));
1741 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1747 if (expr->has_function()) {
1748 DCHECK(result_saved);
1749 __ ldr(r0, MemOperand(sp));
1751 __ CallRuntime(Runtime::kToFastProperties, 1);
1755 context()->PlugTOS();
1757 context()->Plug(r0);
1762 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1763 Comment cmnt(masm_, "[ ArrayLiteral");
1765 expr->BuildConstantElements(isolate());
1767 Handle<FixedArray> constant_elements = expr->constant_elements();
1768 bool has_fast_elements =
1769 IsFastObjectElementsKind(expr->constant_elements_kind());
1770 Handle<FixedArrayBase> constant_elements_values(
1771 FixedArrayBase::cast(constant_elements->get(1)));
1773 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1774 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1775 // If the only customer of allocation sites is transitioning, then
1776 // we can turn it off if we don't have anywhere else to transition to.
1777 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1780 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1781 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1782 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1783 __ mov(r1, Operand(constant_elements));
1784 if (MustCreateArrayLiteralWithRuntime(expr)) {
1785 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1786 __ Push(r3, r2, r1, r0);
1787 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1789 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1792 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1794 bool result_saved = false; // Is the result saved to the stack?
1795 ZoneList<Expression*>* subexprs = expr->values();
1796 int length = subexprs->length();
1798 // Emit code to evaluate all the non-constant subexpressions and to store
1799 // them into the newly cloned array.
1800 int array_index = 0;
1801 for (; array_index < length; array_index++) {
1802 Expression* subexpr = subexprs->at(array_index);
1803 if (subexpr->IsSpread()) break;
1805 // If the subexpression is a literal or a simple materialized literal it
1806 // is already set in the cloned array.
1807 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1809 if (!result_saved) {
1811 __ Push(Smi::FromInt(expr->literal_index()));
1812 result_saved = true;
1814 VisitForAccumulatorValue(subexpr);
1816 if (has_fast_elements) {
1817 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1818 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1819 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1820 __ str(result_register(), FieldMemOperand(r1, offset));
1821 // Update the write barrier for the array store.
1822 __ RecordWriteField(r1, offset, result_register(), r2,
1823 kLRHasBeenSaved, kDontSaveFPRegs,
1824 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1826 __ mov(r3, Operand(Smi::FromInt(array_index)));
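      // The generic path goes through StoreArrayLiteralElementStub, which
      // takes the element index as a Smi in r3 and the value in the
      // accumulator.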
1827 StoreArrayLiteralElementStub stub(isolate());
1831 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1834 // In case the array literal contains spread expressions it has two parts. The
1835 // first part is the "static" array with a literal index, which is handled
1836 // above. The second part is everything from the first spread expression
1837 // (inclusive) onwards; these elements get appended to the array. Note that
1838 // the number of elements an iterable produces is not known ahead of time.
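// For example (illustrative), in [a, b, ...xs, c] the elements a and b are
// evaluated and stored directly into the cloned backing store by the loop
// above, while ...xs and c are handled by the append loop below.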
1839 if (array_index < length && result_saved) {
1840 __ pop(); // literal index
1842 result_saved = false;
1844 for (; array_index < length; array_index++) {
1845 Expression* subexpr = subexprs->at(array_index);
1848 if (subexpr->IsSpread()) {
1849 VisitForStackValue(subexpr->AsSpread()->expression());
1850 __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1853 VisitForStackValue(subexpr);
1854 __ CallRuntime(Runtime::kAppendElement, 2);
1857 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1861 __ pop(); // literal index
1862 context()->PlugTOS();
1864 context()->Plug(r0);
1869 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1870 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1872 Comment cmnt(masm_, "[ Assignment");
1873 SetExpressionPosition(expr, INSERT_BREAK);
1875 Property* property = expr->target()->AsProperty();
1876 LhsKind assign_type = Property::GetAssignType(property);
1878 // Evaluate LHS expression.
1879 switch (assign_type) {
1881 // Nothing to do here.
1883 case NAMED_PROPERTY:
1884 if (expr->is_compound()) {
1885 // We need the receiver both on the stack and in the register.
1886 VisitForStackValue(property->obj());
1887 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1889 VisitForStackValue(property->obj());
1892 case NAMED_SUPER_PROPERTY:
1894 property->obj()->AsSuperPropertyReference()->this_var());
1895 VisitForAccumulatorValue(
1896 property->obj()->AsSuperPropertyReference()->home_object());
1897 __ Push(result_register());
1898 if (expr->is_compound()) {
1899 const Register scratch = r1;
1900 __ ldr(scratch, MemOperand(sp, kPointerSize));
1902 __ Push(result_register());
1905 case KEYED_SUPER_PROPERTY:
1907 property->obj()->AsSuperPropertyReference()->this_var());
1909 property->obj()->AsSuperPropertyReference()->home_object());
1910 VisitForAccumulatorValue(property->key());
1911 __ Push(result_register());
1912 if (expr->is_compound()) {
1913 const Register scratch = r1;
1914 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1916 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1918 __ Push(result_register());
1921 case KEYED_PROPERTY:
1922 if (expr->is_compound()) {
1923 VisitForStackValue(property->obj());
1924 VisitForStackValue(property->key());
1925 __ ldr(LoadDescriptor::ReceiverRegister(),
1926 MemOperand(sp, 1 * kPointerSize));
1927 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1929 VisitForStackValue(property->obj());
1930 VisitForStackValue(property->key());
1935 // For compound assignments we need another deoptimization point after the
1936 // variable/property load.
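// For example (illustrative), a compound assignment such as o.x += 1 first
// loads o.x via the property load emitted below, then evaluates the RHS and
// applies the binary operation before storing the result back.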
1937 if (expr->is_compound()) {
1938 { AccumulatorValueContext context(this);
1939 switch (assign_type) {
1941 EmitVariableLoad(expr->target()->AsVariableProxy());
1942 PrepareForBailout(expr->target(), TOS_REG);
1944 case NAMED_PROPERTY:
1945 EmitNamedPropertyLoad(property);
1946 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1948 case NAMED_SUPER_PROPERTY:
1949 EmitNamedSuperPropertyLoad(property);
1950 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1952 case KEYED_SUPER_PROPERTY:
1953 EmitKeyedSuperPropertyLoad(property);
1954 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1956 case KEYED_PROPERTY:
1957 EmitKeyedPropertyLoad(property);
1958 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1963 Token::Value op = expr->binary_op();
1964 __ push(r0); // Left operand goes on the stack.
1965 VisitForAccumulatorValue(expr->value());
1967 AccumulatorValueContext context(this);
1968 if (ShouldInlineSmiCase(op)) {
1969 EmitInlineSmiBinaryOp(expr->binary_operation(),
1974 EmitBinaryOp(expr->binary_operation(), op);
1977 // Deoptimization point in case the binary operation may have side effects.
1978 PrepareForBailout(expr->binary_operation(), TOS_REG);
1980 VisitForAccumulatorValue(expr->value());
1983 SetExpressionPosition(expr);
1986 switch (assign_type) {
1988 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1989 expr->op(), expr->AssignmentSlot());
1990 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1991 context()->Plug(r0);
1993 case NAMED_PROPERTY:
1994 EmitNamedPropertyAssignment(expr);
1996 case NAMED_SUPER_PROPERTY:
1997 EmitNamedSuperPropertyStore(property);
1998 context()->Plug(r0);
2000 case KEYED_SUPER_PROPERTY:
2001 EmitKeyedSuperPropertyStore(property);
2002 context()->Plug(r0);
2004 case KEYED_PROPERTY:
2005 EmitKeyedPropertyAssignment(expr);
2011 void FullCodeGenerator::VisitYield(Yield* expr) {
2012 Comment cmnt(masm_, "[ Yield");
2013 SetExpressionPosition(expr);
2015 // Evaluate yielded value first; the initial iterator definition depends on
2016 // this. It stays on the stack while we update the iterator.
2017 VisitForStackValue(expr->expression());
2019 switch (expr->yield_kind()) {
2020 case Yield::kSuspend:
2021 // Pop value from top-of-stack slot; box result into result register.
2022 EmitCreateIteratorResult(false);
2023 __ push(result_register());
2025 case Yield::kInitial: {
2026 Label suspend, continuation, post_runtime, resume;
2029 __ bind(&continuation);
2030 __ RecordGeneratorContinuation();
2034 VisitForAccumulatorValue(expr->generator_object());
2035 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2036 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2037 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2038 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2040 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2041 kLRHasBeenSaved, kDontSaveFPRegs);
2042 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2044 __ b(eq, &post_runtime);
2045 __ push(r0); // generator object
2046 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2047 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2048 __ bind(&post_runtime);
2049 __ pop(result_register());
2050 EmitReturnSequence();
2053 context()->Plug(result_register());
2057 case Yield::kFinal: {
2058 VisitForAccumulatorValue(expr->generator_object());
2059 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2060 __ str(r1, FieldMemOperand(result_register(),
2061 JSGeneratorObject::kContinuationOffset));
2062 // Pop value from top-of-stack slot, box result into result register.
2063 EmitCreateIteratorResult(true);
2064 EmitUnwindBeforeReturn();
2065 EmitReturnSequence();
2069 case Yield::kDelegating: {
2070 VisitForStackValue(expr->generator_object());
2072 // Initial stack layout is as follows:
2073 // [sp + 1 * kPointerSize] iter
2074 // [sp + 0 * kPointerSize] g
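// An illustrative sketch of what the code below implements (not the exact
// desugaring):
//   f = 'next'; arg = undefined;               // initial send value
//   for (;;) {
//     result = iter[f](arg);
//     if (result.done) { value = result.value; break; }
//     try { arg = yield result; f = 'next'; }  // result forwarded as-is
//     catch (e) { f = 'throw'; arg = e; }
//   }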
2076 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2077 Label l_next, l_call, l_loop;
2078 Register load_receiver = LoadDescriptor::ReceiverRegister();
2079 Register load_name = LoadDescriptor::NameRegister();
2081 // Initial send value is undefined.
2082 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2085 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2087 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2088 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2089 __ Push(load_name, r3, r0); // "throw", iter, except
2092 // try { received = %yield result }
2093 // Shuffle the received result above a try handler and yield it without re-boxing.
2096 __ pop(r0); // result
2097 int handler_index = NewHandlerTableEntry();
2098 EnterTryBlock(handler_index, &l_catch);
2099 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2100 __ push(r0); // result
2103 __ bind(&l_continuation);
2104 __ RecordGeneratorContinuation();
2107 __ bind(&l_suspend);
2108 const int generator_object_depth = kPointerSize + try_block_size;
2109 __ ldr(r0, MemOperand(sp, generator_object_depth));
2111 __ Push(Smi::FromInt(handler_index)); // handler-index
2112 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2113 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2114 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2115 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2117 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2118 kLRHasBeenSaved, kDontSaveFPRegs);
2119 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2120 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2121 __ pop(r0); // result
2122 EmitReturnSequence();
2123 __ bind(&l_resume); // received in r0
2124 ExitTryBlock(handler_index);
2126 // receiver = iter; f = 'next'; arg = received;
2129 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2130 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2131 __ Push(load_name, r3, r0); // "next", iter, received
2133 // result = receiver[f](arg);
2135 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2136 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2137 __ mov(LoadDescriptor::SlotRegister(),
2138 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2139 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2140 CallIC(ic, TypeFeedbackId::None());
2142 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2143 SetCallPosition(expr, 1);
2144 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2147 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2148 __ Drop(1); // The function is still on the stack; drop it.
2150 // if (!result.done) goto l_try;
2152 __ Move(load_receiver, r0);
2154 __ push(load_receiver); // save result
2155 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2156 __ mov(LoadDescriptor::SlotRegister(),
2157 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2158 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done
2159 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2161 __ cmp(r0, Operand(0));
2165 __ pop(load_receiver); // result
2166 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2167 __ mov(LoadDescriptor::SlotRegister(),
2168 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2169 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.value
2170 context()->DropAndPlug(2, r0); // drop iter and g
2177 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2179 JSGeneratorObject::ResumeMode resume_mode) {
2180 // The value stays in r0, and is ultimately read by the resumed generator, as
2181 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2182 // is read to throw the value when the resumed generator is already closed.
2183 // r1 will hold the generator object until the activation has been resumed.
2184 VisitForStackValue(generator);
2185 VisitForAccumulatorValue(value);
2188 // Load suspended function and context.
2189 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2190 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2192 // Load receiver and store as the first argument.
2193 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2196 // Push holes for the rest of the arguments to the generator function.
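// Since variables used inside a generator are forced into the context, the
// actual argument values have already been copied out of these stack slots;
// the hole placeholders pushed below are never read.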
2197 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2199 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2200 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2201 Label push_argument_holes, push_frame;
2202 __ bind(&push_argument_holes);
2203 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2204 __ b(mi, &push_frame);
2206 __ jmp(&push_argument_holes);
2208 // Enter a new JavaScript frame, and initialize its slots as they were when
2209 // the generator was suspended.
2210 Label resume_frame, done;
2211 __ bind(&push_frame);
2212 __ bl(&resume_frame);
2214 __ bind(&resume_frame);
2215 // lr = return address.
2216 // fp = caller's frame pointer.
2217 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2218 // cp = callee's context,
2219 // r4 = callee's JS function.
2220 __ PushFixedFrame(r4);
2221 // Adjust FP to point to saved FP.
2222 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2224 // Load the operand stack size.
2225 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2226 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2229 // If we are sending a value and there is no operand stack, we can jump back in directly.
2231 if (resume_mode == JSGeneratorObject::NEXT) {
2233 __ cmp(r3, Operand(0));
2234 __ b(ne, &slow_resume);
2235 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2237 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2238 if (FLAG_enable_embedded_constant_pool) {
2239 // Load the new code object's constant pool pointer.
2240 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2243 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2246 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2247 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2250 __ bind(&slow_resume);
2253 // Otherwise, we push holes for the operand stack and call the runtime to fix
2254 // up the stack and the handlers.
2255 Label push_operand_holes, call_resume;
2256 __ bind(&push_operand_holes);
2257 __ sub(r3, r3, Operand(1), SetCC);
2258 __ b(mi, &call_resume);
2260 __ b(&push_operand_holes);
2261 __ bind(&call_resume);
2262 DCHECK(!result_register().is(r1));
2263 __ Push(r1, result_register());
2264 __ Push(Smi::FromInt(resume_mode));
2265 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2266 // Not reached: the runtime call returns elsewhere.
2267 __ stop("not-reached");
2270 context()->Plug(result_register());
2274 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
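// Builds a JSIteratorResult of the shape { value, done }: the value is popped
// from the top-of-stack slot and 'done' is the literal flag passed in by the
// caller.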
2275 Label allocate, done_allocate;
2277 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT);
2278 __ b(&done_allocate);
2281 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2282 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2284 __ bind(&done_allocate);
2285 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2286 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2287 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2290 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2291 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2292 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2293 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2294 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2295 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2296 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2300 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2301 SetExpressionPosition(prop);
2302 Literal* key = prop->key()->AsLiteral();
2303 DCHECK(!prop->IsSuperAccess());
2305 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2306 __ mov(LoadDescriptor::SlotRegister(),
2307 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2308 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2312 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2313 // Stack: receiver, home_object.
2314 SetExpressionPosition(prop);
2315 Literal* key = prop->key()->AsLiteral();
2316 DCHECK(!key->value()->IsSmi());
2317 DCHECK(prop->IsSuperAccess());
2319 __ Push(key->value());
2320 __ Push(Smi::FromInt(language_mode()));
2321 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2325 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2326 SetExpressionPosition(prop);
2327 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2328 __ mov(LoadDescriptor::SlotRegister(),
2329 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2334 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2335 // Stack: receiver, home_object, key.
2336 SetExpressionPosition(prop);
2337 __ Push(Smi::FromInt(language_mode()));
2338 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2342 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2344 Expression* left_expr,
2345 Expression* right_expr) {
2346 Label done, smi_case, stub_call;
2348 Register scratch1 = r2;
2349 Register scratch2 = r3;
2351 // Get the arguments.
2353 Register right = r0;
2356 // Perform combined smi check on both operands.
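// ORing the two tagged words leaves the smi tag bit clear only if both
// operands are smis (kSmiTag == 0), so a single EmitJumpIfSmi on the result
// checks both operands at once.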
2357 __ orr(scratch1, left, Operand(right));
2358 STATIC_ASSERT(kSmiTag == 0);
2359 JumpPatchSite patch_site(masm_);
2360 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2362 __ bind(&stub_call);
2364 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2365 CallIC(code, expr->BinaryOperationFeedbackId());
2366 patch_site.EmitPatchInfo();
2370 // Smi case. This code works the same way as the smi-smi case in the type
2371 // recording binary operation stub.
2374 __ GetLeastBitsFromSmi(scratch1, right, 5);
2375 __ mov(right, Operand(left, ASR, scratch1));
2376 __ bic(right, right, Operand(kSmiTagMask));
2379 __ SmiUntag(scratch1, left);
2380 __ GetLeastBitsFromSmi(scratch2, right, 5);
2381 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2382 __ TrySmiTag(right, scratch1, &stub_call);
2386 __ SmiUntag(scratch1, left);
2387 __ GetLeastBitsFromSmi(scratch2, right, 5);
2388 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2389 __ tst(scratch1, Operand(0xc0000000));
2390 __ b(ne, &stub_call);
2391 __ SmiTag(right, scratch1);
2395 __ add(scratch1, left, Operand(right), SetCC);
2396 __ b(vs, &stub_call);
2397 __ mov(right, scratch1);
2400 __ sub(scratch1, left, Operand(right), SetCC);
2401 __ b(vs, &stub_call);
2402 __ mov(right, scratch1);
2405 __ SmiUntag(ip, right);
2406 __ smull(scratch1, scratch2, left, ip);
2407 __ mov(ip, Operand(scratch1, ASR, 31));
2408 __ cmp(ip, Operand(scratch2));
2409 __ b(ne, &stub_call);
2410 __ cmp(scratch1, Operand::Zero());
2411 __ mov(right, Operand(scratch1), LeaveCC, ne);
2413 __ add(scratch2, right, Operand(left), SetCC);
2414 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2415 __ b(mi, &stub_call);
2419 __ orr(right, left, Operand(right));
2421 case Token::BIT_AND:
2422 __ and_(right, left, Operand(right));
2424 case Token::BIT_XOR:
2425 __ eor(right, left, Operand(right));
2432 context()->Plug(r0);
2436 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2437 // Constructor is in r0.
2438 DCHECK(lit != NULL);
2441 // No access check is needed here since the constructor is created by the class literal.
2443 Register scratch = r1;
2445 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2448 for (int i = 0; i < lit->properties()->length(); i++) {
2449 ObjectLiteral::Property* property = lit->properties()->at(i);
2450 Expression* value = property->value();
2452 if (property->is_static()) {
2453 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2455 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2458 EmitPropertyKey(property, lit->GetIdForProperty(i));
2460 // The static prototype property is read-only. The non-computed property
2461 // name case is handled in the parser. Since this is the only case where we
2462 // need to check for an own read-only property, we special-case it so we do
2463 // not need to perform the check for every property.
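// For example (illustrative), `class C { static ["prototype"]() {} }` must
// fail: the kThrowIfStaticPrototype call below throws exactly when the
// computed name evaluates to "prototype"; the non-computed spelling is
// already rejected by the parser.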
2464 if (property->is_static() && property->is_computed_name()) {
2465 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2469 VisitForStackValue(value);
2470 if (NeedsHomeObject(value)) {
2471 EmitSetHomeObject(value, 2, property->GetSlot());
2474 switch (property->kind()) {
2475 case ObjectLiteral::Property::CONSTANT:
2476 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2477 case ObjectLiteral::Property::PROTOTYPE:
2479 case ObjectLiteral::Property::COMPUTED:
2480 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2483 case ObjectLiteral::Property::GETTER:
2484 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2486 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2489 case ObjectLiteral::Property::SETTER:
2490 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2492 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2500 // Set both the prototype and constructor to have fast properties, and also
2501 // freeze them in strong mode.
2502 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2506 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2509 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2510 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2511 CallIC(code, expr->BinaryOperationFeedbackId());
2512 patch_site.EmitPatchInfo();
2513 context()->Plug(r0);
2517 void FullCodeGenerator::EmitAssignment(Expression* expr,
2518 FeedbackVectorICSlot slot) {
2519 DCHECK(expr->IsValidReferenceExpressionOrThis());
2521 Property* prop = expr->AsProperty();
2522 LhsKind assign_type = Property::GetAssignType(prop);
2524 switch (assign_type) {
2526 Variable* var = expr->AsVariableProxy()->var();
2527 EffectContext context(this);
2528 EmitVariableAssignment(var, Token::ASSIGN, slot);
2531 case NAMED_PROPERTY: {
2532 __ push(r0); // Preserve value.
2533 VisitForAccumulatorValue(prop->obj());
2534 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2535 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2536 __ mov(StoreDescriptor::NameRegister(),
2537 Operand(prop->key()->AsLiteral()->value()));
2538 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2542 case NAMED_SUPER_PROPERTY: {
2544 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2545 VisitForAccumulatorValue(
2546 prop->obj()->AsSuperPropertyReference()->home_object());
2547 // stack: value, this; r0: home_object
2548 Register scratch = r2;
2549 Register scratch2 = r3;
2550 __ mov(scratch, result_register()); // home_object
2551 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2552 __ ldr(scratch2, MemOperand(sp, 0)); // this
2553 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2554 __ str(scratch, MemOperand(sp, 0)); // home_object
2555 // stack: this, home_object; r0: value
2556 EmitNamedSuperPropertyStore(prop);
2559 case KEYED_SUPER_PROPERTY: {
2561 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2563 prop->obj()->AsSuperPropertyReference()->home_object());
2564 VisitForAccumulatorValue(prop->key());
2565 Register scratch = r2;
2566 Register scratch2 = r3;
2567 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2568 // stack: value, this, home_object; r0: key, r3: value
2569 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2570 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2571 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2572 __ str(scratch, MemOperand(sp, kPointerSize));
2573 __ str(r0, MemOperand(sp, 0));
2574 __ Move(r0, scratch2);
2575 // stack: this, home_object, key; r0: value.
2576 EmitKeyedSuperPropertyStore(prop);
2579 case KEYED_PROPERTY: {
2580 __ push(r0); // Preserve value.
2581 VisitForStackValue(prop->obj());
2582 VisitForAccumulatorValue(prop->key());
2583 __ Move(StoreDescriptor::NameRegister(), r0);
2584 __ Pop(StoreDescriptor::ValueRegister(),
2585 StoreDescriptor::ReceiverRegister());
2586 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2588 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2593 context()->Plug(r0);
2597 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2598 Variable* var, MemOperand location) {
2599 __ str(result_register(), location);
2600 if (var->IsContextSlot()) {
2601 // RecordWrite may destroy all its register arguments.
2602 __ mov(r3, result_register());
2603 int offset = Context::SlotOffset(var->index());
2604 __ RecordWriteContextSlot(
2605 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2610 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2611 FeedbackVectorICSlot slot) {
2612 if (var->IsUnallocated()) {
2613 // Global var, const, or let.
2614 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2615 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2616 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2619 } else if (var->IsGlobalSlot()) {
2620 // Global var, const, or let.
2621 DCHECK(var->index() > 0);
2622 DCHECK(var->IsStaticGlobalObjectProperty());
2623 const int slot = var->index();
2624 const int depth = scope()->ContextChainLength(var->scope());
2625 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2626 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2627 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
2628 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2631 __ Push(Smi::FromInt(slot));
2633 __ CallRuntime(is_strict(language_mode())
2634 ? Runtime::kStoreGlobalViaContext_Strict
2635 : Runtime::kStoreGlobalViaContext_Sloppy,
2638 } else if (var->mode() == LET && op != Token::INIT_LET) {
2639 // Non-initializing assignment to let variable needs a write barrier.
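// For example (illustrative), in `x = 1; let x;` the assignment runs while x
// is still the hole, so the hole check below throws a ReferenceError before
// any store takes place.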
2640 DCHECK(!var->IsLookupSlot());
2641 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2643 MemOperand location = VarOperand(var, r1);
2644 __ ldr(r3, location);
2645 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2647 __ mov(r3, Operand(var->name()));
2649 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2650 // Perform the assignment.
2652 EmitStoreToStackLocalOrContextSlot(var, location);
2654 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2655 // Assignment to const variable needs a write barrier.
2656 DCHECK(!var->IsLookupSlot());
2657 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2659 MemOperand location = VarOperand(var, r1);
2660 __ ldr(r3, location);
2661 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2662 __ b(ne, &const_error);
2663 __ mov(r3, Operand(var->name()));
2665 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2666 __ bind(&const_error);
2667 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2669 } else if (var->is_this() && op == Token::INIT_CONST) {
2670 // Initializing assignment to const {this} needs a write barrier.
2671 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2672 Label uninitialized_this;
2673 MemOperand location = VarOperand(var, r1);
2674 __ ldr(r3, location);
2675 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2676 __ b(eq, &uninitialized_this);
2677 __ mov(r0, Operand(var->name()));
2679 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2680 __ bind(&uninitialized_this);
2681 EmitStoreToStackLocalOrContextSlot(var, location);
2683 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2684 if (var->IsLookupSlot()) {
2685 // Assignment to var.
2686 __ push(r0); // Value.
2687 __ mov(r1, Operand(var->name()));
2688 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2689 __ Push(cp, r1, r0); // Context, name, language mode.
2690 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2692 // Assignment to var or initializing assignment to let/const in harmony mode.
2694 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2695 MemOperand location = VarOperand(var, r1);
2696 if (generate_debug_code_ && op == Token::INIT_LET) {
2697 // Check for an uninitialized let binding.
2698 __ ldr(r2, location);
2699 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2700 __ Check(eq, kLetBindingReInitialization);
2702 EmitStoreToStackLocalOrContextSlot(var, location);
2705 } else if (op == Token::INIT_CONST_LEGACY) {
2706 // Const initializers need a write barrier.
2707 DCHECK(var->mode() == CONST_LEGACY);
2708 DCHECK(!var->IsParameter()); // No const parameters.
2709 if (var->IsLookupSlot()) {
2711 __ mov(r0, Operand(var->name()));
2712 __ Push(cp, r0); // Context and name.
2713 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2715 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2717 MemOperand location = VarOperand(var, r1);
2718 __ ldr(r2, location);
2719 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2721 EmitStoreToStackLocalOrContextSlot(var, location);
2726 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2727 if (is_strict(language_mode())) {
2728 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2730 // Silently ignore store in sloppy mode.
2735 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2736 // Assignment to a property, using a named store IC.
2737 Property* prop = expr->target()->AsProperty();
2738 DCHECK(prop != NULL);
2739 DCHECK(prop->key()->IsLiteral());
2741 __ mov(StoreDescriptor::NameRegister(),
2742 Operand(prop->key()->AsLiteral()->value()));
2743 __ pop(StoreDescriptor::ReceiverRegister());
2744 if (FLAG_vector_stores) {
2745 EmitLoadStoreICSlot(expr->AssignmentSlot());
2748 CallStoreIC(expr->AssignmentFeedbackId());
2751 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2752 context()->Plug(r0);
2756 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2757 // Assignment to named property of super.
2759 // stack : receiver ('this'), home_object
2760 DCHECK(prop != NULL);
2761 Literal* key = prop->key()->AsLiteral();
2762 DCHECK(key != NULL);
2764 __ Push(key->value());
2766 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2767 : Runtime::kStoreToSuper_Sloppy),
2772 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2773 // Assignment to keyed property of super.
2775 // stack : receiver ('this'), home_object, key
2776 DCHECK(prop != NULL);
2780 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2781 : Runtime::kStoreKeyedToSuper_Sloppy),
2786 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2787 // Assignment to a property, using a keyed store IC.
2788 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2789 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2792 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2793 if (FLAG_vector_stores) {
2794 EmitLoadStoreICSlot(expr->AssignmentSlot());
2797 CallIC(ic, expr->AssignmentFeedbackId());
2800 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2801 context()->Plug(r0);
2805 void FullCodeGenerator::VisitProperty(Property* expr) {
2806 Comment cmnt(masm_, "[ Property");
2807 SetExpressionPosition(expr);
2809 Expression* key = expr->key();
2811 if (key->IsPropertyName()) {
2812 if (!expr->IsSuperAccess()) {
2813 VisitForAccumulatorValue(expr->obj());
2814 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2815 EmitNamedPropertyLoad(expr);
2817 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2819 expr->obj()->AsSuperPropertyReference()->home_object());
2820 EmitNamedSuperPropertyLoad(expr);
2823 if (!expr->IsSuperAccess()) {
2824 VisitForStackValue(expr->obj());
2825 VisitForAccumulatorValue(expr->key());
2826 __ Move(LoadDescriptor::NameRegister(), r0);
2827 __ pop(LoadDescriptor::ReceiverRegister());
2828 EmitKeyedPropertyLoad(expr);
2830 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2832 expr->obj()->AsSuperPropertyReference()->home_object());
2833 VisitForStackValue(expr->key());
2834 EmitKeyedSuperPropertyLoad(expr);
2837 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2838 context()->Plug(r0);
2842 void FullCodeGenerator::CallIC(Handle<Code> code,
2843 TypeFeedbackId ast_id) {
2845 // All calls must have a predictable size in full-codegen code to ensure that
2846 // the debugger can patch them correctly.
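// (Assumed detail: NEVER_INLINE_TARGET_ADDRESS forces the target address to
// be loaded from the constant pool rather than via movw/movt, so every IC
// call site has the same, patchable length.)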
2847 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2848 NEVER_INLINE_TARGET_ADDRESS);
2852 // Code common for calls using the IC.
2853 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2854 Expression* callee = expr->expression();
2856 CallICState::CallType call_type =
2857 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2859 // Get the target function.
2860 if (call_type == CallICState::FUNCTION) {
2861 { StackValueContext context(this);
2862 EmitVariableLoad(callee->AsVariableProxy());
2863 PrepareForBailout(callee, NO_REGISTERS);
2865 // Push undefined as receiver. This is patched in the method prologue if it
2866 // is a sloppy mode method.
2867 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2870 // Load the function from the receiver.
2871 DCHECK(callee->IsProperty());
2872 DCHECK(!callee->AsProperty()->IsSuperAccess());
2873 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2874 EmitNamedPropertyLoad(callee->AsProperty());
2875 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2876 // Push the target function under the receiver.
2877 __ ldr(ip, MemOperand(sp, 0));
2879 __ str(r0, MemOperand(sp, kPointerSize));
2882 EmitCall(expr, call_type);
2886 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2887 Expression* callee = expr->expression();
2888 DCHECK(callee->IsProperty());
2889 Property* prop = callee->AsProperty();
2890 DCHECK(prop->IsSuperAccess());
2891 SetExpressionPosition(prop);
2893 Literal* key = prop->key()->AsLiteral();
2894 DCHECK(!key->value()->IsSmi());
2895 // Load the function from the receiver.
2896 const Register scratch = r1;
2897 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2898 VisitForStackValue(super_ref->home_object());
2899 VisitForAccumulatorValue(super_ref->this_var());
2902 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2904 __ Push(key->value());
2905 __ Push(Smi::FromInt(language_mode()));
2909 // - this (receiver)
2910 // - this (receiver) <-- LoadFromSuper will pop here and below.
2914 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2916 // Replace home_object with target function.
2917 __ str(r0, MemOperand(sp, kPointerSize));
2920 // - target function
2921 // - this (receiver)
2922 EmitCall(expr, CallICState::METHOD);
2926 // Code common for calls using the IC.
2927 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2930 VisitForAccumulatorValue(key);
2932 Expression* callee = expr->expression();
2934 // Load the function from the receiver.
2935 DCHECK(callee->IsProperty());
2936 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2937 __ Move(LoadDescriptor::NameRegister(), r0);
2938 EmitKeyedPropertyLoad(callee->AsProperty());
2939 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2941 // Push the target function under the receiver.
2942 __ ldr(ip, MemOperand(sp, 0));
2944 __ str(r0, MemOperand(sp, kPointerSize));
2946 EmitCall(expr, CallICState::METHOD);
2950 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2951 Expression* callee = expr->expression();
2952 DCHECK(callee->IsProperty());
2953 Property* prop = callee->AsProperty();
2954 DCHECK(prop->IsSuperAccess());
2956 SetExpressionPosition(prop);
2957 // Load the function from the receiver.
2958 const Register scratch = r1;
2959 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2960 VisitForStackValue(super_ref->home_object());
2961 VisitForAccumulatorValue(super_ref->this_var());
2964 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2966 VisitForStackValue(prop->key());
2967 __ Push(Smi::FromInt(language_mode()));
2971 // - this (receiver)
2972 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2976 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2978 // Replace home_object with target function.
2979 __ str(r0, MemOperand(sp, kPointerSize));
2982 // - target function
2983 // - this (receiver)
2984 EmitCall(expr, CallICState::METHOD);
2988 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2989 // Load the arguments.
2990 ZoneList<Expression*>* args = expr->arguments();
2991 int arg_count = args->length();
2992 for (int i = 0; i < arg_count; i++) {
2993 VisitForStackValue(args->at(i));
2996 SetCallPosition(expr, arg_count);
2997 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2998 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2999 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3000 // Don't assign a type feedback id to the IC, since type feedback is provided
3001 // by the vector above.
3004 RecordJSReturnSite(expr);
3005 // Restore context register.
3006 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3007 context()->DropAndPlug(1, r0);
3011 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3012 // r4: copy of the first argument or undefined if it doesn't exist.
3013 if (arg_count > 0) {
3014 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
3016 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3019 // r3: the JSFunction of the frame enclosing the eval call.
3020 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3022 // r2: language mode.
3023 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3025 // r1: the start position of the scope the call resides in.
3026 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3028 // Do the runtime call.
3029 __ Push(r4, r3, r2, r1);
3030 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3034 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
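// For example (illustrative), in `with (obj) { f(); }` the name f may resolve
// to a property of obj; in that case obj is the "with base object" and
// becomes the receiver, otherwise undefined is pushed as the receiver below.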
3035 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3036 VariableProxy* callee = expr->expression()->AsVariableProxy();
3037 if (callee->var()->IsLookupSlot()) {
3039 SetExpressionPosition(callee);
3040 // Generate code for loading from variables potentially shadowed
3041 // by eval-introduced variables.
3042 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3045 // Call the runtime to find the function to call (returned in r0)
3046 // and the object holding it (returned in r1).
3047 DCHECK(!context_register().is(r2));
3048 __ mov(r2, Operand(callee->name()));
3049 __ Push(context_register(), r2);
3050 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3051 __ Push(r0, r1); // Function, receiver.
3052 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3054 // If fast case code has been generated, emit code to push the
3055 // function and receiver and have the slow path jump around this code.
3057 if (done.is_linked()) {
3063 // The receiver is implicitly the global receiver. Indicate this
3064 // by passing undefined to the call function stub.
3065 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3070 VisitForStackValue(callee);
3071 // refEnv.WithBaseObject()
3072 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3073 __ push(r2); // Reserved receiver slot.
3078 void FullCodeGenerator::VisitCall(Call* expr) {
3080 // We want to verify that RecordJSReturnSite gets called on all paths
3081 // through this function. Avoid early returns.
3082 expr->return_is_recorded_ = false;
3085 Comment cmnt(masm_, "[ Call");
3086 Expression* callee = expr->expression();
3087 Call::CallType call_type = expr->GetCallType(isolate());
3089 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3090 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3091 // to resolve the function we need to call. Then we call the resolved
3092 // function using the given arguments.
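// For example (illustrative), for `eval("x + 1")` the callee named "eval" and
// a receiver are pushed first, then the argument string; the runtime call
// below then decides whether this is a direct eval or an ordinary call to
// whatever "eval" currently resolves to.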
3093 ZoneList<Expression*>* args = expr->arguments();
3094 int arg_count = args->length();
3096 PushCalleeAndWithBaseObject(expr);
3098 // Push the arguments.
3099 for (int i = 0; i < arg_count; i++) {
3100 VisitForStackValue(args->at(i));
3103 // Push a copy of the function (found below the arguments) and resolve eval.
3105 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3107 EmitResolvePossiblyDirectEval(arg_count);
3109 // Touch up the stack with the resolved function.
3110 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3112 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3114 // Record source position for debugger.
3115 SetCallPosition(expr, arg_count);
3116 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3117 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3119 RecordJSReturnSite(expr);
3120 // Restore context register.
3121 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3122 context()->DropAndPlug(1, r0);
3123 } else if (call_type == Call::GLOBAL_CALL) {
3124 EmitCallWithLoadIC(expr);
3126 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3127 // Call to a lookup slot (dynamically introduced variable).
3128 PushCalleeAndWithBaseObject(expr);
3130 } else if (call_type == Call::PROPERTY_CALL) {
3131 Property* property = callee->AsProperty();
3132 bool is_named_call = property->key()->IsPropertyName();
3133 if (property->IsSuperAccess()) {
3134 if (is_named_call) {
3135 EmitSuperCallWithLoadIC(expr);
3137 EmitKeyedSuperCallWithLoadIC(expr);
3140 VisitForStackValue(property->obj());
3141 if (is_named_call) {
3142 EmitCallWithLoadIC(expr);
3144 EmitKeyedCallWithLoadIC(expr, property->key());
3147 } else if (call_type == Call::SUPER_CALL) {
3148 EmitSuperConstructorCall(expr);
3150 DCHECK(call_type == Call::OTHER_CALL);
3151 // Call to an arbitrary expression not handled specially above.
3152 VisitForStackValue(callee);
3153 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3155 // Emit function call.
3160 // RecordJSReturnSite should have been called.
3161 DCHECK(expr->return_is_recorded_);
3166 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3167 Comment cmnt(masm_, "[ CallNew");
3168 // According to ECMA-262, section 11.2.2, page 44, the function
3169 // expression in new calls must be evaluated before the arguments.
3172 // Push constructor on the stack. If it's not a function it's used as
3173 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
3175 DCHECK(!expr->expression()->IsSuperPropertyReference());
3176 VisitForStackValue(expr->expression());
3178 // Push the arguments ("left-to-right") on the stack.
3179 ZoneList<Expression*>* args = expr->arguments();
3180 int arg_count = args->length();
3181 for (int i = 0; i < arg_count; i++) {
3182 VisitForStackValue(args->at(i));
3185 // Call the construct call builtin that handles allocation and
3186 // constructor invocation.
3187 SetConstructCallPosition(expr);
3189 // Load function and argument count into r1 and r0.
3190 __ mov(r0, Operand(arg_count));
3191 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3193 // Record call targets in unoptimized code.
3194 if (FLAG_pretenuring_call_new) {
3195 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3196 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3197 expr->CallNewFeedbackSlot().ToInt() + 1);
3200 __ Move(r2, FeedbackVector());
3201 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3203 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3204 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3205 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3206 // Restore context register.
3207 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3208 context()->Plug(r0);
3212 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3213 SuperCallReference* super_call_ref =
3214 expr->expression()->AsSuperCallReference();
3215 DCHECK_NOT_NULL(super_call_ref);
3217 EmitLoadSuperConstructor(super_call_ref);
3218 __ push(result_register());
3220 // Push the arguments ("left-to-right") on the stack.
3221 ZoneList<Expression*>* args = expr->arguments();
3222 int arg_count = args->length();
3223 for (int i = 0; i < arg_count; i++) {
3224 VisitForStackValue(args->at(i));
3227 // Call the construct call builtin that handles allocation and
3228 // constructor invocation.
3229 SetConstructCallPosition(expr);
3231 // Load original constructor into r4.
3232 VisitForAccumulatorValue(super_call_ref->new_target_var());
3233 __ mov(r4, result_register());
3235 // Load function and argument count into r1 and r0.
3236 __ mov(r0, Operand(arg_count));
3237 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3239 // Record call targets in unoptimized code.
3240 if (FLAG_pretenuring_call_new) {
3242 /* TODO(dslomov): support pretenuring.
3243 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3244 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3245 expr->CallNewFeedbackSlot().ToInt() + 1);
3249 __ Move(r2, FeedbackVector());
3250 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3252 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3253 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3255 RecordJSReturnSite(expr);
3257 // Restore context register.
3258 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3259 context()->Plug(r0);
3263 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3264 ZoneList<Expression*>* args = expr->arguments();
3265 DCHECK(args->length() == 1);
3267 VisitForAccumulatorValue(args->at(0));
3269 Label materialize_true, materialize_false;
3270 Label* if_true = NULL;
3271 Label* if_false = NULL;
3272 Label* fall_through = NULL;
3273 context()->PrepareTest(&materialize_true, &materialize_false,
3274 &if_true, &if_false, &fall_through);
3276 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3278 Split(eq, if_true, if_false, fall_through);
3280 context()->Plug(if_true, if_false);
3284 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3285 ZoneList<Expression*>* args = expr->arguments();
3286 DCHECK(args->length() == 1);
3288 VisitForAccumulatorValue(args->at(0));
3290 Label materialize_true, materialize_false;
3291 Label* if_true = NULL;
3292 Label* if_false = NULL;
3293 Label* fall_through = NULL;
3294 context()->PrepareTest(&materialize_true, &materialize_false,
3295 &if_true, &if_false, &fall_through);
3297 __ JumpIfSmi(r0, if_false);
3298 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3299 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3300 Split(ge, if_true, if_false, fall_through);
3302 context()->Plug(if_true, if_false);
3306 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3307 ZoneList<Expression*>* args = expr->arguments();
3308 DCHECK(args->length() == 1);
3310 VisitForAccumulatorValue(args->at(0));
3312 Label materialize_true, materialize_false;
3313 Label* if_true = NULL;
3314 Label* if_false = NULL;
3315 Label* fall_through = NULL;
3316 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3317 &if_false, &fall_through);
3319 __ JumpIfSmi(r0, if_false);
3320 __ CompareObjectType(r0, r1, r1, SIMD128_VALUE_TYPE);
3321 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3322 Split(eq, if_true, if_false, fall_through);
3324 context()->Plug(if_true, if_false);
3328 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3329 CallRuntime* expr) {
3330 ZoneList<Expression*>* args = expr->arguments();
3331 DCHECK(args->length() == 1);
3333 VisitForAccumulatorValue(args->at(0));
3335 Label materialize_true, materialize_false, skip_lookup;
3336 Label* if_true = NULL;
3337 Label* if_false = NULL;
3338 Label* fall_through = NULL;
3339 context()->PrepareTest(&materialize_true, &materialize_false,
3340 &if_true, &if_false, &fall_through);
3342 __ AssertNotSmi(r0);
3344 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3345 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3346 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3347 __ b(ne, &skip_lookup);
3349 // Check for fast case object. Generate false result for slow case object.
3350 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3351 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3352 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3356 // Look for valueOf name in the descriptor array, and indicate false if
3357 // found. Since we omit an enumeration index check, if it is added via a
3358 // transition that shares its descriptor array, this is a false positive.
3359 Label entry, loop, done;
3361 // Skip loop if no descriptors are valid.
3362 __ NumberOfOwnDescriptors(r3, r1);
3363 __ cmp(r3, Operand::Zero());
3366 __ LoadInstanceDescriptors(r1, r4);
3367 // r4: descriptor array.
3368 // r3: valid entries in the descriptor array.
3369 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3371 // Calculate location of the first key name.
3372 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3373 // Calculate the end of the descriptor array.
3375 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3377 // Loop through all the keys in the descriptor array. If one of these is the
3378 // string "valueOf" the result is false.
3379 // The use of ip to store the valueOf string assumes that it is not otherwise
3380 // used in the loop below.
3381 __ LoadRoot(ip, Heap::kvalueOf_stringRootIndex);
3384 __ ldr(r3, MemOperand(r4, 0));
3387 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3389 __ cmp(r4, Operand(r2));
3394 // Set the bit in the map to indicate that there is no local valueOf field.
3395 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3396 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3397 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3399 __ bind(&skip_lookup);
3401 // If a valueOf property is not found on the object, check that its
3402 // prototype is the unmodified String prototype. If not, the result is false.
3403 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3404 __ JumpIfSmi(r2, if_false);
3405 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3406 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3407 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3408 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3410 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3411 Split(eq, if_true, if_false, fall_through);
3413 context()->Plug(if_true, if_false);
3417 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3418 ZoneList<Expression*>* args = expr->arguments();
3419 DCHECK(args->length() == 1);
3421 VisitForAccumulatorValue(args->at(0));
3423 Label materialize_true, materialize_false;
3424 Label* if_true = NULL;
3425 Label* if_false = NULL;
3426 Label* fall_through = NULL;
3427 context()->PrepareTest(&materialize_true, &materialize_false,
3428 &if_true, &if_false, &fall_through);
3430 __ JumpIfSmi(r0, if_false);
3431 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3432 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3433 Split(eq, if_true, if_false, fall_through);
3435 context()->Plug(if_true, if_false);
3439 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3440 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK(args->length() == 1);
3443 VisitForAccumulatorValue(args->at(0));
3445 Label materialize_true, materialize_false;
3446 Label* if_true = NULL;
3447 Label* if_false = NULL;
3448 Label* fall_through = NULL;
3449 context()->PrepareTest(&materialize_true, &materialize_false,
3450 &if_true, &if_false, &fall_through);
3452 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3453 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3454 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3455 __ cmp(r2, Operand(0x80000000));
3456 __ cmp(r1, Operand(0x00000000), eq);
3458 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3459 Split(eq, if_true, if_false, fall_through);
3461 context()->Plug(if_true, if_false);
3465 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3466 ZoneList<Expression*>* args = expr->arguments();
3467 DCHECK(args->length() == 1);
3469 VisitForAccumulatorValue(args->at(0));
3471 Label materialize_true, materialize_false;
3472 Label* if_true = NULL;
3473 Label* if_false = NULL;
3474 Label* fall_through = NULL;
3475 context()->PrepareTest(&materialize_true, &materialize_false,
3476 &if_true, &if_false, &fall_through);
3478 __ JumpIfSmi(r0, if_false);
3479 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3480 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3481 Split(eq, if_true, if_false, fall_through);
3483 context()->Plug(if_true, if_false);
3487 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3488 ZoneList<Expression*>* args = expr->arguments();
3489 DCHECK(args->length() == 1);
3491 VisitForAccumulatorValue(args->at(0));
3493 Label materialize_true, materialize_false;
3494 Label* if_true = NULL;
3495 Label* if_false = NULL;
3496 Label* fall_through = NULL;
3497 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3498 &if_false, &fall_through);
3500 __ JumpIfSmi(r0, if_false);
3501 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
3502 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3503 Split(eq, if_true, if_false, fall_through);
3505 context()->Plug(if_true, if_false);
3509 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3510 ZoneList<Expression*>* args = expr->arguments();
3511 DCHECK(args->length() == 1);
3513 VisitForAccumulatorValue(args->at(0));
3515 Label materialize_true, materialize_false;
3516 Label* if_true = NULL;
3517 Label* if_false = NULL;
3518 Label* fall_through = NULL;
3519 context()->PrepareTest(&materialize_true, &materialize_false,
3520 &if_true, &if_false, &fall_through);
3522 __ JumpIfSmi(r0, if_false);
3523 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3524 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3525 Split(eq, if_true, if_false, fall_through);
3527 context()->Plug(if_true, if_false);
3531 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3532 ZoneList<Expression*>* args = expr->arguments();
3533 DCHECK(args->length() == 1);
3535 VisitForAccumulatorValue(args->at(0));
3537 Label materialize_true, materialize_false;
3538 Label* if_true = NULL;
3539 Label* if_false = NULL;
3540 Label* fall_through = NULL;
3541 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3542 &if_false, &fall_through);
3544 __ JumpIfSmi(r0, if_false);
3546 Register type_reg = r2;
3547 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3548 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3549 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3550 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3551 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3552 Split(ls, if_true, if_false, fall_through);
3554 context()->Plug(if_true, if_false);
3558 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3559 DCHECK(expr->arguments()->length() == 0);
3561 Label materialize_true, materialize_false;
3562 Label* if_true = NULL;
3563 Label* if_false = NULL;
3564 Label* fall_through = NULL;
3565 context()->PrepareTest(&materialize_true, &materialize_false,
3566 &if_true, &if_false, &fall_through);
3568 // Get the frame pointer for the calling frame.
3569 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3571 // Skip the arguments adaptor frame if it exists.
3572 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3573 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3574 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3576 // Check the marker in the calling frame.
3577 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3578 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3579 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3580 Split(eq, if_true, if_false, fall_through);
3582 context()->Plug(if_true, if_false);
3586 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3587 ZoneList<Expression*>* args = expr->arguments();
3588 DCHECK(args->length() == 2);
3590 // Load the two objects into registers and perform the comparison.
3591 VisitForStackValue(args->at(0));
3592 VisitForAccumulatorValue(args->at(1));
3594 Label materialize_true, materialize_false;
3595 Label* if_true = NULL;
3596 Label* if_false = NULL;
3597 Label* fall_through = NULL;
3598 context()->PrepareTest(&materialize_true, &materialize_false,
3599 &if_true, &if_false, &fall_through);
3603 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3604 Split(eq, if_true, if_false, fall_through);
3606 context()->Plug(if_true, if_false);
3610 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3611 ZoneList<Expression*>* args = expr->arguments();
3612 DCHECK(args->length() == 1);
3614 // ArgumentsAccessStub expects the key in r1 and the formal
3615 // parameter count in r0.
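// This implements the %_Arguments(index) intrinsic, e.g. (illustrative)
// reading arguments[1] of the current frame without materializing a full
// arguments object.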
3616 VisitForAccumulatorValue(args->at(0));
3618 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3619 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3621 context()->Plug(r0);
3625 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3626 DCHECK(expr->arguments()->length() == 0);
3628 // Get the number of formal parameters.
3629 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3631 // Check if the calling frame is an arguments adaptor frame.
3632 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3633 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3634 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3640 context()->Plug(r0);
3644 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3645 ZoneList<Expression*>* args = expr->arguments();
3646 DCHECK(args->length() == 1);
3647 Label done, null, function, non_function_constructor;
3649 VisitForAccumulatorValue(args->at(0));
3651 // If the object is a smi, we return null.
3652 __ JumpIfSmi(r0, &null);
3654 // Check that the object is a JS object but take special care of JS
3655 // functions to make sure they have 'Function' as their class.
3656 // Assume that there are only two callable types, and one of them is at
3657 // either end of the type range for JS object types. Saves extra comparisons.
3658 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3659 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ b(eq, &function);
3666 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3667 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3668 LAST_SPEC_OBJECT_TYPE - 1);
3669 __ b(eq, &function);
3670 // Assume that there is no larger type.
3671 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3673 // Check if the constructor in the map is a JS function.
3674 Register instance_type = r2;
3675 __ GetMapConstructor(r0, r0, r1, instance_type);
3676 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3677 __ b(ne, &non_function_constructor);
3679 // r0 now contains the constructor function. Grab the
3680 // instance class name from there.
3681 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3682 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);
  context()->Plug(r0);
3706 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3707 ZoneList<Expression*>* args = expr->arguments();
3708 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
3723 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3724 ZoneList<Expression*>* args = expr->arguments();
3725 DCHECK_EQ(1, args->length());
3727 VisitForAccumulatorValue(args->at(0));
3729 Label materialize_true, materialize_false;
3730 Label* if_true = nullptr;
3731 Label* if_false = nullptr;
3732 Label* fall_through = nullptr;
3733 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3734 &if_false, &fall_through);
3736 __ JumpIfSmi(r0, if_false);
3737 __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE);
3738 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3739 Split(eq, if_true, if_false, fall_through);
3741 context()->Plug(if_true, if_false);
3745 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3746 ZoneList<Expression*>* args = expr->arguments();
3747 DCHECK(args->length() == 2);
3748 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3749 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3751 VisitForAccumulatorValue(args->at(0)); // Load the object.
3753 Register object = r0;
3754 Register result = r0;
3755 Register scratch0 = r9;
3756 Register scratch1 = r1;
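  // Fields other than the date value are cached on the JSDate object and are
  // only valid while the object's cache stamp matches the isolate's date
  // cache stamp; otherwise the value is recomputed via the C fallback below.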
  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ ldr(scratch1, MemOperand(scratch1));
      __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(result);
3784 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3785 ZoneList<Expression*>* args = expr->arguments();
3786 DCHECK_EQ(3, args->length());
3788 Register string = r0;
3789 Register index = r1;
3790 Register value = r2;
3792 VisitForStackValue(args->at(0)); // index
3793 VisitForStackValue(args->at(1)); // value
3794 VisitForAccumulatorValue(args->at(2)); // string
3795 __ Pop(index, value);
  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
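  // The index register still holds a smi here (only the value was untagged),
  // so the LSR #kSmiTagSize in the addressing mode untags it on the fly,
  // turning the smi into the byte offset of the one-byte character.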
3813 context()->Plug(string);
3817 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3818 ZoneList<Expression*>* args = expr->arguments();
3819 DCHECK_EQ(3, args->length());
3821 Register string = r0;
3822 Register index = r1;
3823 Register value = r2;
3825 VisitForStackValue(args->at(0)); // index
3826 VisitForStackValue(args->at(1)); // value
3827 VisitForAccumulatorValue(args->at(2)); // string
3828 __ Pop(index, value);
  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
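  // For two-byte strings no shift is needed: with kSmiTagSize == 1 and
  // kSmiTag == 0 (asserted above), a smi index is exactly twice the untagged
  // index, which is already the byte offset of a two-byte character.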
3847 context()->Plug(string);
3851 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3852 ZoneList<Expression*>* args = expr->arguments();
3853 DCHECK(args->length() == 2);
3854 VisitForStackValue(args->at(0)); // Load the object.
3855 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
3879 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3880 ZoneList<Expression*>* args = expr->arguments();
3881 DCHECK_EQ(args->length(), 1);
3882 // Load the argument into r0 and call the stub.
3883 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
3891 void FullCodeGenerator::EmitToString(CallRuntime* expr) {
3892 ZoneList<Expression*>* args = expr->arguments();
3893 DCHECK_EQ(1, args->length());
3895 // Load the argument into r0 and convert it.
3896 VisitForAccumulatorValue(args->at(0));
  ToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
3904 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3905 ZoneList<Expression*>* args = expr->arguments();
3906 DCHECK_EQ(1, args->length());
3908 // Load the argument into r0 and convert it.
3909 VisitForAccumulatorValue(args->at(0));
3911 Label convert, done_convert;
3912 __ JumpIfSmi(r0, &convert);
3913 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3914 __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
  __ b(ls, &done_convert);
  __ bind(&convert);
  ToStringStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
3920 context()->Plug(r0);
3924 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3925 ZoneList<Expression*>* args = expr->arguments();
3926 DCHECK_EQ(1, args->length());
3928 // Load the argument into r0 and convert it.
3929 VisitForAccumulatorValue(args->at(0));
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
3937 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3938 ZoneList<Expression*>* args = expr->arguments();
3939 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
3955 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3956 ZoneList<Expression*>* args = expr->arguments();
3957 DCHECK(args->length() == 2);
3958 VisitForStackValue(args->at(0));
3959 VisitForAccumulatorValue(args->at(1));
3961 Register object = r1;
3962 Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
4000 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4001 ZoneList<Expression*>* args = expr->arguments();
4002 DCHECK(args->length() == 2);
4003 VisitForStackValue(args->at(0));
4004 VisitForAccumulatorValue(args->at(1));
4006 Register object = r1;
4007 Register index = r0;
4008 Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
4047 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4048 ZoneList<Expression*>* args = expr->arguments();
4049 DCHECK_EQ(2, args->length());
4050 VisitForStackValue(args->at(0));
4051 VisitForAccumulatorValue(args->at(1));
  __ pop(r1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
4060 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
4061 ZoneList<Expression*>* args = expr->arguments();
4062 DCHECK_LE(2, args->length());
4063 // Push target, receiver and arguments onto the stack.
4064 for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  // Move target to r1.
4068 int const argc = args->length() - 2;
4069 __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
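  // Stack layout at this point (pushed left-to-right above): target,
  // receiver, arg[0] .. arg[argc - 1]. The target therefore sits argc + 1
  // slots below the top of the stack, which is what the load above fetches.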
4071 __ mov(r0, Operand(argc));
4072 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
4073 // Restore context register.
4074 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4075 // Discard the function left on TOS.
4076 context()->DropAndPlug(1, r0);
4080 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4081 ZoneList<Expression*>* args = expr->arguments();
4082 DCHECK(args->length() >= 2);
4084 int arg_count = args->length() - 2; // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCallFunction, args->length());
  __ bind(&done);

  context()->Plug(r0);
4112 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4113 ZoneList<Expression*>* args = expr->arguments();
4114 DCHECK(args->length() == 2);
4116 // Evaluate new.target and super constructor.
4117 VisitForStackValue(args->at(0));
4118 VisitForStackValue(args->at(1));
4120 // Load original constructor into r4.
4121 __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
4123 // Check if the calling frame is an arguments adaptor frame.
4124 Label adaptor_frame, args_set_up, runtime;
4125 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4126 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4127 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4128 __ b(eq, &adaptor_frame);
4129 // default constructor has no arguments, so no adaptor frame means no args.
  __ mov(r0, Operand::Zero());
  __ b(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r1, r1);
    __ mov(r0, r1);

    // Get arguments pointer in r2.
    __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
    __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
    Label loop;
    __ bind(&loop);
    // Pre-decrement r2 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
    __ Push(r3);
    __ sub(r1, r1, Operand(1));
    __ cmp(r1, Operand::Zero());
    __ b(ne, &loop);
  }

  __ bind(&args_set_up);
4155 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4156 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
4158 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4159 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4161 // Restore context register.
4162 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4164 context()->DropAndPlug(1, r0);
4168 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4169 RegExpConstructResultStub stub(isolate());
4170 ZoneList<Expression*>* args = expr->arguments();
4171 DCHECK(args->length() == 3);
4172 VisitForStackValue(args->at(0));
4173 VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
4182 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4183 ZoneList<Expression*>* args = expr->arguments();
4184 VisitForAccumulatorValue(args->at(0));
4186 Label materialize_true, materialize_false;
4187 Label* if_true = NULL;
4188 Label* if_false = NULL;
4189 Label* fall_through = NULL;
4190 context()->PrepareTest(&materialize_true, &materialize_false,
4191 &if_true, &if_false, &fall_through);
4193 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4194 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
4195 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4196 Split(eq, if_true, if_false, fall_through);
4198 context()->Plug(if_true, if_false);
4202 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4203 ZoneList<Expression*>* args = expr->arguments();
4204 DCHECK(args->length() == 1);
4205 VisitForAccumulatorValue(args->at(0));
4207 __ AssertString(r0);
4209 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4210 __ IndexFromHash(r0, r0);
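  // The string hash field can cache a numeric array index for strings that
  // are valid array indices; IndexFromHash decodes that cached index into a
  // smi (callers are expected to have tested kContainsCachedArrayIndexMask
  // first, as in EmitHasCachedArrayIndex above).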
4212 context()->Plug(r0);
4216 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4217 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4218 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4219 one_char_separator_loop_entry, long_separator_loop;
4220 ZoneList<Expression*>* args = expr->arguments();
4221 DCHECK(args->length() == 2);
4222 VisitForStackValue(args->at(1));
4223 VisitForAccumulatorValue(args->at(0));
4225 // All aliases of the same register have disjoint lifetimes.
4226 Register array = r0;
4227 Register elements = no_reg; // Will be r0.
4228 Register result = no_reg; // Will be r0.
4229 Register separator = r1;
4230 Register array_length = r2;
4231 Register result_pos = no_reg; // Will be r2
4232 Register string_length = r3;
4233 Register string = r4;
4234 Register element = r5;
4235 Register elements_end = r6;
4236 Register scratch = r9;
4238 // Separator operand is on the stack.
4241 // Check that the array is a JSArray.
4242 __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);
4249 // If the array has length zero, return the empty string.
4250 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4251 __ SmiUntag(array_length, SetCC);
4252 __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);
4256 __ bind(&non_trivial_array);
4258 // Get the FixedArray containing array's elements.
4260 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4261 array = no_reg; // End of array's live range.
4263 // Check that all array elements are sequential one-byte strings, and
4264 // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4268 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4269 // Loop condition: while (element < elements_end).
4270 // Live values in registers:
4271 // elements: Fixed array of strings.
4272 // array_length: Length of the fixed array of strings (not smi)
4273 // separator: Separator string
4274 // string_length: Accumulated sum of string lengths (smi).
4275 // element: Current array element.
4276 // elements_end: Array end.
4277 if (generate_debug_code_) {
4278 __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);
4293 // If array_length is 1, return elements[0], a string.
4294 __ cmp(array_length, Operand(1));
4295 __ b(ne, ¬_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);
4299 __ bind(¬_size_one_array);
4301 // Live values in registers:
4302 // separator: Separator string
4303 // array_length: Length of the array.
4304 // string_length: Sum of string lengths (smi).
4305 // elements: FixedArray of strings.
4307 // Check that the separator is a flat one-byte string.
4308 __ JumpIfSmi(separator, &bailout);
4309 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4310 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4311 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4313 // Add (separator length times array_length) - separator length to the
4314 // string_length to get the length of the result string. array_length is not
4315 // smi but the other values are, so the result is a smi
4316 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4317 __ sub(string_length, string_length, Operand(scratch));
4318 __ smull(scratch, ip, array_length, scratch);
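  // smull leaves the 64-bit product of array_length and the (smi) separator
  // length in scratch (low word) and ip (high word). The product is a valid
  // positive smi only if the upper 33 bits are zero, i.e. ip == 0 and the
  // sign bit of the low word is clear, which the checks below verify before
  // adding it to string_length.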
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);
  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4333 result = elements; // End of live range for elements.
4335 // Live values in registers:
4336 // element: First array element
4337 // separator: Separator string
4338 // string_length: Length of result string (not smi)
4339 // array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch,
                           string,        // used as scratch
                           elements_end,  // used as scratch
                           &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4354 // Check the length of the separator.
4355 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4356 __ cmp(scratch, Operand(Smi::FromInt(1)));
4357 __ b(eq, &one_char_separator);
4358 __ b(gt, &long_separator);
4360 // Empty separator case
4361 __ bind(&empty_separator_loop);
4362 // Live values in registers:
4363 // result_pos: the position to which we are currently copying characters.
4364 // element: Current array element.
4365 // elements_end: Array end.
4367 // Copy next array element to the result.
4368 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4369 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4374 __ CopyBytes(string, result_pos, string_length, scratch);
4375 __ cmp(element, elements_end);
4376 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);
4380 // One-character separator case
4381 __ bind(&one_char_separator);
4382 // Replace separator with its one-byte character value.
4383 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
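  // The separator's single byte is loaded once here; the loop below then
  // emits it with a plain strb per element instead of a CopyBytes call.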
4384 // Jump into the loop after the code that copies the separator, so the first
4385 // element is not preceded by a separator
4386 __ jmp(&one_char_separator_loop_entry);
4388 __ bind(&one_char_separator_loop);
4389 // Live values in registers:
4390 // result_pos: the position to which we are currently copying characters.
4391 // element: Current array element.
4392 // elements_end: Array end.
4393 // separator: Single separator one-byte char (in lower byte).
4395 // Copy the separator character to the result.
4396 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4398 // Copy next array element to the result.
4399 __ bind(&one_char_separator_loop_entry);
4400 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4401 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4406 __ CopyBytes(string, result_pos, string_length, scratch);
4407 __ cmp(element, elements_end);
4408 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);
4412 // Long separator case (separator is more than one character). Entry is at the
4413 // label long_separator below.
4414 __ bind(&long_separator_loop);
4415 // Live values in registers:
4416 // result_pos: the position to which we are currently copying characters.
4417 // element: Current array element.
4418 // elements_end: Array end.
4419 // separator: Separator string.
4421 // Copy the separator to the result.
4422 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4427 __ CopyBytes(string, result_pos, string_length, scratch);
4429 __ bind(&long_separator);
4430 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4431 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4436 __ CopyBytes(string, result_pos, string_length, scratch);
4437 __ cmp(element, elements_end);
4438 __ b(lt, &long_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
4449 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4450 DCHECK(expr->arguments()->length() == 0);
4451 ExternalReference debug_is_active =
4452 ExternalReference::debug_is_active_address(isolate());
4453 __ mov(ip, Operand(debug_is_active));
4454 __ ldrb(r0, MemOperand(ip));
4456 context()->Plug(r0);
4460 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
4461 ZoneList<Expression*>* args = expr->arguments();
4462 DCHECK_EQ(2, args->length());
4463 VisitForStackValue(args->at(0));
4464 VisitForStackValue(args->at(1));
4466 Label runtime, done;
4468 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT);
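  // Fast path: try to allocate the JSIteratorResult inline in new space and
  // fill in its fields manually; if allocation fails, &runtime falls back to
  // Runtime::kCreateIterResultObject with the two values still on the stack.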
4469 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4470 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
  __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(r3);
  __ pop(r2);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
4475 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4476 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
4477 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
4478 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
4479 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject, 2);

  __ bind(&done);
  context()->Plug(r0);
4491 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4492 // Push undefined as the receiver.
4493 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4496 __ ldr(r0, GlobalObjectOperand());
4497 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
4498 __ ldr(r0, ContextOperand(r0, expr->context_index()));
4502 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4503 ZoneList<Expression*>* args = expr->arguments();
4504 int arg_count = args->length();
4506 SetCallPosition(expr, arg_count);
4507 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
4513 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4514 ZoneList<Expression*>* args = expr->arguments();
4515 int arg_count = args->length();
4517 if (expr->is_jsruntime()) {
4518 Comment cmnt(masm_, "[ CallRuntime");
4519 EmitLoadJSRuntimeFunction(expr);
4521 // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
4526 // Push the arguments ("left-to-right").
4527 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4532 EmitCallJSRuntimeFunction(expr);
4534 // Restore context register.
4535 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4537 context()->DropAndPlug(1, r0);
  } else {
    const Runtime::Function* function = expr->function();
4541 switch (function->function_id) {
4542 #define CALL_INTRINSIC_GENERATOR(Name) \
4543 case Runtime::kInline##Name: { \
4544 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4551 // Push the arguments ("left-to-right").
4552 for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
4557 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4558 __ CallRuntime(expr->function(), arg_count);
4559 context()->Plug(r0);
4566 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4567 switch (expr->op()) {
4568 case Token::DELETE: {
4569 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4570 Property* property = expr->expression()->AsProperty();
4571 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4573 if (property != NULL) {
4574 VisitForStackValue(property->obj());
4575 VisitForStackValue(property->key());
4576 __ CallRuntime(is_strict(language_mode())
4577 ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy,
                       2);
        context()->Plug(r0);
4581 } else if (proxy != NULL) {
4582 Variable* var = proxy->var();
4583 // Delete of an unqualified identifier is disallowed in strict mode but
4584 // "delete this" is allowed.
4585 bool is_this = var->HasThisName(isolate());
4586 DCHECK(is_sloppy(language_mode()) || is_this);
4587 if (var->IsUnallocatedOrGlobalSlot()) {
4588 __ ldr(r2, GlobalObjectOperand());
4589 __ mov(r1, Operand(var->name()));
          __ Push(r2, r1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4592 context()->Plug(r0);
4593 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4594 // Result of deleting non-global, non-dynamic variables is false.
4595 // The subexpression does not have side effects.
4596 context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
4599 // context where the variable was introduced.
4600 DCHECK(!context_register().is(r2));
4601 __ mov(r2, Operand(var->name()));
4602 __ Push(context_register(), r2);
4603 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4604 context()->Plug(r0);
4607 // Result of deleting non-property, non-variable reference is true.
4608 // The subexpression may have side effects.
4609 VisitForEffect(expr->expression());
4610 context()->Plug(true);
4616 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4617 VisitForEffect(expr->expression());
4618 context()->Plug(Heap::kUndefinedValueRootIndex);
4623 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4624 if (context()->IsEffect()) {
4625 // Unary NOT has no side effects so it's only necessary to visit the
4626 // subexpression. Match the optimizing compiler by not branching.
4627 VisitForEffect(expr->expression());
4628 } else if (context()->IsTest()) {
4629 const TestContext* test = TestContext::cast(context());
4630 // The labels are swapped for the recursive call.
4631 VisitForControl(expr->expression(),
4632 test->false_label(),
                        test->true_label(),
                        test->fall_through());
4635 context()->Plug(test->true_label(), test->false_label());
4637 // We handle value contexts explicitly rather than simply visiting
4638 // for control and plugging the control flow into the context,
4639 // because we need to prepare a pair of extra administrative AST ids
4640 // for the optimizing compiler.
4641 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4642 Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
4648 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4649 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
4653 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4654 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4655 if (context()->IsStackValue()) __ push(r0);
4661 case Token::TYPEOF: {
4662 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4664 AccumulatorValueContext context(this);
4665 VisitForTypeofValue(expr->expression());
4668 TypeofStub typeof_stub(isolate());
4669 __ CallStub(&typeof_stub);
4670 context()->Plug(r0);
4680 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4681 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4683 Comment cmnt(masm_, "[ CountOperation");
4685 Property* prop = expr->expression()->AsProperty();
4686 LhsKind assign_type = Property::GetAssignType(prop);
4688 // Evaluate expression and get value.
4689 if (assign_type == VARIABLE) {
4690 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4691 AccumulatorValueContext context(this);
4692 EmitVariableLoad(expr->expression()->AsVariableProxy());
4694 // Reserve space for result of postfix operation.
4695 if (expr->is_postfix() && !context()->IsEffect()) {
4696 __ mov(ip, Operand(Smi::FromInt(0)));
4699 switch (assign_type) {
4700 case NAMED_PROPERTY: {
4701 // Put the object both on the stack and in the register.
4702 VisitForStackValue(prop->obj());
4703 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4704 EmitNamedPropertyLoad(prop);
4708 case NAMED_SUPER_PROPERTY: {
4709 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4710 VisitForAccumulatorValue(
4711 prop->obj()->AsSuperPropertyReference()->home_object());
4712 __ Push(result_register());
4713 const Register scratch = r1;
4714 __ ldr(scratch, MemOperand(sp, kPointerSize));
4716 __ Push(result_register());
4717 EmitNamedSuperPropertyLoad(prop);
4721 case KEYED_SUPER_PROPERTY: {
4722 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4724 prop->obj()->AsSuperPropertyReference()->home_object());
4725 VisitForAccumulatorValue(prop->key());
4726 __ Push(result_register());
4727 const Register scratch = r1;
4728 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4730 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4732 __ Push(result_register());
4733 EmitKeyedSuperPropertyLoad(prop);
4737 case KEYED_PROPERTY: {
4738 VisitForStackValue(prop->obj());
4739 VisitForStackValue(prop->key());
4740 __ ldr(LoadDescriptor::ReceiverRegister(),
4741 MemOperand(sp, 1 * kPointerSize));
4742 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4743 EmitKeyedPropertyLoad(prop);
4752 // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
4754 if (assign_type == VARIABLE) {
4755 PrepareForBailout(expr->expression(), TOS_REG);
4757 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4760 // Inline smi case if we are in a loop.
4761 Label stub_call, done;
4762 JumpPatchSite patch_site(masm_);
4764 int count_value = expr->op() == Token::INC ? 1 : -1;
4765 if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);
4769 // Save result for postfix expressions.
4770 if (expr->is_postfix()) {
4771 if (!context()->IsEffect()) {
4772 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
4775 switch (assign_type) {
4779 case NAMED_PROPERTY:
4780 __ str(r0, MemOperand(sp, kPointerSize));
4782 case NAMED_SUPER_PROPERTY:
4783 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4785 case KEYED_PROPERTY:
4786 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4788 case KEYED_SUPER_PROPERTY:
4789 __ str(r0, MemOperand(sp, 3 * kPointerSize));
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
4802 if (!is_strong(language_mode())) {
4803 ToNumberStub convert_stub(isolate());
4804 __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
4808 // Save result for postfix expressions.
4809 if (expr->is_postfix()) {
4810 if (!context()->IsEffect()) {
4811 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
4814 switch (assign_type) {
4818 case NAMED_PROPERTY:
4819 __ str(r0, MemOperand(sp, kPointerSize));
4821 case NAMED_SUPER_PROPERTY:
4822 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4824 case KEYED_PROPERTY:
4825 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4827 case KEYED_SUPER_PROPERTY:
4828 __ str(r0, MemOperand(sp, 3 * kPointerSize));
  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));
4839 SetExpressionPosition(expr);
4841 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4842 strength(language_mode())).code();
4843 CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);
4847 if (is_strong(language_mode())) {
4848 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4850 // Store the value returned in r0.
4851 switch (assign_type) {
4853 if (expr->is_postfix()) {
4854 { EffectContext context(this);
4855 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4856 Token::ASSIGN, expr->CountSlot());
4857 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          // For all contexts except EffectContext we have the result on
          // top of the stack.
4862 if (!context()->IsEffect()) {
4863 context()->PlugTOS();
4866 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4867 Token::ASSIGN, expr->CountSlot());
4868 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4869 context()->Plug(r0);
4872 case NAMED_PROPERTY: {
4873 __ mov(StoreDescriptor::NameRegister(),
4874 Operand(prop->key()->AsLiteral()->value()));
4875 __ pop(StoreDescriptor::ReceiverRegister());
4876 if (FLAG_vector_stores) {
4877 EmitLoadStoreICSlot(expr->CountSlot());
4880 CallStoreIC(expr->CountStoreFeedbackId());
4882 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4883 if (expr->is_postfix()) {
4884 if (!context()->IsEffect()) {
4885 context()->PlugTOS();
4888 context()->Plug(r0);
4892 case NAMED_SUPER_PROPERTY: {
4893 EmitNamedSuperPropertyStore(prop);
4894 if (expr->is_postfix()) {
4895 if (!context()->IsEffect()) {
4896 context()->PlugTOS();
4899 context()->Plug(r0);
4903 case KEYED_SUPER_PROPERTY: {
4904 EmitKeyedSuperPropertyStore(prop);
4905 if (expr->is_postfix()) {
4906 if (!context()->IsEffect()) {
4907 context()->PlugTOS();
4910 context()->Plug(r0);
4914 case KEYED_PROPERTY: {
4915 __ Pop(StoreDescriptor::ReceiverRegister(),
4916 StoreDescriptor::NameRegister());
4918 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4919 if (FLAG_vector_stores) {
4920 EmitLoadStoreICSlot(expr->CountSlot());
4923 CallIC(ic, expr->CountStoreFeedbackId());
4925 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4926 if (expr->is_postfix()) {
4927 if (!context()->IsEffect()) {
4928 context()->PlugTOS();
4931 context()->Plug(r0);
4939 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4940 Expression* sub_expr,
4941 Handle<String> check) {
4942 Label materialize_true, materialize_false;
4943 Label* if_true = NULL;
4944 Label* if_false = NULL;
4945 Label* fall_through = NULL;
4946 context()->PrepareTest(&materialize_true, &materialize_false,
4947 &if_true, &if_false, &fall_through);
4949 { AccumulatorValueContext context(this);
4950 VisitForTypeofValue(sub_expr);
4952 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4954 Factory* factory = isolate()->factory();
4955 if (String::Equals(check, factory->number_string())) {
4956 __ JumpIfSmi(r0, if_true);
4957 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4958 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
4961 } else if (String::Equals(check, factory->string_string())) {
4962 __ JumpIfSmi(r0, if_false);
4963 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4964 Split(lt, if_true, if_false, fall_through);
4965 } else if (String::Equals(check, factory->symbol_string())) {
4966 __ JumpIfSmi(r0, if_false);
4967 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4968 Split(eq, if_true, if_false, fall_through);
4969 } else if (String::Equals(check, factory->boolean_string())) {
4970 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4973 Split(eq, if_true, if_false, fall_through);
4974 } else if (String::Equals(check, factory->undefined_string())) {
4975 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
4978 // Check for undetectable objects => true.
4979 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4980 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4981 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4982 Split(ne, if_true, if_false, fall_through);
4984 } else if (String::Equals(check, factory->function_string())) {
4985 __ JumpIfSmi(r0, if_false);
4986 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4987 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ and_(r1, r1,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
4990 __ cmp(r1, Operand(1 << Map::kIsCallable));
4991 Split(eq, if_true, if_false, fall_through);
4992 } else if (String::Equals(check, factory->object_string())) {
4993 __ JumpIfSmi(r0, if_false);
4994 __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
4999 // Check for callable or undetectable objects => false.
5000 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5001 __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5002 Split(eq, if_true, if_false, fall_through);
5004 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
5005 } else if (String::Equals(check, factory->type##_string())) { \
5006 __ JumpIfSmi(r0, if_false); \
5007 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); \
5008 __ CompareRoot(r0, Heap::k##Type##MapRootIndex); \
5009 Split(eq, if_true, if_false, fall_through);
    SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
5020 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5021 Comment cmnt(masm_, "[ CompareOperation");
5022 SetExpressionPosition(expr);
5024 // First we try a fast inlined version of the compare when one of
5025 // the operands is a literal.
5026 if (TryLiteralCompare(expr)) return;
5028 // Always perform the comparison for its control flow. Pack the result
5029 // into the expression's context after the comparison is performed.
5030 Label materialize_true, materialize_false;
5031 Label* if_true = NULL;
5032 Label* if_false = NULL;
5033 Label* fall_through = NULL;
5034 context()->PrepareTest(&materialize_true, &materialize_false,
5035 &if_true, &if_false, &fall_through);
5037 Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
5042 __ CallRuntime(Runtime::kHasProperty, 2);
5043 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5044 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
5049 VisitForAccumulatorValue(expr->right());
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5054 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);
5064 bool inline_smi_code = ShouldInlineSmiCase(op);
5065 JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r0, r1);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }
5075 Handle<Code> ic = CodeFactory::CompareIC(
5076 isolate(), op, strength(language_mode())).code();
5077 CallIC(ic, expr->CompareOperationFeedbackId());
5078 patch_site.EmitPatchInfo();
5079 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5080 __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }
5085 // Convert the result of the comparison into one expected for this
5086 // expression's context.
5087 context()->Plug(if_true, if_false);
5091 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
5095 Label* if_true = NULL;
5096 Label* if_false = NULL;
5097 Label* fall_through = NULL;
5098 context()->PrepareTest(&materialize_true, &materialize_false,
5099 &if_true, &if_false, &fall_through);
5101 VisitForAccumulatorValue(sub_expr);
5102 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5103 if (expr->op() == Token::EQ_STRICT) {
5104 Heap::RootListIndex nil_value = nil == kNullValue ?
5105 Heap::kNullValueRootIndex :
5106 Heap::kUndefinedValueRootIndex;
5107 __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
5111 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5112 CallIC(ic, expr->CompareOperationFeedbackId());
5113 __ cmp(r0, Operand(0));
5114 Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
5120 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5121 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5122 context()->Plug(r0);
Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}
5136 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5137 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5138 __ str(value, MemOperand(fp, frame_offset));
5142 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5143 __ ldr(dst, ContextOperand(cp, context_index));
5147 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5148 Scope* closure_scope = scope()->ClosureScope();
5149 if (closure_scope->is_script_scope() ||
5150 closure_scope->is_module_scope()) {
5151 // Contexts nested in the native context have a canonical empty function
5152 // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
5156 } else if (closure_scope->is_eval_scope()) {
5157 // Contexts created by a call to eval have the same closure as the
5158 // context calling eval, not the anonymous closure containing the eval
5159 // code. Fetch it from the context.
5160 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5162 DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}
5169 // ----------------------------------------------------------------------------
5170 // Non-local control flow support.
5172 void FullCodeGenerator::EnterFinallyBlock() {
5173 DCHECK(!result_register().is(r1));
5174 // Store result register while executing finally block.
5175 __ push(result_register());
5176 // Cook return address in link register to stack (smi encoded Code* delta)
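  // Storing the return address as a smi-encoded offset from the code object
  // (rather than as a raw code pointer) keeps the saved slot a valid smi for
  // the duration of the finally block; ExitFinallyBlock reverses this by
  // untagging the offset and adding the code object back before returning.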
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing the finally block.
  __ push(r1);

  // Store pending message while executing finally block.
5184 ExternalReference pending_message_obj =
5185 ExternalReference::address_of_pending_message_obj(isolate());
5186 __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ClearPendingMessage();
}
5194 void FullCodeGenerator::ExitFinallyBlock() {
5195 DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_obj =
5199 ExternalReference::address_of_pending_message_obj(isolate());
5200 __ mov(ip, Operand(pending_message_obj));
5201 __ str(r1, MemOperand(ip));
  // Restore result register from stack.
  __ pop(r1);

  // Uncook return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}
5213 void FullCodeGenerator::ClearPendingMessage() {
5214 DCHECK(!result_register().is(r1));
5215 ExternalReference pending_message_obj =
5216 ExternalReference::address_of_pending_message_obj(isolate());
5217 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
5218 __ mov(ip, Operand(pending_message_obj));
5219 __ str(r1, MemOperand(ip));
5223 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5224 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5225 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5226 Operand(SmiFromSlot(slot)));
5233 static Address GetInterruptImmediateLoadAddress(Address pc) {
5234 Address load_address = pc - 2 * Assembler::kInstrSize;
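  // The pc handed to the back edge table points just past the "blx ip" call;
  // the sequence is <load ip with handler address> <blx ip>, so for the
  // single-instruction load forms the load starts two instructions before
  // pc. The cases below back up further for the multi-instruction movw/movt
  // and mov/orr immediate load sequences.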
5235 if (!FLAG_enable_embedded_constant_pool) {
5236 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
5237 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
5238 // This is an extended constant pool lookup.
5239 if (CpuFeatures::IsSupported(ARMv7)) {
5240 load_address -= 2 * Assembler::kInstrSize;
5241 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5242 DCHECK(Assembler::IsMovT(
5243 Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
5246 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5247 DCHECK(Assembler::IsOrrImmed(
5248 Memory::int32_at(load_address + Assembler::kInstrSize)));
5249 DCHECK(Assembler::IsOrrImmed(
5250 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5251 DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
5254 } else if (CpuFeatures::IsSupported(ARMv7) &&
5255 Assembler::IsMovT(Memory::int32_at(load_address))) {
5256 // This is a movw / movt immediate load.
5257 load_address -= Assembler::kInstrSize;
5258 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5259 } else if (!CpuFeatures::IsSupported(ARMv7) &&
5260 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
5261 // This is a mov / orr immediate load.
5262 load_address -= 3 * Assembler::kInstrSize;
5263 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5264 DCHECK(Assembler::IsOrrImmed(
5265 Memory::int32_at(load_address + Assembler::kInstrSize)));
5266 DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
5279 Code* replacement_code) {
5280 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5281 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5282 CodePatcher patcher(branch_address, 1);
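  // Patching works on the instruction immediately before the address load:
  // for INTERRUPT it becomes a "b pl" that skips the call while the
  // profiling counter is still positive, for the OSR states it becomes a nop
  // so the call is always taken. The address loaded into ip is then
  // repointed to the replacement builtin via set_target_address_at below.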
  switch (target_state) {
    case INTERRUPT: {
      //  <decrement profiling counter>
5288 // ; load interrupt stub address into ip - either of (for ARMv7):
5289 // ; <small cp load> | <extended cp load> | <immediate load>
5290 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5291 // | movt ip, #imm | movw ip, #imm
5292 // | ldr ip, [pp, ip]
5293 // ; or (for ARMv6):
5294 // ; <small cp load> | <extended cp load> | <immediate load>
5295 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5296 // | orr ip, ip, #imm> | orr ip, ip, #imm
5297 // | orr ip, ip, #imm> | orr ip, ip, #imm
5298 // | orr ip, ip, #imm> | orr ip, ip, #imm
5300 // <reset profiling counter>
5303 // Calculate branch offset to the ok-label - this is the difference
5304 // between the branch address and |pc| (which points at <blx ip>) plus
5305 // kProfileCounterResetSequence instructions
5306 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
5307 kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
5311 case ON_STACK_REPLACEMENT:
5312 case OSR_AFTER_STACK_CHECK:
5313 // <decrement profiling counter>
5315 // ; load on-stack replacement address into ip - either of (for ARMv7):
5316 // ; <small cp load> | <extended cp load> | <immediate load>
5317 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5318 // | movt ip, #imm> | movw ip, #imm
5319 // | ldr ip, [pp, ip]
5320 // ; or (for ARMv6):
5321 // ; <small cp load> | <extended cp load> | <immediate load>
5322 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5323 // | orr ip, ip, #imm> | orr ip, ip, #imm
5324 // | orr ip, ip, #imm> | orr ip, ip, #imm
5325 // | orr ip, ip, #imm> | orr ip, ip, #imm
5327 // <reset profiling counter>
      patcher.masm()->nop();
      break;
  }
5333 // Replace the call address.
5334 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
5335 replacement_code->entry());
5337 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5338 unoptimized_code, pc_immediate_load_address, replacement_code);
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
5348 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5349 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5350 Address interrupt_address = Assembler::target_address_at(
5351 pc_immediate_load_address, unoptimized_code);
5353 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5354 DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5361 if (interrupt_address ==
5362 isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
5367 isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8
5375 #endif // V8_TARGET_ARCH_ARM