// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction
// of the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

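// A worked example of the patch-site marker encoding (a sketch with
// illustrative numbers, not taken from a real compile): if EmitPatchInfo()
// runs 4200 instructions after the bound patch site, then with
// kOff12Mask == 0xfff the emitted marker is
//
//   cmp r1, #105      ; 4200 / 0xfff == 1  -> rx is r1
//                     ; 4200 % 0xfff == 105 -> raw 12-bit immediate
//
// and the patching code recovers the delta as 1 * 0xfff + 105 == 4200.
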
// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
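  // A worked example of the push-loop split above (illustrative numbers):
  // with locals_count == 70 and kMaxPushes == 32, the loop runs
  // 70 / 32 == 2 iterations of 32 pushes each (64 slots), and the unrolled
  // tail emits the remaining 70 % 32 == 6 pushes.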
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
    __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);

    // Check the marker in the calling frame.
    __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
    __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
    Label non_construct_frame, done;

    __ b(ne, &non_construct_frame);
    __ ldr(r0,
           MemOperand(r2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ b(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, r0, r2, r3);
  }
  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r2, Operand(Smi::FromInt(num_parameters)));
    __ mov(r1, Operand(Smi::FromInt(rest_index)));
    __ mov(r0, Operand(Smi::FromInt(language_mode())));
    __ Push(r3, r2, r1, r0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r0, r1, r2);
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_);
      predictable.ExpectSize(
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}

#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif

void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}

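// Padding arithmetic for the reset sequence above, worked through for the
// ARMv7 case as an illustration: kProfileCounterResetSequenceLength is
// 5 * kInstrSize, so expected_instr_count == 5 - 2 == 3.  If the mov of
// profiling_counter_ needed only one instruction, the while loop emits two
// nops so the trailing mov/str pair always lands at the same fixed offset
// from `start`, keeping the sequence patchable at a known size.
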
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

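// A worked example of the back-edge weight formula (a sketch; the constants
// are assumptions for illustration): taking kCodeSizeMultiplier as 149 and a
// back edge 1200 bytes after its target, the decrement weight is
// Min(kMaxBackEdgeWeight, Max(1, 1200 / 149)) == 8 (assuming
// kMaxBackEdgeWeight >= 8), so loops with small bodies decrement the counter
// in smaller steps and run more iterations between interrupt checks.
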
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(function());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }
  }
}

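// Frame-teardown arithmetic in the return sequence above (illustrative): for
// a function declaring two parameters, arg_count == 2 + 1 == 3 (the receiver
// is included), so with kPointerSize == 4 on ARM the return sequence drops
// sp_delta == 12 bytes of caller-pushed arguments after leaving the frame.
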
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

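// Worked offsets for StackOperand (a sketch assuming the usual allocation
// where parameter i has index i and the receiver index -1; kPointerSize == 4):
// with num_parameters == 2, parameter 0 maps to MemOperand(fp, +12),
// parameter 1 to MemOperand(fp, +8), and the receiver to MemOperand(fp, +16).
// A local with index 1 maps to kLocal0Offset - 4, one slot below local 0.
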
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ Push(r2, r0);
      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,
                     2);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ Push(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
      break;
    }
  }
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
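  // Stack layout at loop entry, as consumed by the sp-relative loads below
  // (a sketch; slot offsets are in units of kPointerSize):
  //   [sp + 0] current index (smi)
  //   [sp + 1] array length (smi)
  //   [sp + 2] fixed array of keys (or enum cache)
  //   [sp + 3] expected map, or a Smi(0)/Smi(1) proxy/slow-check marker
  //   [sp + 4] the enumerable object itself
  // The break path drops all five slots at once.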
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy?  Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}

void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ ldr(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}

void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    const int slot = var->index();
    const int depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    }
  } else {
    __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ mov(LoadDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy
        // have the same declaration scope (i.e. they are both in global code,
        // in the same function or in the same eval code) and the VariableProxy
        // is in the source physically located after the initializer of the
        // variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(r0);
      break;
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of
  // slots and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r0);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on the stack
      result_saved = true;
    }

    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ mov(r0, Operand(Smi::FromInt(NONE)));
            __ push(r0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }

  // Verify that compilation exactly consumed the number of store ic slots
  // that the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}

1825 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1826 Comment cmnt(masm_, "[ ArrayLiteral");
1828 expr->BuildConstantElements(isolate());
1830 Handle<FixedArray> constant_elements = expr->constant_elements();
1831 bool has_fast_elements =
1832 IsFastObjectElementsKind(expr->constant_elements_kind());
1833 Handle<FixedArrayBase> constant_elements_values(
1834 FixedArrayBase::cast(constant_elements->get(1)));
1836 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1837 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1838 // If the only customer of allocation sites is transitioning, then we can
1839 // turn tracking off when there is nowhere else to transition to.
1840 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1843 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1844 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1845 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1846 __ mov(r1, Operand(constant_elements));
1847 if (MustCreateArrayLiteralWithRuntime(expr)) {
1848 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1849 __ Push(r3, r2, r1, r0);
1850 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1851 } else {
1852 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1853 __ CallStub(&stub);
1854 }
1855 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1857 bool result_saved = false; // Is the result saved to the stack?
1858 ZoneList<Expression*>* subexprs = expr->values();
1859 int length = subexprs->length();
1861 // Emit code to evaluate all the non-constant subexpressions and to store
1862 // them into the newly cloned array.
1863 int array_index = 0;
1864 for (; array_index < length; array_index++) {
1865 Expression* subexpr = subexprs->at(array_index);
1866 if (subexpr->IsSpread()) break;
1868 // If the subexpression is a literal or a simple materialized literal it
1869 // is already set in the cloned array.
1870 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1872 if (!result_saved) {
1874 __ Push(Smi::FromInt(expr->literal_index()));
1875 result_saved = true;
1877 VisitForAccumulatorValue(subexpr);
1879 if (has_fast_elements) {
1880 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1881 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1882 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1883 __ str(result_register(), FieldMemOperand(r1, offset));
1884 // Update the write barrier for the array store.
1885 __ RecordWriteField(r1, offset, result_register(), r2,
1886 kLRHasBeenSaved, kDontSaveFPRegs,
1887 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
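// (INLINE_SMI_CHECK lets the barrier test the stored value first and skip
// the remembered-set update when it is a smi, a sketch of the common fast
// path for element stores of small integers.)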
1889 __ mov(r3, Operand(Smi::FromInt(array_index)));
1890 StoreArrayLiteralElementStub stub(isolate());
1894 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1897 // In case the array literal contains spread expressions it has two parts. The
1898 // first part is the "static" array which has a literal index and is handled
1899 // above. The second part is the part after the first spread expression
1900 // (inclusive), and these elements get appended to the array. Note that the
1901 // number of elements an iterable produces is unknown ahead of time.
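// For example (a sketch): in [1, 2, ...xs, 3], the elements 1 and 2 come
// from the cloned boilerplate handled above, while ...xs and 3 are appended
// one at a time by the loop below.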
1902 if (array_index < length && result_saved) {
1903 __ pop(); // literal index
1905 result_saved = false;
1907 for (; array_index < length; array_index++) {
1908 Expression* subexpr = subexprs->at(array_index);
1911 if (subexpr->IsSpread()) {
1912 VisitForStackValue(subexpr->AsSpread()->expression());
1913 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1915 VisitForStackValue(subexpr);
1916 __ CallRuntime(Runtime::kAppendElement, 2);
1919 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1922 if (result_saved) {
1923 __ pop(); // literal index
1924 context()->PlugTOS();
1925 } else {
1926 context()->Plug(r0);
1927 }
1931 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1932 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1934 Comment cmnt(masm_, "[ Assignment");
1935 SetExpressionPosition(expr, INSERT_BREAK);
1937 Property* property = expr->target()->AsProperty();
1938 LhsKind assign_type = Property::GetAssignType(property);
1940 // Evaluate LHS expression.
1941 switch (assign_type) {
1942 case VARIABLE:
1943 // Nothing to do here.
1944 break;
1945 case NAMED_PROPERTY:
1946 if (expr->is_compound()) {
1947 // We need the receiver both on the stack and in the register.
1948 VisitForStackValue(property->obj());
1949 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1951 VisitForStackValue(property->obj());
1954 case NAMED_SUPER_PROPERTY:
1955 VisitForStackValue(
1956 property->obj()->AsSuperPropertyReference()->this_var());
1957 VisitForAccumulatorValue(
1958 property->obj()->AsSuperPropertyReference()->home_object());
1959 __ Push(result_register());
1960 if (expr->is_compound()) {
1961 const Register scratch = r1;
1962 __ ldr(scratch, MemOperand(sp, kPointerSize));
1964 __ Push(result_register());
1967 case KEYED_SUPER_PROPERTY:
1968 VisitForStackValue(
1969 property->obj()->AsSuperPropertyReference()->this_var());
1970 VisitForStackValue(
1971 property->obj()->AsSuperPropertyReference()->home_object());
1972 VisitForAccumulatorValue(property->key());
1973 __ Push(result_register());
1974 if (expr->is_compound()) {
1975 const Register scratch = r1;
1976 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1978 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1980 __ Push(result_register());
1983 case KEYED_PROPERTY:
1984 if (expr->is_compound()) {
1985 VisitForStackValue(property->obj());
1986 VisitForStackValue(property->key());
1987 __ ldr(LoadDescriptor::ReceiverRegister(),
1988 MemOperand(sp, 1 * kPointerSize));
1989 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1991 VisitForStackValue(property->obj());
1992 VisitForStackValue(property->key());
1997 // For compound assignments we need another deoptimization point after the
1998 // variable/property load.
1999 if (expr->is_compound()) {
2000 { AccumulatorValueContext context(this);
2001 switch (assign_type) {
2003 EmitVariableLoad(expr->target()->AsVariableProxy());
2004 PrepareForBailout(expr->target(), TOS_REG);
2006 case NAMED_PROPERTY:
2007 EmitNamedPropertyLoad(property);
2008 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2010 case NAMED_SUPER_PROPERTY:
2011 EmitNamedSuperPropertyLoad(property);
2012 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2014 case KEYED_SUPER_PROPERTY:
2015 EmitKeyedSuperPropertyLoad(property);
2016 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2018 case KEYED_PROPERTY:
2019 EmitKeyedPropertyLoad(property);
2020 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2025 Token::Value op = expr->binary_op();
2026 __ push(r0); // Left operand goes on the stack.
2027 VisitForAccumulatorValue(expr->value());
2029 AccumulatorValueContext context(this);
2030 if (ShouldInlineSmiCase(op)) {
2031 EmitInlineSmiBinaryOp(expr->binary_operation(),
2032 op,
2033 expr->target(),
2034 expr->value());
2035 } else {
2036 EmitBinaryOp(expr->binary_operation(), op);
2037 }
2039 // Deoptimization point in case the binary operation may have side effects.
2040 PrepareForBailout(expr->binary_operation(), TOS_REG);
2041 } else {
2042 VisitForAccumulatorValue(expr->value());
2043 }
2045 SetExpressionPosition(expr);
2048 switch (assign_type) {
2050 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2051 expr->op(), expr->AssignmentSlot());
2052 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2053 context()->Plug(r0);
2055 case NAMED_PROPERTY:
2056 EmitNamedPropertyAssignment(expr);
2058 case NAMED_SUPER_PROPERTY:
2059 EmitNamedSuperPropertyStore(property);
2060 context()->Plug(r0);
2062 case KEYED_SUPER_PROPERTY:
2063 EmitKeyedSuperPropertyStore(property);
2064 context()->Plug(r0);
2066 case KEYED_PROPERTY:
2067 EmitKeyedPropertyAssignment(expr);
2073 void FullCodeGenerator::VisitYield(Yield* expr) {
2074 Comment cmnt(masm_, "[ Yield");
2075 SetExpressionPosition(expr);
2077 // Evaluate yielded value first; the initial iterator definition depends on
2078 // this. It stays on the stack while we update the iterator.
2079 VisitForStackValue(expr->expression());
2081 switch (expr->yield_kind()) {
2082 case Yield::kSuspend:
2083 // Pop value from top-of-stack slot; box result into result register.
2084 EmitCreateIteratorResult(false);
2085 __ push(result_register());
2087 case Yield::kInitial: {
2088 Label suspend, continuation, post_runtime, resume;
2091 __ bind(&continuation);
2092 __ RecordGeneratorContinuation();
2096 VisitForAccumulatorValue(expr->generator_object());
2097 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2098 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2099 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2100 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2102 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2103 kLRHasBeenSaved, kDontSaveFPRegs);
2104 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2105 __ cmp(sp, r1);
2106 __ b(eq, &post_runtime);
2107 __ push(r0); // generator object
2108 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2109 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2110 __ bind(&post_runtime);
2111 __ pop(result_register());
2112 EmitReturnSequence();
2115 context()->Plug(result_register());
2119 case Yield::kFinal: {
2120 VisitForAccumulatorValue(expr->generator_object());
2121 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2122 __ str(r1, FieldMemOperand(result_register(),
2123 JSGeneratorObject::kContinuationOffset));
2124 // Pop value from top-of-stack slot, box result into result register.
2125 EmitCreateIteratorResult(true);
2126 EmitUnwindBeforeReturn();
2127 EmitReturnSequence();
2131 case Yield::kDelegating: {
2132 VisitForStackValue(expr->generator_object());
2134 // Initial stack layout is as follows:
2135 // [sp + 1 * kPointerSize] iter
2136 // [sp + 0 * kPointerSize] g
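// Taken together, the labels below implement roughly this sketch:
//   f = 'next'; received = undefined;
//   loop: result = iter[f](received);
//         if (result.done) break, producing result.value;
//         received = %yield result;         // a throw resumed into us lands
//         on a throw resume: f = 'throw', received = the exception;
//         otherwise f stays 'next'; goto loop;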
2138 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2139 Label l_next, l_call, l_loop;
2140 Register load_receiver = LoadDescriptor::ReceiverRegister();
2141 Register load_name = LoadDescriptor::NameRegister();
2143 // Initial send value is undefined.
2144 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2147 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2149 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2150 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2151 __ Push(load_name, r3, r0); // "throw", iter, except
2154 // try { received = %yield result }
2155 // Shuffle the received result above a try handler and yield it without
2156 // re-boxing.
2158 __ pop(r0); // result
2159 int handler_index = NewHandlerTableEntry();
2160 EnterTryBlock(handler_index, &l_catch);
2161 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2162 __ push(r0); // result
2165 __ bind(&l_continuation);
2166 __ RecordGeneratorContinuation();
2169 __ bind(&l_suspend);
2170 const int generator_object_depth = kPointerSize + try_block_size;
2171 __ ldr(r0, MemOperand(sp, generator_object_depth));
2173 __ Push(Smi::FromInt(handler_index)); // handler-index
2174 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2175 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2176 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2177 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2179 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2180 kLRHasBeenSaved, kDontSaveFPRegs);
2181 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2182 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2183 __ pop(r0); // result
2184 EmitReturnSequence();
2185 __ bind(&l_resume); // received in r0
2186 ExitTryBlock(handler_index);
2188 // receiver = iter; f = 'next'; arg = received;
2191 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2192 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2193 __ Push(load_name, r3, r0); // "next", iter, received
2195 // result = receiver[f](arg);
2197 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2198 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2199 __ mov(LoadDescriptor::SlotRegister(),
2200 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2201 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2202 CallIC(ic, TypeFeedbackId::None());
2203 __ mov(r1, r0);
2204 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2205 SetCallPosition(expr, 1);
2206 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2207 __ CallStub(&stub);
2209 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2210 __ Drop(1); // The function is still on the stack; drop it.
2212 // if (!result.done) goto l_try;
2214 __ Move(load_receiver, r0);
2216 __ push(load_receiver); // save result
2217 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2218 __ mov(LoadDescriptor::SlotRegister(),
2219 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2220 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done
2221 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2222 CallIC(bool_ic);
2223 __ cmp(r0, Operand(0));
2224 __ b(eq, &l_try);
2226 // result.value
2227 __ pop(load_receiver); // result
2228 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2229 __ mov(LoadDescriptor::SlotRegister(),
2230 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2231 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.value
2232 context()->DropAndPlug(2, r0); // drop iter and g
2239 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2241 JSGeneratorObject::ResumeMode resume_mode) {
2242 // The value stays in r0, and is ultimately read by the resumed generator, as
2243 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2244 // is read to throw the value when the resumed generator is already closed.
2245 // r1 will hold the generator object until the activation has been resumed.
2246 VisitForStackValue(generator);
2247 VisitForAccumulatorValue(value);
2250 // Load suspended function and context.
2251 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2252 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2254 // Load receiver and store as the first argument.
2255 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2258 // Push holes for the rest of the arguments to the generator function.
2259 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2260 __ ldr(r3,
2261 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2262 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2263 Label push_argument_holes, push_frame;
2264 __ bind(&push_argument_holes);
2265 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2266 __ b(mi, &push_frame);
2267 __ push(r2);
2268 __ jmp(&push_argument_holes);
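// (The loop just emitted pushes one hole per formal parameter: r3 starts at
// the smi-tagged formal parameter count, and the flag-setting subtraction
// drops out of the loop once the count goes negative.)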
2270 // Enter a new JavaScript frame, and initialize its slots as they were when
2271 // the generator was suspended.
2272 Label resume_frame, done;
2273 __ bind(&push_frame);
2274 __ bl(&resume_frame);
2276 __ bind(&resume_frame);
2277 // lr = return address.
2278 // fp = caller's frame pointer.
2279 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2280 // cp = callee's context,
2281 // r4 = callee's JS function.
2282 __ PushFixedFrame(r4);
2283 // Adjust FP to point to saved FP.
2284 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2286 // Load the operand stack size.
2287 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2288 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2291 // If we are sending a value and there is no operand stack, we can jump back
2292 // in directly.
2293 if (resume_mode == JSGeneratorObject::NEXT) {
2295 __ cmp(r3, Operand(0));
2296 __ b(ne, &slow_resume);
2297 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2299 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2300 if (FLAG_enable_embedded_constant_pool) {
2301 // Load the new code object's constant pool pointer.
2302 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2305 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2308 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2309 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2312 __ bind(&slow_resume);
2315 // Otherwise, we push holes for the operand stack and call the runtime to fix
2316 // up the stack and the handlers.
2317 Label push_operand_holes, call_resume;
2318 __ bind(&push_operand_holes);
2319 __ sub(r3, r3, Operand(1), SetCC);
2320 __ b(mi, &call_resume);
2322 __ b(&push_operand_holes);
2323 __ bind(&call_resume);
2324 DCHECK(!result_register().is(r1));
2325 __ Push(r1, result_register());
2326 __ Push(Smi::FromInt(resume_mode));
2327 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2328 // Not reached: the runtime call returns elsewhere.
2329 __ stop("not-reached");
2332 context()->Plug(result_register());
2336 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2338 Label gc_required;
2339 Label allocated;
2340 const int instance_size = 5 * kPointerSize;
2341 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2342 instance_size);
2344 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
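// (A sketch of the five-word layout allocated here: map, properties,
// elements, then the two in-object fields for 'value' and 'done' that are
// initialized below.)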
2345 __ b(&allocated);
2347 __ bind(&gc_required);
2348 __ Push(Smi::FromInt(instance_size));
2349 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2350 __ ldr(context_register(),
2351 MemOperand(fp, StandardFrameConstants::kContextOffset));
2353 __ bind(&allocated);
2354 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2355 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2356 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2357 __ pop(r2);
2358 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2359 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2360 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2361 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2362 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2363 __ str(r2,
2364 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2365 __ str(r3,
2366 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2368 // Only the value field needs a write barrier, as the other values are in the
2369 // root set.
2370 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2371 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2375 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2376 SetExpressionPosition(prop);
2377 Literal* key = prop->key()->AsLiteral();
2378 DCHECK(!prop->IsSuperAccess());
2380 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2381 __ mov(LoadDescriptor::SlotRegister(),
2382 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2383 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2387 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2388 // Stack: receiver, home_object.
2389 SetExpressionPosition(prop);
2390 Literal* key = prop->key()->AsLiteral();
2391 DCHECK(!key->value()->IsSmi());
2392 DCHECK(prop->IsSuperAccess());
2394 __ Push(key->value());
2395 __ Push(Smi::FromInt(language_mode()));
2396 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2400 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2401 SetExpressionPosition(prop);
2402 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2403 __ mov(LoadDescriptor::SlotRegister(),
2404 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2409 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2410 // Stack: receiver, home_object, key.
2411 SetExpressionPosition(prop);
2412 __ Push(Smi::FromInt(language_mode()));
2413 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2417 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2418 Token::Value op,
2419 Expression* left_expr,
2420 Expression* right_expr) {
2421 Label done, smi_case, stub_call;
2423 Register scratch1 = r2;
2424 Register scratch2 = r3;
2426 // Get the arguments.
2427 Register left = r1;
2428 Register right = r0;
2431 // Perform combined smi check on both operands.
2432 __ orr(scratch1, left, Operand(right));
2433 STATIC_ASSERT(kSmiTag == 0);
2434 JumpPatchSite patch_site(masm_);
2435 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2437 __ bind(&stub_call);
2438 Handle<Code> code =
2439 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2440 CallIC(code, expr->BinaryOperationFeedbackId());
2441 patch_site.EmitPatchInfo();
2442 __ jmp(&done);
2444 __ bind(&smi_case);
2445 // Smi case. This code works the same way as the smi-smi case in the type
2446 // recording binary operation stub.
2449 __ GetLeastBitsFromSmi(scratch1, right, 5);
2450 __ mov(right, Operand(left, ASR, scratch1));
2451 __ bic(right, right, Operand(kSmiTagMask));
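// (Sketch of the SAR shortcut above: arithmetic-shifting the still-tagged
// left operand preserves the sign, and clearing the tag bits afterwards
// re-tags the result, so no explicit SmiUntag/SmiTag pair is needed.)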
2454 __ SmiUntag(scratch1, left);
2455 __ GetLeastBitsFromSmi(scratch2, right, 5);
2456 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2457 __ TrySmiTag(right, scratch1, &stub_call);
2461 __ SmiUntag(scratch1, left);
2462 __ GetLeastBitsFromSmi(scratch2, right, 5);
2463 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2464 __ tst(scratch1, Operand(0xc0000000));
2465 __ b(ne, &stub_call);
2466 __ SmiTag(right, scratch1);
2470 __ add(scratch1, left, Operand(right), SetCC);
2471 __ b(vs, &stub_call);
2472 __ mov(right, scratch1);
2475 __ sub(scratch1, left, Operand(right), SetCC);
2476 __ b(vs, &stub_call);
2477 __ mov(right, scratch1);
2480 __ SmiUntag(ip, right);
2481 __ smull(scratch1, scratch2, left, ip);
2482 __ mov(ip, Operand(scratch1, ASR, 31));
2483 __ cmp(ip, Operand(scratch2));
2484 __ b(ne, &stub_call);
2485 __ cmp(scratch1, Operand::Zero());
2486 __ mov(right, Operand(scratch1), LeaveCC, ne);
2488 __ add(scratch2, right, Operand(left), SetCC);
2489 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2490 __ b(mi, &stub_call);
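// (The smull above produced the 64-bit product in scratch2:scratch1; it
// fits a smi only when the high word equals the sign extension of the low
// word. A zero low word can mean 0 or -0: adding the two operands reveals
// the sign, and a negative sum, i.e. -0, falls back to the stub.)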
2494 __ orr(right, left, Operand(right));
2496 case Token::BIT_AND:
2497 __ and_(right, left, Operand(right));
2499 case Token::BIT_XOR:
2500 __ eor(right, left, Operand(right));
2507 context()->Plug(r0);
2511 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2512 int* used_store_slots) {
2513 // Constructor is in r0.
2514 DCHECK(lit != NULL);
2517 // No access check is needed here since the constructor is created by the
2519 Register scratch = r1;
2520 __ ldr(scratch,
2521 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2522 __ push(scratch);
2524 for (int i = 0; i < lit->properties()->length(); i++) {
2525 ObjectLiteral::Property* property = lit->properties()->at(i);
2526 Expression* value = property->value();
2528 if (property->is_static()) {
2529 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2531 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2534 EmitPropertyKey(property, lit->GetIdForProperty(i));
2536 // The static prototype property is read-only. We handle the non-computed
2537 // property name case in the parser. Since this is the only case where we
2538 // need to check for an own read-only property, we special-case it so we do
2539 // not have to do the check for every property.
2540 if (property->is_static() && property->is_computed_name()) {
2541 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2545 VisitForStackValue(value);
2546 EmitSetHomeObjectIfNeeded(value, 2,
2547 lit->SlotForHomeObject(value, used_store_slots));
2549 switch (property->kind()) {
2550 case ObjectLiteral::Property::CONSTANT:
2551 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2552 case ObjectLiteral::Property::PROTOTYPE:
2554 case ObjectLiteral::Property::COMPUTED:
2555 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2558 case ObjectLiteral::Property::GETTER:
2559 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2561 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2564 case ObjectLiteral::Property::SETTER:
2565 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2567 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2575 // Set both the prototype and constructor to have fast properties, and also
2576 // freeze them in strong mode.
2577 __ CallRuntime(is_strong(language_mode())
2578 ? Runtime::kFinalizeClassDefinitionStrong
2579 : Runtime::kFinalizeClassDefinition,
2584 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2585 __ pop(r1);
2586 Handle<Code> code =
2587 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2588 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2589 CallIC(code, expr->BinaryOperationFeedbackId());
2590 patch_site.EmitPatchInfo();
2591 context()->Plug(r0);
2595 void FullCodeGenerator::EmitAssignment(Expression* expr,
2596 FeedbackVectorICSlot slot) {
2597 DCHECK(expr->IsValidReferenceExpressionOrThis());
2599 Property* prop = expr->AsProperty();
2600 LhsKind assign_type = Property::GetAssignType(prop);
2602 switch (assign_type) {
2604 Variable* var = expr->AsVariableProxy()->var();
2605 EffectContext context(this);
2606 EmitVariableAssignment(var, Token::ASSIGN, slot);
2609 case NAMED_PROPERTY: {
2610 __ push(r0); // Preserve value.
2611 VisitForAccumulatorValue(prop->obj());
2612 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2613 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2614 __ mov(StoreDescriptor::NameRegister(),
2615 Operand(prop->key()->AsLiteral()->value()));
2616 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2620 case NAMED_SUPER_PROPERTY: {
2622 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2623 VisitForAccumulatorValue(
2624 prop->obj()->AsSuperPropertyReference()->home_object());
2625 // stack: value, this; r0: home_object
2626 Register scratch = r2;
2627 Register scratch2 = r3;
2628 __ mov(scratch, result_register()); // home_object
2629 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2630 __ ldr(scratch2, MemOperand(sp, 0)); // this
2631 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2632 __ str(scratch, MemOperand(sp, 0)); // home_object
2633 // stack: this, home_object; r0: value
2634 EmitNamedSuperPropertyStore(prop);
2637 case KEYED_SUPER_PROPERTY: {
2639 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2640 VisitForStackValue(
2641 prop->obj()->AsSuperPropertyReference()->home_object());
2642 VisitForAccumulatorValue(prop->key());
2643 Register scratch = r2;
2644 Register scratch2 = r3;
2645 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2646 // stack: value, this, home_object; r0: key, r3: value
2647 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2648 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2649 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2650 __ str(scratch, MemOperand(sp, kPointerSize));
2651 __ str(r0, MemOperand(sp, 0));
2652 __ Move(r0, scratch2);
2653 // stack: this, home_object, key; r0: value.
2654 EmitKeyedSuperPropertyStore(prop);
2657 case KEYED_PROPERTY: {
2658 __ push(r0); // Preserve value.
2659 VisitForStackValue(prop->obj());
2660 VisitForAccumulatorValue(prop->key());
2661 __ Move(StoreDescriptor::NameRegister(), r0);
2662 __ Pop(StoreDescriptor::ValueRegister(),
2663 StoreDescriptor::ReceiverRegister());
2664 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2665 Handle<Code> ic =
2666 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2667 CallIC(ic);
2668 break;
2669 }
2670 }
2671 context()->Plug(r0);
2675 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2676 Variable* var, MemOperand location) {
2677 __ str(result_register(), location);
2678 if (var->IsContextSlot()) {
2679 // RecordWrite may destroy all its register arguments.
2680 __ mov(r3, result_register());
2681 int offset = Context::SlotOffset(var->index());
2682 __ RecordWriteContextSlot(
2683 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
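// (Only context slots need the write barrier: contexts are heap objects,
// whereas stack slots are scanned as roots by the GC directly.)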
2688 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2689 FeedbackVectorICSlot slot) {
2690 if (var->IsUnallocated()) {
2691 // Global var, const, or let.
2692 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2693 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2694 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2697 } else if (var->IsGlobalSlot()) {
2698 // Global var, const, or let.
2699 DCHECK(var->index() > 0);
2700 DCHECK(var->IsStaticGlobalObjectProperty());
2701 const int slot = var->index();
2702 const int depth = scope()->ContextChainLength(var->scope());
2703 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2704 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2705 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
2706 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2709 __ Push(Smi::FromInt(slot));
2711 __ CallRuntime(is_strict(language_mode())
2712 ? Runtime::kStoreGlobalViaContext_Strict
2713 : Runtime::kStoreGlobalViaContext_Sloppy,
2716 } else if (var->mode() == LET && op != Token::INIT_LET) {
2717 // Non-initializing assignment to let variable needs a write barrier.
2718 DCHECK(!var->IsLookupSlot());
2719 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2721 MemOperand location = VarOperand(var, r1);
2722 __ ldr(r3, location);
2723 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2725 __ mov(r3, Operand(var->name()));
2727 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2728 // Perform the assignment.
2730 EmitStoreToStackLocalOrContextSlot(var, location);
2732 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2733 // Assignment to const variable needs a write barrier.
2734 DCHECK(!var->IsLookupSlot());
2735 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2737 MemOperand location = VarOperand(var, r1);
2738 __ ldr(r3, location);
2739 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2740 __ b(ne, &const_error);
2741 __ mov(r3, Operand(var->name()));
2743 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2744 __ bind(&const_error);
2745 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2747 } else if (var->is_this() && op == Token::INIT_CONST) {
2748 // Initializing assignment to const {this} needs a write barrier.
2749 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2750 Label uninitialized_this;
2751 MemOperand location = VarOperand(var, r1);
2752 __ ldr(r3, location);
2753 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2754 __ b(eq, &uninitialized_this);
2755 __ mov(r0, Operand(var->name()));
2757 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2758 __ bind(&uninitialized_this);
2759 EmitStoreToStackLocalOrContextSlot(var, location);
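// (Sketch: in a derived-class constructor, 'this' holds the hole until
// super() has run, so finding anything but the hole here means 'this' was
// already initialized and the ReferenceError above is thrown.)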
2761 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2762 if (var->IsLookupSlot()) {
2763 // Assignment to var.
2764 __ push(r0); // Value.
2765 __ mov(r1, Operand(var->name()));
2766 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2767 __ Push(cp, r1, r0); // Context, name, language mode.
2768 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2770 // Assignment to var or initializing assignment to let/const in harmony
2772 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2773 MemOperand location = VarOperand(var, r1);
2774 if (generate_debug_code_ && op == Token::INIT_LET) {
2775 // Check for an uninitialized let binding.
2776 __ ldr(r2, location);
2777 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2778 __ Check(eq, kLetBindingReInitialization);
2780 EmitStoreToStackLocalOrContextSlot(var, location);
2783 } else if (op == Token::INIT_CONST_LEGACY) {
2784 // Const initializers need a write barrier.
2785 DCHECK(var->mode() == CONST_LEGACY);
2786 DCHECK(!var->IsParameter()); // No const parameters.
2787 if (var->IsLookupSlot()) {
2789 __ mov(r0, Operand(var->name()));
2790 __ Push(cp, r0); // Context and name.
2791 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2793 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2795 MemOperand location = VarOperand(var, r1);
2796 __ ldr(r2, location);
2797 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2799 EmitStoreToStackLocalOrContextSlot(var, location);
2804 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2805 if (is_strict(language_mode())) {
2806 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2808 // Silently ignore store in sloppy mode.
2813 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2814 // Assignment to a property, using a named store IC.
2815 Property* prop = expr->target()->AsProperty();
2816 DCHECK(prop != NULL);
2817 DCHECK(prop->key()->IsLiteral());
2819 __ mov(StoreDescriptor::NameRegister(),
2820 Operand(prop->key()->AsLiteral()->value()));
2821 __ pop(StoreDescriptor::ReceiverRegister());
2822 if (FLAG_vector_stores) {
2823 EmitLoadStoreICSlot(expr->AssignmentSlot());
2826 CallStoreIC(expr->AssignmentFeedbackId());
2829 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2830 context()->Plug(r0);
2834 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2835 // Assignment to named property of super.
2837 // stack : receiver ('this'), home_object
2838 DCHECK(prop != NULL);
2839 Literal* key = prop->key()->AsLiteral();
2840 DCHECK(key != NULL);
2842 __ Push(key->value());
2844 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2845 : Runtime::kStoreToSuper_Sloppy),
2850 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2851 // Assignment to keyed property of super.
2853 // stack : receiver ('this'), home_object, key
2854 DCHECK(prop != NULL);
2858 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2859 : Runtime::kStoreKeyedToSuper_Sloppy),
2864 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2865 // Assignment to a property, using a keyed store IC.
2866 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2867 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2870 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2871 if (FLAG_vector_stores) {
2872 EmitLoadStoreICSlot(expr->AssignmentSlot());
2875 CallIC(ic, expr->AssignmentFeedbackId());
2878 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2879 context()->Plug(r0);
2883 void FullCodeGenerator::VisitProperty(Property* expr) {
2884 Comment cmnt(masm_, "[ Property");
2885 SetExpressionPosition(expr);
2887 Expression* key = expr->key();
2889 if (key->IsPropertyName()) {
2890 if (!expr->IsSuperAccess()) {
2891 VisitForAccumulatorValue(expr->obj());
2892 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2893 EmitNamedPropertyLoad(expr);
2895 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2896 VisitForStackValue(
2897 expr->obj()->AsSuperPropertyReference()->home_object());
2898 EmitNamedSuperPropertyLoad(expr);
2901 if (!expr->IsSuperAccess()) {
2902 VisitForStackValue(expr->obj());
2903 VisitForAccumulatorValue(expr->key());
2904 __ Move(LoadDescriptor::NameRegister(), r0);
2905 __ pop(LoadDescriptor::ReceiverRegister());
2906 EmitKeyedPropertyLoad(expr);
2908 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2909 VisitForStackValue(
2910 expr->obj()->AsSuperPropertyReference()->home_object());
2911 VisitForStackValue(expr->key());
2912 EmitKeyedSuperPropertyLoad(expr);
2915 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2916 context()->Plug(r0);
2920 void FullCodeGenerator::CallIC(Handle<Code> code,
2921 TypeFeedbackId ast_id) {
2923 // All calls must have a predictable size in full-codegen code to ensure that
2924 // the debugger can patch them correctly.
2925 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2926 NEVER_INLINE_TARGET_ADDRESS);
2930 // Code common for calls using the IC.
2931 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2932 Expression* callee = expr->expression();
2934 CallICState::CallType call_type =
2935 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2937 // Get the target function.
2938 if (call_type == CallICState::FUNCTION) {
2939 { StackValueContext context(this);
2940 EmitVariableLoad(callee->AsVariableProxy());
2941 PrepareForBailout(callee, NO_REGISTERS);
2943 // Push undefined as receiver. This is patched in the method prologue if it
2944 // is a sloppy mode method.
2945 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2946 __ push(ip);
2947 } else {
2948 // Load the function from the receiver.
2949 DCHECK(callee->IsProperty());
2950 DCHECK(!callee->AsProperty()->IsSuperAccess());
2951 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2952 EmitNamedPropertyLoad(callee->AsProperty());
2953 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2954 // Push the target function under the receiver.
2955 __ ldr(ip, MemOperand(sp, 0));
2956 __ push(ip);
2957 __ str(r0, MemOperand(sp, kPointerSize));
2958 }
2960 EmitCall(expr, call_type);
2964 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2965 Expression* callee = expr->expression();
2966 DCHECK(callee->IsProperty());
2967 Property* prop = callee->AsProperty();
2968 DCHECK(prop->IsSuperAccess());
2969 SetExpressionPosition(prop);
2971 Literal* key = prop->key()->AsLiteral();
2972 DCHECK(!key->value()->IsSmi());
2973 // Load the function from the receiver.
2974 const Register scratch = r1;
2975 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2976 VisitForStackValue(super_ref->home_object());
2977 VisitForAccumulatorValue(super_ref->this_var());
2980 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2982 __ Push(key->value());
2983 __ Push(Smi::FromInt(language_mode()));
2987 // - this (receiver)
2988 // - this (receiver) <-- LoadFromSuper will pop here and below.
2992 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2994 // Replace home_object with target function.
2995 __ str(r0, MemOperand(sp, kPointerSize));
2998 // - target function
2999 // - this (receiver)
3000 EmitCall(expr, CallICState::METHOD);
3004 // Code common for calls using the IC.
3005 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3008 VisitForAccumulatorValue(key);
3010 Expression* callee = expr->expression();
3012 // Load the function from the receiver.
3013 DCHECK(callee->IsProperty());
3014 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3015 __ Move(LoadDescriptor::NameRegister(), r0);
3016 EmitKeyedPropertyLoad(callee->AsProperty());
3017 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3019 // Push the target function under the receiver.
3020 __ ldr(ip, MemOperand(sp, 0));
3021 __ push(ip);
3022 __ str(r0, MemOperand(sp, kPointerSize));
3024 EmitCall(expr, CallICState::METHOD);
3028 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3029 Expression* callee = expr->expression();
3030 DCHECK(callee->IsProperty());
3031 Property* prop = callee->AsProperty();
3032 DCHECK(prop->IsSuperAccess());
3034 SetExpressionPosition(prop);
3035 // Load the function from the receiver.
3036 const Register scratch = r1;
3037 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3038 VisitForStackValue(super_ref->home_object());
3039 VisitForAccumulatorValue(super_ref->this_var());
3042 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3044 VisitForStackValue(prop->key());
3045 __ Push(Smi::FromInt(language_mode()));
3049 // - this (receiver)
3050 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3054 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3056 // Replace home_object with target function.
3057 __ str(r0, MemOperand(sp, kPointerSize));
3060 // - target function
3061 // - this (receiver)
3062 EmitCall(expr, CallICState::METHOD);
3066 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3067 // Load the arguments.
3068 ZoneList<Expression*>* args = expr->arguments();
3069 int arg_count = args->length();
3070 for (int i = 0; i < arg_count; i++) {
3071 VisitForStackValue(args->at(i));
3074 SetCallPosition(expr, arg_count);
3075 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3076 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3077 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3078 // Don't assign a type feedback id to the IC, since type feedback is provided
3079 // by the vector above.
3080 CallIC(ic);
3082 RecordJSReturnSite(expr);
3083 // Restore context register.
3084 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3085 context()->DropAndPlug(1, r0);
3089 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3090 // r4: copy of the first argument or undefined if it doesn't exist.
3091 if (arg_count > 0) {
3092 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
3094 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3097 // r3: the receiver of the enclosing function.
3098 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3100 // r2: language mode.
3101 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3103 // r1: the start position of the scope the call resides in.
3104 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3106 // Do the runtime call.
3107 __ Push(r4, r3, r2, r1);
3108 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
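// (A sketch of the five arguments consumed here: the copy of the function
// pushed by the caller, plus the four values pushed above; the runtime
// returns the resolved target function in r0.)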
3112 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3113 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3114 VariableProxy* callee = expr->expression()->AsVariableProxy();
3115 if (callee->var()->IsLookupSlot()) {
3117 SetExpressionPosition(callee);
3118 // Generate code for loading from variables potentially shadowed
3119 // by eval-introduced variables.
3120 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3123 // Call the runtime to find the function to call (returned in r0)
3124 // and the object holding it (returned in r1).
3125 DCHECK(!context_register().is(r2));
3126 __ mov(r2, Operand(callee->name()));
3127 __ Push(context_register(), r2);
3128 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3129 __ Push(r0, r1); // Function, receiver.
3130 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3132 // If fast case code has been generated, emit code to push the
3133 // function and receiver and have the slow path jump around this code.
3135 if (done.is_linked()) {
3141 // The receiver is implicitly the global receiver. Indicate this
3142 // by passing the hole to the call function stub.
3143 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3148 VisitForStackValue(callee);
3149 // refEnv.WithBaseObject()
3150 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3151 __ push(r2); // Reserved receiver slot.
3156 void FullCodeGenerator::VisitCall(Call* expr) {
3158 // We want to verify that RecordJSReturnSite gets called on all paths
3159 // through this function. Avoid early returns.
3160 expr->return_is_recorded_ = false;
3163 Comment cmnt(masm_, "[ Call");
3164 Expression* callee = expr->expression();
3165 Call::CallType call_type = expr->GetCallType(isolate());
3167 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3168 // In a call to eval, we first call
3169 // RuntimeHidden_ResolvePossiblyDirectEval to resolve the function we need
3170 // to call. Then we call the resolved function using the given arguments.
3171 ZoneList<Expression*>* args = expr->arguments();
3172 int arg_count = args->length();
3174 PushCalleeAndWithBaseObject(expr);
3176 // Push the arguments.
3177 for (int i = 0; i < arg_count; i++) {
3178 VisitForStackValue(args->at(i));
3181 // Push a copy of the function (found below the arguments) and
3183 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3184 __ push(r1);
3185 EmitResolvePossiblyDirectEval(arg_count);
3187 // Touch up the stack with the resolved function.
3188 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3190 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3192 // Record source position for debugger.
3193 SetCallPosition(expr, arg_count);
3194 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3195 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3196 __ CallStub(&stub);
3197 RecordJSReturnSite(expr);
3198 // Restore context register.
3199 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3200 context()->DropAndPlug(1, r0);
3201 } else if (call_type == Call::GLOBAL_CALL) {
3202 EmitCallWithLoadIC(expr);
3204 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3205 // Call to a lookup slot (dynamically introduced variable).
3206 PushCalleeAndWithBaseObject(expr);
3207 EmitCall(expr);
3208 } else if (call_type == Call::PROPERTY_CALL) {
3209 Property* property = callee->AsProperty();
3210 bool is_named_call = property->key()->IsPropertyName();
3211 if (property->IsSuperAccess()) {
3212 if (is_named_call) {
3213 EmitSuperCallWithLoadIC(expr);
3215 EmitKeyedSuperCallWithLoadIC(expr);
3218 VisitForStackValue(property->obj());
3219 if (is_named_call) {
3220 EmitCallWithLoadIC(expr);
3222 EmitKeyedCallWithLoadIC(expr, property->key());
3225 } else if (call_type == Call::SUPER_CALL) {
3226 EmitSuperConstructorCall(expr);
3228 DCHECK(call_type == Call::OTHER_CALL);
3229 // Call to an arbitrary expression not handled specially above.
3230 VisitForStackValue(callee);
3231 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3233 // Emit function call.
3238 // RecordJSReturnSite should have been called.
3239 DCHECK(expr->return_is_recorded_);
3244 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3245 Comment cmnt(masm_, "[ CallNew");
3246 // According to ECMA-262, section 11.2.2, page 44, the function
3247 // expression in new calls must be evaluated before the arguments.
3250 // Push constructor on the stack. If it's not a function it's used as
3251 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3252 // ignored.
3253 DCHECK(!expr->expression()->IsSuperPropertyReference());
3254 VisitForStackValue(expr->expression());
3256 // Push the arguments ("left-to-right") on the stack.
3257 ZoneList<Expression*>* args = expr->arguments();
3258 int arg_count = args->length();
3259 for (int i = 0; i < arg_count; i++) {
3260 VisitForStackValue(args->at(i));
3263 // Call the construct call builtin that handles allocation and
3264 // constructor invocation.
3265 SetConstructCallPosition(expr);
3267 // Load function and argument count into r1 and r0.
3268 __ mov(r0, Operand(arg_count));
3269 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3271 // Record call targets in unoptimized code.
3272 if (FLAG_pretenuring_call_new) {
3273 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3274 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3275 expr->CallNewFeedbackSlot().ToInt() + 1);
3278 __ Move(r2, FeedbackVector());
3279 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3281 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3282 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3283 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3284 context()->Plug(r0);
3288 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3289 SuperCallReference* super_call_ref =
3290 expr->expression()->AsSuperCallReference();
3291 DCHECK_NOT_NULL(super_call_ref);
3293 EmitLoadSuperConstructor(super_call_ref);
3294 __ push(result_register());
3296 // Push the arguments ("left-to-right") on the stack.
3297 ZoneList<Expression*>* args = expr->arguments();
3298 int arg_count = args->length();
3299 for (int i = 0; i < arg_count; i++) {
3300 VisitForStackValue(args->at(i));
3303 // Call the construct call builtin that handles allocation and
3304 // constructor invocation.
3305 SetConstructCallPosition(expr);
3307 // Load original constructor into r4.
3308 VisitForAccumulatorValue(super_call_ref->new_target_var());
3309 __ mov(r4, result_register());
3311 // Load function and argument count into r1 and r0.
3312 __ mov(r0, Operand(arg_count));
3313 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3315 // Record call targets in unoptimized code.
3316 if (FLAG_pretenuring_call_new) {
3318 /* TODO(dslomov): support pretenuring.
3319 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3320 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3321 expr->CallNewFeedbackSlot().ToInt() + 1);
3325 __ Move(r2, FeedbackVector());
3326 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3328 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3329 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3331 RecordJSReturnSite(expr);
3333 context()->Plug(r0);
3337 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3338 ZoneList<Expression*>* args = expr->arguments();
3339 DCHECK(args->length() == 1);
3341 VisitForAccumulatorValue(args->at(0));
3343 Label materialize_true, materialize_false;
3344 Label* if_true = NULL;
3345 Label* if_false = NULL;
3346 Label* fall_through = NULL;
3347 context()->PrepareTest(&materialize_true, &materialize_false,
3348 &if_true, &if_false, &fall_through);
3350 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3351 __ SmiTst(r0);
3352 Split(eq, if_true, if_false, fall_through);
3354 context()->Plug(if_true, if_false);
3358 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3359 ZoneList<Expression*>* args = expr->arguments();
3360 DCHECK(args->length() == 1);
3362 VisitForAccumulatorValue(args->at(0));
3364 Label materialize_true, materialize_false;
3365 Label* if_true = NULL;
3366 Label* if_false = NULL;
3367 Label* fall_through = NULL;
3368 context()->PrepareTest(&materialize_true, &materialize_false,
3369 &if_true, &if_false, &fall_through);
3371 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3372 __ NonNegativeSmiTst(r0);
3373 Split(eq, if_true, if_false, fall_through);
3375 context()->Plug(if_true, if_false);
3379 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3380 ZoneList<Expression*>* args = expr->arguments();
3381 DCHECK(args->length() == 1);
3383 VisitForAccumulatorValue(args->at(0));
3385 Label materialize_true, materialize_false;
3386 Label* if_true = NULL;
3387 Label* if_false = NULL;
3388 Label* fall_through = NULL;
3389 context()->PrepareTest(&materialize_true, &materialize_false,
3390 &if_true, &if_false, &fall_through);
3392 __ JumpIfSmi(r0, if_false);
3393 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3394 __ cmp(r0, ip);
3395 __ b(eq, if_true);
3396 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3397 // Undetectable objects behave like undefined when tested with typeof.
3398 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3399 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3400 __ b(ne, if_false);
3401 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3402 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3403 __ b(lt, if_false);
3404 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3405 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3406 Split(le, if_true, if_false, fall_through);
3408 context()->Plug(if_true, if_false);
3412 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3413 ZoneList<Expression*>* args = expr->arguments();
3414 DCHECK(args->length() == 1);
3416 VisitForAccumulatorValue(args->at(0));
3418 Label materialize_true, materialize_false;
3419 Label* if_true = NULL;
3420 Label* if_false = NULL;
3421 Label* fall_through = NULL;
3422 context()->PrepareTest(&materialize_true, &materialize_false,
3423 &if_true, &if_false, &fall_through);
3425 __ JumpIfSmi(r0, if_false);
3426 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3427 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3428 Split(ge, if_true, if_false, fall_through);
3430 context()->Plug(if_true, if_false);
3434 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3435 ZoneList<Expression*>* args = expr->arguments();
3436 DCHECK(args->length() == 1);
3438 VisitForAccumulatorValue(args->at(0));
3440 Label materialize_true, materialize_false;
3441 Label* if_true = NULL;
3442 Label* if_false = NULL;
3443 Label* fall_through = NULL;
3444 context()->PrepareTest(&materialize_true, &materialize_false,
3445 &if_true, &if_false, &fall_through);
3447 __ JumpIfSmi(r0, if_false);
3448 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3449 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3450 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3451 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3452 Split(ne, if_true, if_false, fall_through);
3454 context()->Plug(if_true, if_false);
3458 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3459 CallRuntime* expr) {
3460 ZoneList<Expression*>* args = expr->arguments();
3461 DCHECK(args->length() == 1);
3463 VisitForAccumulatorValue(args->at(0));
3465 Label materialize_true, materialize_false, skip_lookup;
3466 Label* if_true = NULL;
3467 Label* if_false = NULL;
3468 Label* fall_through = NULL;
3469 context()->PrepareTest(&materialize_true, &materialize_false,
3470 &if_true, &if_false, &fall_through);
3472 __ AssertNotSmi(r0);
3474 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3475 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3476 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3477 __ b(ne, &skip_lookup);
3479 // Check for fast case object. Generate false result for slow case object.
3480 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3481 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3482 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3486 // Look for valueOf name in the descriptor array, and indicate false if
3487 // found. Since we omit an enumeration index check, if it is added via a
3488 // transition that shares its descriptor array, this is a false positive.
3489 Label entry, loop, done;
3491 // Skip loop if no descriptors are valid.
3492 __ NumberOfOwnDescriptors(r3, r1);
3493 __ cmp(r3, Operand::Zero());
3496 __ LoadInstanceDescriptors(r1, r4);
3497 // r4: descriptor array.
3498 // r3: valid entries in the descriptor array.
3499 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3501 // Calculate location of the first key name.
3502 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3503 // Calculate the end of the descriptor array.
3505 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3507 // Loop through all the keys in the descriptor array. If one of these is the
3508 // string "valueOf" the result is false.
3509 // The use of ip to store the valueOf string assumes that it is not otherwise
3510 // used in the loop below.
3511 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3514 __ ldr(r3, MemOperand(r4, 0));
3517 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3519 __ cmp(r4, Operand(r2));
3524 // Set the bit in the map to indicate that there is no local valueOf field.
3525 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3526 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3527 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3529 __ bind(&skip_lookup);
3531 // If a valueOf property is not found on the object, check that its
3532 // prototype is the unmodified String prototype. If not, the result is false.
3533 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3534 __ JumpIfSmi(r2, if_false);
3535 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3536 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3537 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3538 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3539 __ cmp(r2, r3);
3540 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3541 Split(eq, if_true, if_false, fall_through);
3543 context()->Plug(if_true, if_false);
3547 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3548 ZoneList<Expression*>* args = expr->arguments();
3549 DCHECK(args->length() == 1);
3551 VisitForAccumulatorValue(args->at(0));
3553 Label materialize_true, materialize_false;
3554 Label* if_true = NULL;
3555 Label* if_false = NULL;
3556 Label* fall_through = NULL;
3557 context()->PrepareTest(&materialize_true, &materialize_false,
3558 &if_true, &if_false, &fall_through);
3560 __ JumpIfSmi(r0, if_false);
3561 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3562 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3563 Split(eq, if_true, if_false, fall_through);
3565 context()->Plug(if_true, if_false);
3569 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3570 ZoneList<Expression*>* args = expr->arguments();
3571 DCHECK(args->length() == 1);
3573 VisitForAccumulatorValue(args->at(0));
3575 Label materialize_true, materialize_false;
3576 Label* if_true = NULL;
3577 Label* if_false = NULL;
3578 Label* fall_through = NULL;
3579 context()->PrepareTest(&materialize_true, &materialize_false,
3580 &if_true, &if_false, &fall_through);
3582 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3583 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3584 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3585 __ cmp(r2, Operand(0x80000000));
3586 __ cmp(r1, Operand(0x00000000), eq);
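// (A heap number is -0 exactly when its exponent word is 0x80000000 and its
// mantissa word is zero; the second cmp is predicated on eq, so both words
// must match for the eq split below to take the true branch.)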
3588 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3589 Split(eq, if_true, if_false, fall_through);
3591 context()->Plug(if_true, if_false);
3595 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3596 ZoneList<Expression*>* args = expr->arguments();
3597 DCHECK(args->length() == 1);
3599 VisitForAccumulatorValue(args->at(0));
3601 Label materialize_true, materialize_false;
3602 Label* if_true = NULL;
3603 Label* if_false = NULL;
3604 Label* fall_through = NULL;
3605 context()->PrepareTest(&materialize_true, &materialize_false,
3606 &if_true, &if_false, &fall_through);
3608 __ JumpIfSmi(r0, if_false);
3609 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3610 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3611 Split(eq, if_true, if_false, fall_through);
3613 context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  Register map = r1;
  Register type_reg = r2;
  __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
  __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ls, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

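
// Note: the sub/cmp pair above folds the two-sided range check
//   FIRST_JS_PROXY_TYPE <= type <= LAST_JS_PROXY_TYPE
// into a single unsigned comparison. After subtracting FIRST_JS_PROXY_TYPE,
// any type below the range wraps around to a large unsigned value, so the
// ls (unsigned lower-or-same) condition accepts exactly the in-range types
// with one branch instead of two.
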
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);

  // Check the marker in the calling frame.
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

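
// Note: the walk above relies on the standard frame layout. The caller's fp
// is loaded first; if that frame's context slot holds the ARGUMENTS_ADAPTOR
// marker smi, the conditional ldr (predicated on eq) hops one frame further
// up, so r2 ends at the real calling frame either way. A construct call is
// then recognized by the CONSTRUCT marker smi in that frame's marker slot:
//
//   new f();  // marker slot holds Smi(StackFrame::CONSTRUCT) -> true
//   f();      // ordinary call frame -> false
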
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(r1);
  __ cmp(r0, r1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  // Get the number of formal parameters.
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);

  context()->Plug(r0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(r0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ b(eq, &function);

  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ b(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r2;
  __ GetMapConstructor(r0, r0, r1, instance_type);
  __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NOT_NULL(args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ ldr(scratch1, MemOperand(scratch1));
      __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(result);
}

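
// Note: the cached-field fast path above hinges on JSDate's cache stamp. The
// date object snapshots the isolate-wide date_cache_stamp when its derived
// fields (year, month, day, ...) are computed, and the stamp is bumped
// whenever the date cache is invalidated (for instance on a timezone
// change). Equal stamps mean the cached field at
// kValueOffset + index * kPointerSize is still valid, so the C call into
// get_date_field_function is only taken for a stale or uncached field.
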
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}

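
// Note: both stores above exploit the smi encoding (value << kSmiTagSize,
// with kSmiTagSize == 1). For the one-byte store the tagged index is shifted
// right by one (LSR kSmiTagSize) to recover the character index; for the
// two-byte store the tagged index is used as-is, because a smi index of n
// has raw value 2 * n, which is exactly the byte offset of character n in a
// two-byte string. The STATIC_ASSERT documents that this only works while
// smis are tagged with a single zero bit.
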
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);
  // Load the argument into r0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}

void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));

  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kGetPrototype, 1);
  __ Push(result_register());

  // Load original constructor into r4.
  __ ldr(r4, MemOperand(sp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, args_set_up, runtime;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adaptor_frame);
  // The default constructor has no arguments, so no adaptor frame means no
  // arguments to copy.
  __ mov(r0, Operand::Zero());
  __ b(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r1, r1);
    __ mov(r0, r1);

    // Get arguments pointer in r2.
    __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
    __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));

    Label loop;
    __ bind(&loop);
    // Pre-decrement r2 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
    __ Push(r3);
    __ sub(r1, r1, Operand(1));
    __ cmp(r1, Operand::Zero());
    __ b(ne, &loop);
  }

  __ bind(&args_set_up);
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}

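
// Note: the adaptor-frame copy loop above re-pushes the caller's actual
// arguments so the super constructor sees them unchanged. r1 counts down
// from the argument count while r2 walks down from the caller SP; the
// NegPreIndex addressing mode decrements r2 by kPointerSize before each
// load, which conveniently skips the receiver slot on the first iteration.
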
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  DCHECK_NOT_NULL(args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCacheRT, 2);

  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}

void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;    // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  // elements: Fixed array of strings.
  // array_length: Length of the fixed array of strings (not smi).
  // separator: Separator string.
  // string_length: Accumulated sum of string lengths (smi).
  // element: Current array element.
  // elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  // separator: Separator string.
  // array_length: Length of the array.
  // string_length: Sum of string lengths (smi).
  // elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);
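
  // Note: the smull above produces a 64-bit product (low word in scratch,
  // high word in ip) of array_length and the separator length. The result
  // must still fit in a smi, i.e. in 31 signed bits, so the check is that
  // the top 33 bits of the product are zero: ip must be 0 and the sign bit
  // of the low word must be clear. For example, array_length = 0x10000 and
  // separator length 0x8000 give a product of 0x80000000, which trips the
  // tst and bails out rather than wrapping to a negative smi.
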
  // Get first element in the array to free up the elements register to be
  // used for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  // element: First array element.
  // separator: Separator string.
  // string_length: Length of result string (not smi).
  // array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch,
                           string,        // used as scratch
                           elements_end,  // used as scratch
                           &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  // result_pos: the position to which we are currently copying characters.
  // element: Current array element.
  // elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  // result_pos: the position to which we are currently copying characters.
  // element: Current array element.
  // elements_end: Array end.
  // separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  // result_pos: the position to which we are currently copying characters.
  // element: Current array element.
  // elements_end: Array end.
  // separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push the builtins object as the receiver.
  Register receiver = LoadDescriptor::ReceiverRegister();
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
  __ push(receiver);

  // Load the function from the receiver.
  __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF);
}

void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr, arg_count);
  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}

void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);
  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(r0);
      }
    }
  }
}

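
// Note: a sketch of what one expansion of CALL_INTRINSIC_GENERATOR looks
// like, assuming IsArray is among the names produced by
// FOR_EACH_FULL_CODE_INTRINSIC:
//
//   case Runtime::kInlineIsArray: {
//     Comment cmnt(masm_, "[ InlineIsArray");
//     return EmitIsArray(expr);
//   }
//
// Each %_Intrinsic call with a full-codegen fast path thus dispatches
// straight to its Emit* helper above, and everything else falls through to
// the generic CallRuntime path in the default case.
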
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(r1, Operand(Smi::FromInt(language_mode())));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
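
  // Note: smis carry their value shifted left by one, so adding
  // Smi::FromInt(count_value) adds the raw word 2 * count_value and stays in
  // smi form without untagging. The SetCC add routes signed overflow (the V
  // flag) to the undo path: e.g. ++x on the largest smi overflows, the add
  // is reversed, and the generic BinaryOpIC below does the job instead.
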
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->float32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FLOAT32X4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

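
// Note: the undetectable-object handling above is what makes typeof behave
// for host objects like document.all, roughly:
//
//   typeof document.all  // "undefined", because the map's kIsUndetectable
//                        // bit is set even though a real object is present
//
// which is why the string and object branches must explicitly reject
// undetectable maps while the undefined branch accepts them.
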
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
          isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}

Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}

void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}

// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook the return address in the link register to the stack (smi-encoded
  // Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ClearPendingMessage();
}

void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore the cooked return address from stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}

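
// Note: the return address is "cooked" into a code-relative delta because
// the finally block may trigger a GC that moves the Code object, which
// would invalidate a raw lr value saved on the stack. The delta is
// smi-tagged so the GC treats the slot as plain data rather than a pointer;
// un-cooking adds the (possibly relocated) code object's address back and
// returns by writing directly to pc.
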
void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(r1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}

void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
  DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Operand(SmiFromSlot(slot)));
}


#undef __

static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}

void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // the kProfileCounterResetSequenceLength bytes of the reset sequence.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}

BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM