1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
19 #include "src/arm/code-stubs-arm.h"
20 #include "src/arm/macro-assembler-arm.h"
25 #define __ ACCESS_MASM(masm_)
28 // A patch site is a location in the code that can be patched. This class has
29 // a number of methods to emit the patchable code and the method EmitPatchInfo
30 // to record a marker back to the patchable code. The marker is a
31 // cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw 12-bit
32 // immediate value) is the delta from the pc to the first instruction of
33 // the patchable code.
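// For example, a delta of 4660 instructions would be recorded as cmp r1, #565,
// since 4660 = 1 * 0x00000fff + 565 (see EmitPatchInfo below).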
34 class JumpPatchSite BASE_EMBEDDED {
36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
38 info_emitted_ = false;
43 DCHECK(patch_site_.is_bound() == info_emitted_);
46 // When initially emitting this, ensure that a jump is always generated to skip
47 // the inlined smi code.
48 void EmitJumpIfNotSmi(Register reg, Label* target) {
49 DCHECK(!patch_site_.is_bound() && !info_emitted_);
50 Assembler::BlockConstPoolScope block_const_pool(masm_);
51 __ bind(&patch_site_);
52 __ cmp(reg, Operand(reg));
53 __ b(eq, target); // Always taken before patched.
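// When the inlined smi check is later enabled, PatchInlinedSmiCode rewrites
// the cmp above into a tst against kSmiTagMask and inverts this branch
// condition, so the check starts taking effect.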
56 // When initially emitting this, ensure that a jump is never generated to skip
57 // the inlined smi code.
58 void EmitJumpIfSmi(Register reg, Label* target) {
59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockConstPoolScope block_const_pool(masm_);
61 __ bind(&patch_site_);
62 __ cmp(reg, Operand(reg));
63 __ b(ne, target); // Never taken before patched.
66 void EmitPatchInfo() {
67 // Block literal pool emission whilst recording patch site information.
68 Assembler::BlockConstPoolScope block_const_pool(masm_);
69 if (patch_site_.is_bound()) {
70 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
71 Register reg;
72 reg.set_code(delta_to_patch_site / kOff12Mask);
73 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
78 __ nop(); // Signals no inlined smi code.
83 MacroAssembler* masm_;
91 // Generate code for a JS function. On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right. The actual
93 // argument count matches the formal parameter count expected by the function.
96 // The live registers are:
97 // o r1: the JS function object being called (i.e., ourselves)
99 // o pp: our caller's constant pool pointer (if enabled)
100 // o fp: our caller's frame pointer
101 // o sp: stack pointer
102 // o lr: return address
104 // The function builds a JS frame. Please see JavaScriptFrameConstants in
105 // frames-arm.h for its layout.
106 void FullCodeGenerator::Generate() {
107 CompilationInfo* info = info_;
108 profiling_counter_ = isolate()->factory()->NewCell(
109 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
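// The counter cell holds the remaining interrupt budget; it is decremented on
// back edges and returns (see EmitProfilingCounterDecrement) and an interrupt
// check is performed once it goes negative.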
110 SetFunctionPosition(function());
111 Comment cmnt(masm_, "[ function compiled by full code generator");
113 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
116 if (strlen(FLAG_stop_at) > 0 &&
117 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
122 // Sloppy mode functions and builtins need to replace the receiver with the
123 // global proxy when called as functions (without an explicit receiver object).
125 if (is_sloppy(info->language_mode()) && !info->is_native() &&
126 info->MayUseThis() && info->scope()->has_this_declaration()) {
128 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
129 __ ldr(r2, MemOperand(sp, receiver_offset));
130 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
133 __ ldr(r2, GlobalObjectOperand());
134 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
136 __ str(r2, MemOperand(sp, receiver_offset));
141 // Open a frame scope to indicate that there is a frame on the stack. The
142 // MANUAL indicates that the scope shouldn't actually generate code to set up
143 // the frame (that is done below).
144 FrameScope frame_scope(masm_, StackFrame::MANUAL);
146 info->set_prologue_offset(masm_->pc_offset());
147 __ Prologue(info->IsCodePreAgingActive());
148 info->AddNoFrameRange(0, masm_->pc_offset());
150 { Comment cmnt(masm_, "[ Allocate locals");
151 int locals_count = info->scope()->num_stack_slots();
152 // Generators allocate locals, if any, in context slots.
153 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
154 if (locals_count > 0) {
155 if (locals_count >= 128) {
157 __ sub(r9, sp, Operand(locals_count * kPointerSize));
158 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
159 __ cmp(r9, Operand(r2));
161 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
164 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
165 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
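// Initialize the locals in batches: a counted loop of kMaxPushes pushes of
// undefined for full batches, then straight-line pushes for the remainder.
// Smaller batches are used when optimizing for size.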
166 if (locals_count >= kMaxPushes) {
167 int loop_iterations = locals_count / kMaxPushes;
168 __ mov(r2, Operand(loop_iterations));
170 __ bind(&loop_header);
172 for (int i = 0; i < kMaxPushes; i++) {
175 // Continue loop if not done.
176 __ sub(r2, r2, Operand(1), SetCC);
177 __ b(&loop_header, ne);
179 int remaining = locals_count % kMaxPushes;
180 // Emit the remaining pushes.
181 for (int i = 0; i < remaining; i++) {
187 bool function_in_register = true;
189 // Possibly allocate a local context.
190 if (info->scope()->num_heap_slots() > 0) {
191 // Argument to NewContext is the function, which is still in r1.
192 Comment cmnt(masm_, "[ Allocate context");
193 bool need_write_barrier = true;
194 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
195 if (info->scope()->is_script_scope()) {
197 __ Push(info->scope()->GetScopeInfo(info->isolate()));
198 __ CallRuntime(Runtime::kNewScriptContext, 2);
199 } else if (slots <= FastNewContextStub::kMaximumSlots) {
200 FastNewContextStub stub(isolate(), slots);
202 // Result of FastNewContextStub is always in new space.
203 need_write_barrier = false;
206 __ CallRuntime(Runtime::kNewFunctionContext, 1);
208 function_in_register = false;
209 // Context is returned in r0. It replaces the context passed to us.
210 // It's saved in the stack and kept live in cp.
212 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
213 // Copy any necessary parameters into the context.
214 int num_parameters = info->scope()->num_parameters();
215 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
216 for (int i = first_parameter; i < num_parameters; i++) {
217 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
218 if (var->IsContextSlot()) {
219 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
220 (num_parameters - 1 - i) * kPointerSize;
221 // Load parameter from stack.
222 __ ldr(r0, MemOperand(fp, parameter_offset));
223 // Store it in the context.
224 MemOperand target = ContextOperand(cp, var->index());
227 // Update the write barrier.
228 if (need_write_barrier) {
229 __ RecordWriteContextSlot(
230 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
231 } else if (FLAG_debug_code) {
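// Skipping the write barrier is safe here because the context was freshly
// allocated in new space by FastNewContextStub; in debug code, verify that
// it really is in new space.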
233 __ JumpIfInNewSpace(cp, r0, &done);
234 __ Abort(kExpectedNewSpaceObject);
241 // Possibly set up a local binding to the this function which is used in
242 // derived constructors with super calls.
243 Variable* this_function_var = scope()->this_function_var();
244 if (this_function_var != nullptr) {
245 Comment cmnt(masm_, "[ This function");
246 if (!function_in_register) {
247 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
248 // The write barrier clobbers the register again; keep it marked as such.
250 SetVar(this_function_var, r1, r0, r2);
253 Variable* new_target_var = scope()->new_target_var();
254 if (new_target_var != nullptr) {
255 Comment cmnt(masm_, "[ new.target");
257 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
258 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
259 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
260 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
261 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
262 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
263 Label non_construct_frame, done;
265 __ b(ne, &non_construct_frame);
266 __ ldr(r0, MemOperand(r2, ConstructFrameConstants::kOriginalConstructorOffset));
270 __ bind(&non_construct_frame);
271 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
274 SetVar(new_target_var, r0, r2, r3);
277 // Possibly allocate RestParameters.
278 int rest_index;
279 Variable* rest_param = scope()->rest_parameter(&rest_index);
281 Comment cmnt(masm_, "[ Allocate rest parameter array");
283 int num_parameters = info->scope()->num_parameters();
284 int offset = num_parameters * kPointerSize;
286 __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
287 __ mov(r2, Operand(Smi::FromInt(num_parameters)));
288 __ mov(r1, Operand(Smi::FromInt(rest_index)));
289 __ mov(r0, Operand(Smi::FromInt(language_mode())));
290 __ Push(r3, r2, r1, r0);
292 RestParamAccessStub stub(isolate());
295 SetVar(rest_param, r0, r1, r2);
298 Variable* arguments = scope()->arguments();
299 if (arguments != NULL) {
300 // Function uses arguments object.
301 Comment cmnt(masm_, "[ Allocate arguments object");
302 if (!function_in_register) {
303 // Load this again, if it's used by the local context below.
304 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
308 // Receiver is just before the parameters on the caller's stack.
309 int num_parameters = info->scope()->num_parameters();
310 int offset = num_parameters * kPointerSize;
312 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
313 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
316 // Arguments to ArgumentsAccessStub:
317 // function, receiver address, parameter count.
318 // The stub will rewrite receiver and parameter count if the previous
319 // stack frame was an arguments adapter frame.
320 ArgumentsAccessStub::Type type;
321 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
322 type = ArgumentsAccessStub::NEW_STRICT;
323 } else if (function()->has_duplicate_parameters()) {
324 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
326 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
328 ArgumentsAccessStub stub(isolate(), type);
331 SetVar(arguments, r0, r1, r2);
336 __ CallRuntime(Runtime::kTraceEnter, 0);
339 // Visit the declarations and body unless there is an illegal redeclaration.
341 if (scope()->HasIllegalRedeclaration()) {
342 Comment cmnt(masm_, "[ Declarations");
343 scope()->VisitIllegalRedeclaration(this);
346 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
347 { Comment cmnt(masm_, "[ Declarations");
348 VisitDeclarations(scope()->declarations());
351 { Comment cmnt(masm_, "[ Stack check");
352 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
354 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
355 __ cmp(sp, Operand(ip));
357 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
358 PredictableCodeSizeScope predictable(masm_);
359 predictable.ExpectSize(
360 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
361 __ Call(stack_check, RelocInfo::CODE_TARGET);
365 { Comment cmnt(masm_, "[ Body");
366 DCHECK(loop_depth() == 0);
367 VisitStatements(function()->body());
368 DCHECK(loop_depth() == 0);
372 // Always emit a 'return undefined' in case control fell off the end of the body.
374 { Comment cmnt(masm_, "[ return <undefined>;");
375 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
377 EmitReturnSequence();
379 // Force emit the constant pool, so it doesn't get emitted in the middle
380 // of the back edge table.
381 masm()->CheckConstPool(true, false);
385 void FullCodeGenerator::ClearAccumulator() {
386 __ mov(r0, Operand(Smi::FromInt(0)));
390 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
391 __ mov(r2, Operand(profiling_counter_));
392 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
393 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
394 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
398 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
399 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
400 #else
401 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
402 #endif
405 void FullCodeGenerator::EmitProfilingCounterReset() {
406 Assembler::BlockConstPoolScope block_const_pool(masm_);
407 PredictableCodeSizeScope predictable_code_size_scope(
408 masm_, kProfileCounterResetSequenceLength);
409 Label start;
410 __ bind(&start);
411 int reset_value = FLAG_interrupt_budget;
412 if (info_->is_debug()) {
413 // Detect debug break requests as soon as possible.
414 reset_value = FLAG_interrupt_budget >> 4;
416 __ mov(r2, Operand(profiling_counter_));
417 // The mov instruction above can be either 1 to 3 instructions (for ARMv7) or
418 // 1 to 5 instructions (for ARMv6), depending on whether an extended constant
419 // pool is needed - insert nops to compensate.
420 int expected_instr_count =
421 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
422 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
423 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
424 __ nop();
425 }
426 __ mov(r3, Operand(Smi::FromInt(reset_value)));
427 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
431 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
432 Label* back_edge_target) {
433 Comment cmnt(masm_, "[ Back edge bookkeeping");
434 // Block literal pools whilst emitting back edge code.
435 Assembler::BlockConstPoolScope block_const_pool(masm_);
438 DCHECK(back_edge_target->is_bound());
439 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
440 int weight = Min(kMaxBackEdgeWeight,
441 Max(1, distance / kCodeSizeMultiplier));
442 EmitProfilingCounterDecrement(weight);
444 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
446 // Record a mapping of this PC offset to the OSR id. This is used to find
447 // the AST id from the unoptimized code in order to use it as a key into
448 // the deoptimization input data found in the optimized code.
449 RecordBackEdge(stmt->OsrEntryId());
451 EmitProfilingCounterReset();
454 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
455 // Record a mapping of the OSR id to this PC. This is used if the OSR
456 // entry becomes the target of a bailout. We don't expect it to be, but
457 // we want it to work if it is.
458 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
462 void FullCodeGenerator::EmitReturnSequence() {
463 Comment cmnt(masm_, "[ Return sequence");
464 if (return_label_.is_bound()) {
465 __ b(&return_label_);
466 } else {
467 __ bind(&return_label_);
469 // Push the return value on the stack as the parameter.
470 // Runtime::TraceExit returns its parameter in r0.
472 __ CallRuntime(Runtime::kTraceExit, 1);
474 // Pretend that the exit is a backwards jump to the entry.
475 int weight = 1;
476 if (info_->ShouldSelfOptimize()) {
477 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
478 } else {
479 int distance = masm_->pc_offset();
480 weight = Min(kMaxBackEdgeWeight,
481 Max(1, distance / kCodeSizeMultiplier));
482 }
483 EmitProfilingCounterDecrement(weight);
487 __ Call(isolate()->builtins()->InterruptCheck(),
488 RelocInfo::CODE_TARGET);
490 EmitProfilingCounterReset();
493 // Make sure that the constant pool is not emitted inside of the return sequence.
495 { Assembler::BlockConstPoolScope block_const_pool(masm_);
496 int32_t arg_count = info_->scope()->num_parameters() + 1;
497 int32_t sp_delta = arg_count * kPointerSize;
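// sp_delta removes the receiver plus the formal parameters from the stack on
// return.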
498 SetReturnPosition(function());
499 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
500 PredictableCodeSizeScope predictable(masm_, -1);
501 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
502 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
503 __ add(sp, sp, Operand(sp_delta));
505 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
512 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
513 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
514 codegen()->GetVar(result_register(), var);
515 __ push(result_register());
519 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
523 void FullCodeGenerator::AccumulatorValueContext::Plug(
524 Heap::RootListIndex index) const {
525 __ LoadRoot(result_register(), index);
529 void FullCodeGenerator::StackValueContext::Plug(
530 Heap::RootListIndex index) const {
531 __ LoadRoot(result_register(), index);
532 __ push(result_register());
536 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
537 codegen()->PrepareForBailoutBeforeSplit(condition(),
541 if (index == Heap::kUndefinedValueRootIndex ||
542 index == Heap::kNullValueRootIndex ||
543 index == Heap::kFalseValueRootIndex) {
544 if (false_label_ != fall_through_) __ b(false_label_);
545 } else if (index == Heap::kTrueValueRootIndex) {
546 if (true_label_ != fall_through_) __ b(true_label_);
548 __ LoadRoot(result_register(), index);
549 codegen()->DoTest(this);
554 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
558 void FullCodeGenerator::AccumulatorValueContext::Plug(
559 Handle<Object> lit) const {
560 __ mov(result_register(), Operand(lit));
564 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
565 // Immediates cannot be pushed directly.
566 __ mov(result_register(), Operand(lit));
567 __ push(result_register());
571 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
572 codegen()->PrepareForBailoutBeforeSplit(condition(),
576 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
577 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
578 if (false_label_ != fall_through_) __ b(false_label_);
579 } else if (lit->IsTrue() || lit->IsJSObject()) {
580 if (true_label_ != fall_through_) __ b(true_label_);
581 } else if (lit->IsString()) {
582 if (String::cast(*lit)->length() == 0) {
583 if (false_label_ != fall_through_) __ b(false_label_);
585 if (true_label_ != fall_through_) __ b(true_label_);
587 } else if (lit->IsSmi()) {
588 if (Smi::cast(*lit)->value() == 0) {
589 if (false_label_ != fall_through_) __ b(false_label_);
591 if (true_label_ != fall_through_) __ b(true_label_);
594 // For simplicity we always test the accumulator register.
595 __ mov(result_register(), Operand(lit));
596 codegen()->DoTest(this);
601 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
602 Register reg) const {
608 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
610 Register reg) const {
613 __ Move(result_register(), reg);
617 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
618 Register reg) const {
620 if (count > 1) __ Drop(count - 1);
621 __ str(reg, MemOperand(sp, 0));
625 void FullCodeGenerator::TestContext::DropAndPlug(int count,
626 Register reg) const {
628 // For simplicity we always test the accumulator register.
630 __ Move(result_register(), reg);
631 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
632 codegen()->DoTest(this);
636 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
637 Label* materialize_false) const {
638 DCHECK(materialize_true == materialize_false);
639 __ bind(materialize_true);
643 void FullCodeGenerator::AccumulatorValueContext::Plug(
644 Label* materialize_true,
645 Label* materialize_false) const {
647 __ bind(materialize_true);
648 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
650 __ bind(materialize_false);
651 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
656 void FullCodeGenerator::StackValueContext::Plug(
657 Label* materialize_true,
658 Label* materialize_false) const {
660 __ bind(materialize_true);
661 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
663 __ bind(materialize_false);
664 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
670 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
671 Label* materialize_false) const {
672 DCHECK(materialize_true == true_label_);
673 DCHECK(materialize_false == false_label_);
677 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
678 Heap::RootListIndex value_root_index =
679 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
680 __ LoadRoot(result_register(), value_root_index);
684 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
685 Heap::RootListIndex value_root_index =
686 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
687 __ LoadRoot(ip, value_root_index);
692 void FullCodeGenerator::TestContext::Plug(bool flag) const {
693 codegen()->PrepareForBailoutBeforeSplit(condition(),
698 if (true_label_ != fall_through_) __ b(true_label_);
700 if (false_label_ != fall_through_) __ b(false_label_);
705 void FullCodeGenerator::DoTest(Expression* condition,
706 Label* if_true,
707 Label* if_false,
708 Label* fall_through) {
709 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
710 CallIC(ic, condition->test_id());
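// The ToBoolean IC leaves zero in the result register when the value is false
// and non-zero otherwise, so testing the register against itself sets the
// flags needed by Split below.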
711 __ tst(result_register(), result_register());
712 Split(ne, if_true, if_false, fall_through);
716 void FullCodeGenerator::Split(Condition cond,
717 Label* if_true,
718 Label* if_false,
719 Label* fall_through) {
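// Emit the minimum number of branches: a label that equals the fall-through
// needs no branch at all.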
720 if (if_false == fall_through) {
722 } else if (if_true == fall_through) {
723 __ b(NegateCondition(cond), if_false);
731 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
732 DCHECK(var->IsStackAllocated());
733 // Offset is negative because higher indexes are at lower addresses.
734 int offset = -var->index() * kPointerSize;
735 // Adjust by a (parameter or local) base offset.
736 if (var->IsParameter()) {
737 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
739 offset += JavaScriptFrameConstants::kLocal0Offset;
741 return MemOperand(fp, offset);
745 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
746 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
747 if (var->IsContextSlot()) {
748 int context_chain_length = scope()->ContextChainLength(var->scope());
749 __ LoadContext(scratch, context_chain_length);
750 return ContextOperand(scratch, var->index());
752 return StackOperand(var);
757 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
758 // Use destination as scratch.
759 MemOperand location = VarOperand(var, dest);
760 __ ldr(dest, location);
764 void FullCodeGenerator::SetVar(Variable* var,
768 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
769 DCHECK(!scratch0.is(src));
770 DCHECK(!scratch0.is(scratch1));
771 DCHECK(!scratch1.is(src));
772 MemOperand location = VarOperand(var, scratch0);
773 __ str(src, location);
775 // Emit the write barrier code if the location is in the heap.
776 if (var->IsContextSlot()) {
777 __ RecordWriteContextSlot(scratch0,
787 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
788 bool should_normalize,
791 // Only prepare for bailouts before splits if we're in a test
792 // context. Otherwise, we let the Visit function deal with the
793 // preparation to avoid preparing with the same AST id twice.
794 if (!context()->IsTest() || !info_->IsOptimizable()) return;
797 if (should_normalize) __ b(&skip);
798 PrepareForBailout(expr, TOS_REG);
799 if (should_normalize) {
800 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
802 Split(eq, if_true, if_false, NULL);
808 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
809 // The variable in the declaration always resides in the current function context.
811 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
812 if (generate_debug_code_) {
813 // Check that we're not inside a with or catch context.
814 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
815 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
816 __ Check(ne, kDeclarationInWithContext);
817 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
818 __ Check(ne, kDeclarationInCatchContext);
823 void FullCodeGenerator::VisitVariableDeclaration(
824 VariableDeclaration* declaration) {
825 // If it was not possible to allocate the variable at compile time, we
826 // need to "declare" it at runtime to make sure it actually exists in the global object.
828 VariableProxy* proxy = declaration->proxy();
829 VariableMode mode = declaration->mode();
830 Variable* variable = proxy->var();
831 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
832 switch (variable->location()) {
833 case VariableLocation::GLOBAL:
834 case VariableLocation::UNALLOCATED:
835 globals_->Add(variable->name(), zone());
836 globals_->Add(variable->binding_needs_init()
837 ? isolate()->factory()->the_hole_value()
838 : isolate()->factory()->undefined_value(),
842 case VariableLocation::PARAMETER:
843 case VariableLocation::LOCAL:
845 Comment cmnt(masm_, "[ VariableDeclaration");
846 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
847 __ str(ip, StackOperand(variable));
851 case VariableLocation::CONTEXT:
853 Comment cmnt(masm_, "[ VariableDeclaration");
854 EmitDebugCheckDeclarationContext(variable);
855 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
856 __ str(ip, ContextOperand(cp, variable->index()));
857 // No write barrier since the_hole_value is in old space.
858 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
862 case VariableLocation::LOOKUP: {
863 Comment cmnt(masm_, "[ VariableDeclaration");
864 __ mov(r2, Operand(variable->name()));
865 // Declaration nodes are always introduced in one of four modes.
866 DCHECK(IsDeclaredVariableMode(mode));
867 PropertyAttributes attr =
868 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
869 __ mov(r1, Operand(Smi::FromInt(attr)));
870 // Push initial value, if any.
871 // Note: For variables we must not push an initial value (such as
872 // 'undefined') because we may have a (legal) redeclaration and we
873 // must not destroy the current value.
875 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
876 __ Push(cp, r2, r1, r0);
878 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
879 __ Push(cp, r2, r1, r0);
881 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
888 void FullCodeGenerator::VisitFunctionDeclaration(
889 FunctionDeclaration* declaration) {
890 VariableProxy* proxy = declaration->proxy();
891 Variable* variable = proxy->var();
892 switch (variable->location()) {
893 case VariableLocation::GLOBAL:
894 case VariableLocation::UNALLOCATED: {
895 globals_->Add(variable->name(), zone());
896 Handle<SharedFunctionInfo> function =
897 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
898 // Check for stack-overflow exception.
899 if (function.is_null()) return SetStackOverflow();
900 globals_->Add(function, zone());
904 case VariableLocation::PARAMETER:
905 case VariableLocation::LOCAL: {
906 Comment cmnt(masm_, "[ FunctionDeclaration");
907 VisitForAccumulatorValue(declaration->fun());
908 __ str(result_register(), StackOperand(variable));
912 case VariableLocation::CONTEXT: {
913 Comment cmnt(masm_, "[ FunctionDeclaration");
914 EmitDebugCheckDeclarationContext(variable);
915 VisitForAccumulatorValue(declaration->fun());
916 __ str(result_register(), ContextOperand(cp, variable->index()));
917 int offset = Context::SlotOffset(variable->index());
918 // We know that we have written a function, which is not a smi.
919 __ RecordWriteContextSlot(cp,
927 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
931 case VariableLocation::LOOKUP: {
932 Comment cmnt(masm_, "[ FunctionDeclaration");
933 __ mov(r2, Operand(variable->name()));
934 __ mov(r1, Operand(Smi::FromInt(NONE)));
936 // Push initial value for function declaration.
937 VisitForStackValue(declaration->fun());
938 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
946 // Call the runtime to declare the globals.
947 // The context is the first argument.
948 __ mov(r1, Operand(pairs));
949 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
951 __ CallRuntime(Runtime::kDeclareGlobals, 3);
952 // Return value is ignored.
956 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
957 // Call the runtime to declare the modules.
958 __ Push(descriptions);
959 __ CallRuntime(Runtime::kDeclareModules, 1);
960 // Return value is ignored.
964 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
965 Comment cmnt(masm_, "[ SwitchStatement");
966 Breakable nested_statement(this, stmt);
967 SetStatementPosition(stmt);
969 // Keep the switch value on the stack until a case matches.
970 VisitForStackValue(stmt->tag());
971 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
973 ZoneList<CaseClause*>* clauses = stmt->cases();
974 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
976 Label next_test; // Recycled for each test.
977 // Compile all the tests with branches to their bodies.
978 for (int i = 0; i < clauses->length(); i++) {
979 CaseClause* clause = clauses->at(i);
980 clause->body_target()->Unuse();
982 // The default is not a test, but remember it as final fall through.
983 if (clause->is_default()) {
984 default_clause = clause;
988 Comment cmnt(masm_, "[ Case comparison");
992 // Compile the label expression.
993 VisitForAccumulatorValue(clause->label());
995 // Perform the comparison as if via '==='.
996 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
997 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
998 JumpPatchSite patch_site(masm_);
999 if (inline_smi_code) {
1002 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1005 __ b(ne, &next_test);
1006 __ Drop(1); // Switch value is no longer needed.
1007 __ b(clause->body_target());
1008 __ bind(&slow_case);
1011 // Record position before stub call for type feedback.
1012 SetExpressionPosition(clause);
1013 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1014 strength(language_mode())).code();
1015 CallIC(ic, clause->CompareId());
1016 patch_site.EmitPatchInfo();
1020 PrepareForBailout(clause, TOS_REG);
1021 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1023 __ b(ne, &next_test);
1025 __ jmp(clause->body_target());
1028 __ cmp(r0, Operand::Zero());
1029 __ b(ne, &next_test);
1030 __ Drop(1); // Switch value is no longer needed.
1031 __ b(clause->body_target());
1034 // Discard the test value and jump to the default if present, otherwise to
1035 // the end of the statement.
1036 __ bind(&next_test);
1037 __ Drop(1); // Switch value is no longer needed.
1038 if (default_clause == NULL) {
1039 __ b(nested_statement.break_label());
1041 __ b(default_clause->body_target());
1044 // Compile all the case bodies.
1045 for (int i = 0; i < clauses->length(); i++) {
1046 Comment cmnt(masm_, "[ Case body");
1047 CaseClause* clause = clauses->at(i);
1048 __ bind(clause->body_target());
1049 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1050 VisitStatements(clause->statements());
1053 __ bind(nested_statement.break_label());
1054 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1058 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1059 Comment cmnt(masm_, "[ ForInStatement");
1060 SetStatementPosition(stmt, SKIP_BREAK);
1062 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1065 ForIn loop_statement(this, stmt);
1066 increment_loop_depth();
1068 // Get the object to enumerate over. If the object is null or undefined, skip
1069 // over the loop. See ECMA-262 version 5, section 12.6.4.
1070 SetExpressionAsStatementPosition(stmt->enumerable());
1071 VisitForAccumulatorValue(stmt->enumerable());
1072 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1075 Register null_value = r5;
1076 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1077 __ cmp(r0, null_value);
1080 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1082 // Convert the object to a JS object.
1083 Label convert, done_convert;
1084 __ JumpIfSmi(r0, &convert);
1085 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1086 __ b(ge, &done_convert);
1089 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1090 __ bind(&done_convert);
1091 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1094 // Check for proxies.
1096 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1097 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1098 __ b(le, &call_runtime);
1100 // Check cache validity in generated code. This is a fast case for
1101 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1102 // guarantee cache validity, call the runtime system to check cache
1103 // validity or get the property names in a fixed array.
1104 __ CheckEnumCache(null_value, &call_runtime);
1106 // The enum cache is valid. Load the map of the object being
1107 // iterated over and use the cache for the iteration.
1109 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1112 // Get the set of properties to enumerate.
1113 __ bind(&call_runtime);
1114 __ push(r0); // Duplicate the enumerable object on the stack.
1115 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1116 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1118 // If we got a map from the runtime call, we can do a fast
1119 // modification check. Otherwise, we got a fixed array, and we have
1120 // to do a slow check.
1122 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1123 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1125 __ b(ne, &fixed_array);
1127 // We got a map in register r0. Get the enumeration cache from it.
1128 Label no_descriptors;
1129 __ bind(&use_cache);
1131 __ EnumLength(r1, r0);
1132 __ cmp(r1, Operand(Smi::FromInt(0)));
1133 __ b(eq, &no_descriptors);
1135 __ LoadInstanceDescriptors(r0, r2);
1136 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1137 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1139 // Set up the four remaining stack slots.
1140 __ push(r0); // Map.
1141 __ mov(r0, Operand(Smi::FromInt(0)));
1142 // Push enumeration cache, enumeration cache length (as smi) and zero.
1143 __ Push(r2, r1, r0);
1146 __ bind(&no_descriptors);
1150 // We got a fixed array in register r0. Iterate through that.
1152 __ bind(&fixed_array);
1154 __ Move(r1, FeedbackVector());
1155 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1156 int vector_index = FeedbackVector()->GetIndex(slot);
1157 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
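// Record the megamorphic sentinel in this loop's feedback vector slot to
// indicate that the generic (fixed array) path was taken rather than the
// enum cache.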
1159 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1160 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1161 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1162 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1163 __ b(gt, &non_proxy);
1164 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1165 __ bind(&non_proxy);
1166 __ Push(r1, r0); // Smi and array
1167 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1168 __ mov(r0, Operand(Smi::FromInt(0)));
1169 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
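// The loop below expects five stack slots (from deepest to shallowest): the
// enumerable object, the expected map (or a smi marker), the array of keys,
// its length (smi), and the current index (smi).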
1171 // Generate code for doing the condition check.
1172 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1174 SetExpressionAsStatementPosition(stmt->each());
1176 // Load the current count to r0, load the length to r1.
1177 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1178 __ cmp(r0, r1); // Compare to the array length.
1179 __ b(hs, loop_statement.break_label());
1181 // Get the current entry of the array into register r3.
1182 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1183 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1184 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
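// The smi index in r0 is used directly as a scaled offset (shifted left by
// kPointerSizeLog2 - kSmiTagSize), so it does not need to be untagged first.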
1186 // Get the expected map from the stack or a smi in the
1187 // permanent slow case into register r2.
1188 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1190 // Check if the expected map still matches that of the enumerable.
1191 // If not, we may have to filter the key.
1193 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1194 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1195 __ cmp(r4, Operand(r2));
1196 __ b(eq, &update_each);
1198 // For proxies, no filtering is done.
1199 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1200 __ cmp(r2, Operand(Smi::FromInt(0)));
1201 __ b(eq, &update_each);
1203 // Convert the entry to a string or (smi) 0 if it isn't a property
1204 // any more. If the property has been removed while iterating, we just skip it.
1206 __ push(r1); // Enumerable.
1207 __ push(r3); // Current entry.
1208 __ CallRuntime(Runtime::kForInFilter, 2);
1209 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1210 __ mov(r3, Operand(r0));
1211 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1213 __ b(eq, loop_statement.continue_label());
1215 // Update the 'each' property or variable from the possibly filtered
1216 // entry in register r3.
1217 __ bind(&update_each);
1218 __ mov(result_register(), r3);
1219 // Perform the assignment as if via '='.
1220 { EffectContext context(this);
1221 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1222 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1225 // Generate code for the body of the loop.
1226 Visit(stmt->body());
1228 // Generate code for going to the next element by incrementing
1229 // the index (smi) stored on top of the stack.
1230 __ bind(loop_statement.continue_label());
1232 __ add(r0, r0, Operand(Smi::FromInt(1)));
1235 EmitBackEdgeBookkeeping(stmt, &loop);
1238 // Remove the pointers stored on the stack.
1239 __ bind(loop_statement.break_label());
1242 // Exit and decrement the loop depth.
1243 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1245 decrement_loop_depth();
1249 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1251 // Use the fast case closure allocation code that allocates in new
1252 // space for nested functions that don't need literals cloning. If
1253 // we're running with the --always-opt or the --prepare-always-opt
1254 // flag, we need to use the runtime function so that the new function
1255 // we are creating here gets a chance to have its code optimized and
1256 // doesn't just get a copy of the existing unoptimized code.
1257 if (!FLAG_always_opt &&
1258 !FLAG_prepare_always_opt &&
1260 scope()->is_function_scope() &&
1261 info->num_literals() == 0) {
1262 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1263 __ mov(r2, Operand(info));
1266 __ mov(r0, Operand(info));
1267 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1268 : Heap::kFalseValueRootIndex);
1269 __ Push(cp, r0, r1);
1270 __ CallRuntime(Runtime::kNewClosure, 3);
1272 context()->Plug(r0);
1276 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1277 int offset,
1278 FeedbackVectorICSlot slot) {
1279 if (NeedsHomeObject(initializer)) {
1280 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1281 __ mov(StoreDescriptor::NameRegister(),
1282 Operand(isolate()->factory()->home_object_symbol()));
1283 __ ldr(StoreDescriptor::ValueRegister(),
1284 MemOperand(sp, offset * kPointerSize));
1285 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1291 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1292 TypeofMode typeof_mode,
1294 Register current = cp;
1300 if (s->num_heap_slots() > 0) {
1301 if (s->calls_sloppy_eval()) {
1302 // Check that extension is NULL.
1303 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1307 // Load next context in chain.
1308 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1309 // Walk the rest of the chain without clobbering cp.
1312 // If no outer scope calls eval, we do not need to check more
1313 // context extensions.
1314 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1315 s = s->outer_scope();
1318 if (s->is_eval_scope()) {
1320 if (!current.is(next)) {
1321 __ Move(next, current);
1324 // Terminate at native context.
1325 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1326 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1329 // Check that extension is NULL.
1330 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1333 // Load next context in chain.
1334 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1339 // All extension objects were empty and it is safe to use the normal global load machinery.
1341 EmitGlobalVariableLoad(proxy, typeof_mode);
1345 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1347 DCHECK(var->IsContextSlot());
1348 Register context = cp;
1352 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1353 if (s->num_heap_slots() > 0) {
1354 if (s->calls_sloppy_eval()) {
1355 // Check that extension is NULL.
1356 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1360 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1361 // Walk the rest of the chain without clobbering cp.
1365 // Check that last extension is NULL.
1366 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1370 // This function is used only for loads, not stores, so it's safe to
1371 // return a cp-based operand (the write barrier cannot be allowed to
1372 // destroy the cp register).
1373 return ContextOperand(context, var->index());
1377 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1378 TypeofMode typeof_mode,
1379 Label* slow, Label* done) {
1380 // Generate fast-case code for variables that might be shadowed by
1381 // eval-introduced variables. Eval is used a lot without
1382 // introducing variables. In those cases, we do not want to
1383 // perform a runtime call for all variables in the scope
1384 // containing the eval.
1385 Variable* var = proxy->var();
1386 if (var->mode() == DYNAMIC_GLOBAL) {
1387 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1389 } else if (var->mode() == DYNAMIC_LOCAL) {
1390 Variable* local = var->local_if_not_shadowed();
1391 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1392 if (local->mode() == LET || local->mode() == CONST ||
1393 local->mode() == CONST_LEGACY) {
1394 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1395 if (local->mode() == CONST_LEGACY) {
1396 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1397 } else { // LET || CONST
1399 __ mov(r0, Operand(var->name()));
1401 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1409 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1410 TypeofMode typeof_mode) {
1411 Variable* var = proxy->var();
1412 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1413 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1414 if (var->IsGlobalSlot()) {
1415 DCHECK(var->index() > 0);
1416 DCHECK(var->IsStaticGlobalObjectProperty());
1417 // Each var occupies two slots in the context: for reads and writes.
1418 const int slot = var->index();
1419 const int depth = scope()->ContextChainLength(var->scope());
1420 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1421 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
1422 __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
1423 Operand(var->name()));
1424 LoadGlobalViaContextStub stub(isolate(), depth);
1427 __ Push(Smi::FromInt(slot));
1428 __ Push(var->name());
1429 __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
1432 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1433 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1434 __ mov(LoadDescriptor::SlotRegister(),
1435 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1436 CallLoadIC(typeof_mode);
1441 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1442 TypeofMode typeof_mode) {
1443 // Record position before possible IC call.
1444 SetExpressionPosition(proxy);
1445 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1446 Variable* var = proxy->var();
1448 // Three cases: global variables, lookup variables, and all other types of variables.
1450 switch (var->location()) {
1451 case VariableLocation::GLOBAL:
1452 case VariableLocation::UNALLOCATED: {
1453 Comment cmnt(masm_, "[ Global variable");
1454 EmitGlobalVariableLoad(proxy, typeof_mode);
1455 context()->Plug(r0);
1459 case VariableLocation::PARAMETER:
1460 case VariableLocation::LOCAL:
1461 case VariableLocation::CONTEXT: {
1462 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1463 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1464 : "[ Stack variable");
1465 if (var->binding_needs_init()) {
1466 // var->scope() may be NULL when the proxy is located in eval code and
1467 // refers to a potential outside binding. Currently those bindings are
1468 // always looked up dynamically, i.e. in that case
1469 // var->location() == LOOKUP.
1471 DCHECK(var->scope() != NULL);
1473 // Check if the binding really needs an initialization check. The check
1474 // can be skipped in the following situation: we have a LET or CONST
1475 // binding in harmony mode, both the Variable and the VariableProxy have
1476 // the same declaration scope (i.e. they are both in global code, in the
1477 // same function or in the same eval code) and the VariableProxy is in
1478 // the source physically located after the initializer of the variable.
1480 // We cannot skip any initialization checks for CONST in non-harmony
1481 // mode because const variables may be declared but never initialized:
1482 // if (false) { const x; }; var y = x;
1484 // The condition on the declaration scopes is a conservative check for
1485 // nested functions that access a binding and are called before the
1486 // binding is initialized:
1487 // function() { f(); let x = 1; function f() { x = 2; } }
1489 bool skip_init_check;
1490 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1491 skip_init_check = false;
1492 } else if (var->is_this()) {
1493 CHECK(info_->function() != nullptr &&
1494 (info_->function()->kind() & kSubclassConstructor) != 0);
1495 // TODO(dslomov): implement 'this' hole check elimination.
1496 skip_init_check = false;
1498 // Check that we always have valid source position.
1499 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1500 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1501 skip_init_check = var->mode() != CONST_LEGACY &&
1502 var->initializer_position() < proxy->position();
1505 if (!skip_init_check) {
1506 // Let and const need a read barrier.
1508 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1509 if (var->mode() == LET || var->mode() == CONST) {
1510 // Throw a reference error when using an uninitialized let/const
1511 // binding in harmony mode.
1514 __ mov(r0, Operand(var->name()));
1516 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1519 // Uninitialized const bindings outside of harmony mode are unholed.
1520 DCHECK(var->mode() == CONST_LEGACY);
1521 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1523 context()->Plug(r0);
1527 context()->Plug(var);
1531 case VariableLocation::LOOKUP: {
1532 Comment cmnt(masm_, "[ Lookup variable");
1533 Label done, slow;
1534 // Generate code for loading from variables potentially shadowed
1535 // by eval-introduced variables.
1536 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1538 __ mov(r1, Operand(var->name()));
1539 __ Push(cp, r1); // Context and name.
1540 Runtime::FunctionId function_id =
1541 typeof_mode == NOT_INSIDE_TYPEOF
1542 ? Runtime::kLoadLookupSlot
1543 : Runtime::kLoadLookupSlotNoReferenceError;
1544 __ CallRuntime(function_id, 2);
1546 context()->Plug(r0);
1552 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1553 Comment cmnt(masm_, "[ RegExpLiteral");
1555 // Registers will be used as follows:
1556 // r5 = materialized value (RegExp literal)
1557 // r4 = JS function, literals array
1558 // r3 = literal index
1559 // r2 = RegExp pattern
1560 // r1 = RegExp flags
1561 // r0 = RegExp literal clone
1562 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1563 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1564 int literal_offset =
1565 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1566 __ ldr(r5, FieldMemOperand(r4, literal_offset));
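// The literals array slot holds either the materialized RegExp boilerplate or
// undefined if this literal has not been materialized yet.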
1567 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1569 __ b(ne, &materialized);
1571 // Create regexp literal using runtime function.
1572 // Result will be in r0.
1573 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1574 __ mov(r2, Operand(expr->pattern()));
1575 __ mov(r1, Operand(expr->flags()));
1576 __ Push(r4, r3, r2, r1);
1577 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1580 __ bind(&materialized);
1581 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1582 Label allocated, runtime_allocate;
1583 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1586 __ bind(&runtime_allocate);
1587 __ mov(r0, Operand(Smi::FromInt(size)));
1589 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1592 __ bind(&allocated);
1593 // After this, registers are used as follows:
1594 // r0: Newly allocated regexp.
1595 // r5: Materialized regexp.
1597 __ CopyFields(r0, r5, d0, size / kPointerSize);
1598 context()->Plug(r0);
1602 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1603 if (expression == NULL) {
1604 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1607 VisitForStackValue(expression);
1612 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1613 Comment cmnt(masm_, "[ ObjectLiteral");
1615 Handle<FixedArray> constant_properties = expr->constant_properties();
1616 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1617 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1618 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1619 __ mov(r1, Operand(constant_properties));
1620 int flags = expr->ComputeFlags();
1621 __ mov(r0, Operand(Smi::FromInt(flags)));
1622 if (MustCreateObjectLiteralWithRuntime(expr)) {
1623 __ Push(r3, r2, r1, r0);
1624 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1626 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1629 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1631 // If result_saved is true the result is on top of the stack. If
1632 // result_saved is false the result is in r0.
1633 bool result_saved = false;
1635 AccessorTable accessor_table(zone());
1636 int property_index = 0;
1637 // store_slot_index points to the vector IC slot for the next store IC used.
1638 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1639 // and must be updated if the number of store ICs emitted here changes.
1640 int store_slot_index = 0;
1641 for (; property_index < expr->properties()->length(); property_index++) {
1642 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1643 if (property->is_computed_name()) break;
1644 if (property->IsCompileTimeValue()) continue;
1646 Literal* key = property->key()->AsLiteral();
1647 Expression* value = property->value();
1648 if (!result_saved) {
1649 __ push(r0); // Save result on stack
1650 result_saved = true;
1652 switch (property->kind()) {
1653 case ObjectLiteral::Property::CONSTANT:
1655 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1656 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1658 case ObjectLiteral::Property::COMPUTED:
1659 // It is safe to use [[Put]] here because the boilerplate already
1660 // contains computed properties with an uninitialized value.
1661 if (key->value()->IsInternalizedString()) {
1662 if (property->emit_store()) {
1663 VisitForAccumulatorValue(value);
1664 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1665 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1666 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1667 if (FLAG_vector_stores) {
1668 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1671 CallStoreIC(key->LiteralFeedbackId());
1673 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1675 if (NeedsHomeObject(value)) {
1676 __ Move(StoreDescriptor::ReceiverRegister(), r0);
1677 __ mov(StoreDescriptor::NameRegister(),
1678 Operand(isolate()->factory()->home_object_symbol()));
1679 __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp));
1680 if (FLAG_vector_stores) {
1681 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1686 VisitForEffect(value);
1690 // Duplicate receiver on stack.
1691 __ ldr(r0, MemOperand(sp));
1693 VisitForStackValue(key);
1694 VisitForStackValue(value);
1695 if (property->emit_store()) {
1696 EmitSetHomeObjectIfNeeded(
1697 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1698 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
1700 __ CallRuntime(Runtime::kSetProperty, 4);
1705 case ObjectLiteral::Property::PROTOTYPE:
1706 // Duplicate receiver on stack.
1707 __ ldr(r0, MemOperand(sp));
1709 VisitForStackValue(value);
1710 DCHECK(property->emit_store());
1711 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1714 case ObjectLiteral::Property::GETTER:
1715 if (property->emit_store()) {
1716 accessor_table.lookup(key)->second->getter = value;
1719 case ObjectLiteral::Property::SETTER:
1720 if (property->emit_store()) {
1721 accessor_table.lookup(key)->second->setter = value;
1727 // Emit code to define accessors, using only a single call to the runtime for
1728 // each pair of corresponding getters and setters.
1729 for (AccessorTable::Iterator it = accessor_table.begin();
1730 it != accessor_table.end();
1732 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1734 VisitForStackValue(it->first);
1735 EmitAccessor(it->second->getter);
1736 EmitSetHomeObjectIfNeeded(
1737 it->second->getter, 2,
1738 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1739 EmitAccessor(it->second->setter);
1740 EmitSetHomeObjectIfNeeded(
1741 it->second->setter, 3,
1742 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1743 __ mov(r0, Operand(Smi::FromInt(NONE)));
1745 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1748 // Object literals have two parts. The "static" part on the left contains no
1749 // computed property names, and so we can compute its map ahead of time; see
1750 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1751 // starts with the first computed property name, and continues with all
1752 // properties to its right. All the code from above initializes the static
1753 // component of the object literal, and arranges for the map of the result to
1754 // reflect the static order in which the keys appear. For the dynamic
1755 // properties, we compile them into a series of "SetOwnProperty" runtime
1756 // calls. This will preserve insertion order.
1757 for (; property_index < expr->properties()->length(); property_index++) {
1758 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1760 Expression* value = property->value();
1761 if (!result_saved) {
1762 __ push(r0); // Save result on the stack
1763 result_saved = true;
1766 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1769 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1770 DCHECK(!property->is_computed_name());
1771 VisitForStackValue(value);
1772 DCHECK(property->emit_store());
1773 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1775 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1776 VisitForStackValue(value);
1777 EmitSetHomeObjectIfNeeded(
1778 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1780 switch (property->kind()) {
1781 case ObjectLiteral::Property::CONSTANT:
1782 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1783 case ObjectLiteral::Property::COMPUTED:
1784 if (property->emit_store()) {
1785 __ mov(r0, Operand(Smi::FromInt(NONE)));
1787 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1793 case ObjectLiteral::Property::PROTOTYPE:
1797 case ObjectLiteral::Property::GETTER:
1798 __ mov(r0, Operand(Smi::FromInt(NONE)));
1800 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1803 case ObjectLiteral::Property::SETTER:
1804 __ mov(r0, Operand(Smi::FromInt(NONE)));
1806 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1812 if (expr->has_function()) {
1813 DCHECK(result_saved);
1814 __ ldr(r0, MemOperand(sp));
1816 __ CallRuntime(Runtime::kToFastProperties, 1);
1820 context()->PlugTOS();
1822 context()->Plug(r0);
1825 // Verify that compilation exactly consumed the number of store ic slots that
1826 // the ObjectLiteral node had to offer.
1827 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1831 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1832 Comment cmnt(masm_, "[ ArrayLiteral");
1834 expr->BuildConstantElements(isolate());
1836 Handle<FixedArray> constant_elements = expr->constant_elements();
1837 bool has_fast_elements =
1838 IsFastObjectElementsKind(expr->constant_elements_kind());
1839 Handle<FixedArrayBase> constant_elements_values(
1840 FixedArrayBase::cast(constant_elements->get(1)));
1842 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1843 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1844 // If the only customer of allocation sites is transitioning, then we can
1845 // turn allocation site tracking off when there is nowhere left to transition to.
1846 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1849 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1850 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1851 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1852 __ mov(r1, Operand(constant_elements));
1853 if (MustCreateArrayLiteralWithRuntime(expr)) {
1854 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1855 __ Push(r3, r2, r1, r0);
1856 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1858 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1861 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1863 bool result_saved = false; // Is the result saved to the stack?
1864 ZoneList<Expression*>* subexprs = expr->values();
1865 int length = subexprs->length();
1867 // Emit code to evaluate all the non-constant subexpressions and to store
1868 // them into the newly cloned array.
1869 int array_index = 0;
1870 for (; array_index < length; array_index++) {
1871 Expression* subexpr = subexprs->at(array_index);
1872 if (subexpr->IsSpread()) break;
1874 // If the subexpression is a literal or a simple materialized literal it
1875 // is already set in the cloned array.
1876 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1878 if (!result_saved) {
1880 __ Push(Smi::FromInt(expr->literal_index()));
1881 result_saved = true;
1883 VisitForAccumulatorValue(subexpr);
1885 if (has_fast_elements) {
1886 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1887 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1888 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1889 __ str(result_register(), FieldMemOperand(r1, offset));
1890 // Update the write barrier for the array store.
1891 __ RecordWriteField(r1, offset, result_register(), r2,
1892 kLRHasBeenSaved, kDontSaveFPRegs,
1893 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1895 __ mov(r3, Operand(Smi::FromInt(array_index)));
1896 StoreArrayLiteralElementStub stub(isolate());
1900 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1903 // In case the array literal contains spread expressions it has two parts. The
1904 // first part is the "static" array, which has a literal index and is handled
1905 // above. The second part starts at the first spread expression (inclusive);
1906 // these elements get appended to the array. Note that the number of
1907 // elements an iterable produces is unknown ahead of time.
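// A minimal JS sketch of such a literal (assuming 'iterable' is any iterable
// value): the elements before the spread form the cloned "static" array; the
// spread and the trailing element are appended by the loop below.
//   var a = [1, 2, ...iterable, 3];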
1908 if (array_index < length && result_saved) {
1909 __ pop(); // literal index
1911 result_saved = false;
1913 for (; array_index < length; array_index++) {
1914 Expression* subexpr = subexprs->at(array_index);
1917 if (subexpr->IsSpread()) {
1918 VisitForStackValue(subexpr->AsSpread()->expression());
1919 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1921 VisitForStackValue(subexpr);
1922 __ CallRuntime(Runtime::kAppendElement, 2);
1925 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1929 __ pop(); // literal index
1930 context()->PlugTOS();
1932 context()->Plug(r0);
1937 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1938 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1940 Comment cmnt(masm_, "[ Assignment");
1941 SetExpressionPosition(expr, INSERT_BREAK);
1943 Property* property = expr->target()->AsProperty();
1944 LhsKind assign_type = Property::GetAssignType(property);
1946 // Evaluate LHS expression.
1947 switch (assign_type) {
1949 // Nothing to do here.
1951 case NAMED_PROPERTY:
1952 if (expr->is_compound()) {
1953 // We need the receiver both on the stack and in the register.
1954 VisitForStackValue(property->obj());
1955 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1957 VisitForStackValue(property->obj());
1960 case NAMED_SUPER_PROPERTY:
1962 property->obj()->AsSuperPropertyReference()->this_var());
1963 VisitForAccumulatorValue(
1964 property->obj()->AsSuperPropertyReference()->home_object());
1965 __ Push(result_register());
1966 if (expr->is_compound()) {
1967 const Register scratch = r1;
1968 __ ldr(scratch, MemOperand(sp, kPointerSize));
1970 __ Push(result_register());
1973 case KEYED_SUPER_PROPERTY:
1975 property->obj()->AsSuperPropertyReference()->this_var());
1977 property->obj()->AsSuperPropertyReference()->home_object());
1978 VisitForAccumulatorValue(property->key());
1979 __ Push(result_register());
1980 if (expr->is_compound()) {
1981 const Register scratch = r1;
1982 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1984 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1986 __ Push(result_register());
1989 case KEYED_PROPERTY:
1990 if (expr->is_compound()) {
1991 VisitForStackValue(property->obj());
1992 VisitForStackValue(property->key());
1993 __ ldr(LoadDescriptor::ReceiverRegister(),
1994 MemOperand(sp, 1 * kPointerSize));
1995 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1997 VisitForStackValue(property->obj());
1998 VisitForStackValue(property->key());
2003 // For compound assignments we need another deoptimization point after the
2004 // variable/property load.
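// A minimal JS sketch of a compound assignment handled here (placeholder
// names): the current value of the target is loaded, combined with the
// right-hand side, and then stored back.
//   o.count += 1;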
2005 if (expr->is_compound()) {
2006 { AccumulatorValueContext context(this);
2007 switch (assign_type) {
2009 EmitVariableLoad(expr->target()->AsVariableProxy());
2010 PrepareForBailout(expr->target(), TOS_REG);
2012 case NAMED_PROPERTY:
2013 EmitNamedPropertyLoad(property);
2014 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2016 case NAMED_SUPER_PROPERTY:
2017 EmitNamedSuperPropertyLoad(property);
2018 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2020 case KEYED_SUPER_PROPERTY:
2021 EmitKeyedSuperPropertyLoad(property);
2022 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2024 case KEYED_PROPERTY:
2025 EmitKeyedPropertyLoad(property);
2026 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2031 Token::Value op = expr->binary_op();
2032 __ push(r0); // Left operand goes on the stack.
2033 VisitForAccumulatorValue(expr->value());
2035 AccumulatorValueContext context(this);
2036 if (ShouldInlineSmiCase(op)) {
2037 EmitInlineSmiBinaryOp(expr->binary_operation(),
2042 EmitBinaryOp(expr->binary_operation(), op);
2045 // Deoptimization point in case the binary operation may have side effects.
2046 PrepareForBailout(expr->binary_operation(), TOS_REG);
2048 VisitForAccumulatorValue(expr->value());
2051 SetExpressionPosition(expr);
2054 switch (assign_type) {
2056 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2057 expr->op(), expr->AssignmentSlot());
2058 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2059 context()->Plug(r0);
2061 case NAMED_PROPERTY:
2062 EmitNamedPropertyAssignment(expr);
2064 case NAMED_SUPER_PROPERTY:
2065 EmitNamedSuperPropertyStore(property);
2066 context()->Plug(r0);
2068 case KEYED_SUPER_PROPERTY:
2069 EmitKeyedSuperPropertyStore(property);
2070 context()->Plug(r0);
2072 case KEYED_PROPERTY:
2073 EmitKeyedPropertyAssignment(expr);
2079 void FullCodeGenerator::VisitYield(Yield* expr) {
2080 Comment cmnt(masm_, "[ Yield");
2081 SetExpressionPosition(expr);
2083 // Evaluate yielded value first; the initial iterator definition depends on
2084 // this. It stays on the stack while we update the iterator.
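// A minimal JS sketch of the yield forms distinguished below (placeholder
// names; the exact kind is chosen by the parser): a plain 'yield' suspends,
// 'yield*' delegates, and the generator's return produces the final result.
//   function* g() { var x = yield 1; yield* inner(); return 2; }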
2085 VisitForStackValue(expr->expression());
2087 switch (expr->yield_kind()) {
2088 case Yield::kSuspend:
2089 // Pop value from top-of-stack slot; box result into result register.
2090 EmitCreateIteratorResult(false);
2091 __ push(result_register());
2093 case Yield::kInitial: {
2094 Label suspend, continuation, post_runtime, resume;
2097 __ bind(&continuation);
2098 __ RecordGeneratorContinuation();
2102 VisitForAccumulatorValue(expr->generator_object());
2103 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2104 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2105 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2106 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2108 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2109 kLRHasBeenSaved, kDontSaveFPRegs);
2110 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2112 __ b(eq, &post_runtime);
2113 __ push(r0); // generator object
2114 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2115 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2116 __ bind(&post_runtime);
2117 __ pop(result_register());
2118 EmitReturnSequence();
2121 context()->Plug(result_register());
2125 case Yield::kFinal: {
2126 VisitForAccumulatorValue(expr->generator_object());
2127 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2128 __ str(r1, FieldMemOperand(result_register(),
2129 JSGeneratorObject::kContinuationOffset));
2130 // Pop value from top-of-stack slot, box result into result register.
2131 EmitCreateIteratorResult(true);
2132 EmitUnwindBeforeReturn();
2133 EmitReturnSequence();
2137 case Yield::kDelegating: {
2138 VisitForStackValue(expr->generator_object());
2140 // Initial stack layout is as follows:
2141 // [sp + 1 * kPointerSize] iter
2142 // [sp + 0 * kPointerSize] g
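// A minimal JS sketch of the construct compiled by this case (with 'inner' as
// a placeholder): the code below repeatedly calls the delegate's 'next' (or
// 'throw' when an exception arrives) and re-yields each result until the
// delegate reports done.
//   function* outer() { yield* inner(); }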
2144 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2145 Label l_next, l_call, l_loop;
2146 Register load_receiver = LoadDescriptor::ReceiverRegister();
2147 Register load_name = LoadDescriptor::NameRegister();
2149 // Initial send value is undefined.
2150 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2153 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2155 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2156 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2157 __ Push(load_name, r3, r0); // "throw", iter, except
2160 // try { received = %yield result }
2161 // Shuffle the received result above a try handler and yield it without re-boxing.
2164 __ pop(r0); // result
2165 int handler_index = NewHandlerTableEntry();
2166 EnterTryBlock(handler_index, &l_catch);
2167 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2168 __ push(r0); // result
2171 __ bind(&l_continuation);
2172 __ RecordGeneratorContinuation();
2175 __ bind(&l_suspend);
2176 const int generator_object_depth = kPointerSize + try_block_size;
2177 __ ldr(r0, MemOperand(sp, generator_object_depth));
2179 __ Push(Smi::FromInt(handler_index)); // handler-index
2180 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2181 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2182 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2183 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2185 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2186 kLRHasBeenSaved, kDontSaveFPRegs);
2187 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2188 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2189 __ pop(r0); // result
2190 EmitReturnSequence();
2191 __ bind(&l_resume); // received in r0
2192 ExitTryBlock(handler_index);
2194 // receiver = iter; f = 'next'; arg = received;
2197 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2198 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2199 __ Push(load_name, r3, r0); // "next", iter, received
2201 // result = receiver[f](arg);
2203 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2204 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2205 __ mov(LoadDescriptor::SlotRegister(),
2206 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2207 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2208 CallIC(ic, TypeFeedbackId::None());
2210 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2211 SetCallPosition(expr, 1);
2212 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2215 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2216 __ Drop(1); // The function is still on the stack; drop it.
2218 // if (!result.done) goto l_try;
2220 __ Move(load_receiver, r0);
2222 __ push(load_receiver); // save result
2223 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2224 __ mov(LoadDescriptor::SlotRegister(),
2225 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2226 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done
2227 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2229 __ cmp(r0, Operand(0));
2233 __ pop(load_receiver); // result
2234 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2235 __ mov(LoadDescriptor::SlotRegister(),
2236 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2237 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.value
2238 context()->DropAndPlug(2, r0); // drop iter and g
2245 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2247 JSGeneratorObject::ResumeMode resume_mode) {
2248 // The value stays in r0, and is ultimately read by the resumed generator, as
2249 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2250 // is read to throw the value when the resumed generator is already closed.
2251 // r1 will hold the generator object until the activation has been resumed.
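// A minimal JS sketch of the operations that reach this helper ('gen' is a
// placeholder generator function): resuming with a value uses NEXT; resuming
// with an exception throws it at the suspension point.
//   var g = gen();
//   g.next(42);
//   g.throw(new Error("boom"));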
2252 VisitForStackValue(generator);
2253 VisitForAccumulatorValue(value);
2256 // Load suspended function and context.
2257 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2258 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2260 // Load receiver and store as the first argument.
2261 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2264 // Push holes for the rest of the arguments to the generator function.
2265 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2267 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2268 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2269 Label push_argument_holes, push_frame;
2270 __ bind(&push_argument_holes);
2271 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2272 __ b(mi, &push_frame);
2274 __ jmp(&push_argument_holes);
2276 // Enter a new JavaScript frame, and initialize its slots as they were when
2277 // the generator was suspended.
2278 Label resume_frame, done;
2279 __ bind(&push_frame);
2280 __ bl(&resume_frame);
2282 __ bind(&resume_frame);
2283 // lr = return address.
2284 // fp = caller's frame pointer.
2285 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2286 // cp = callee's context,
2287 // r4 = callee's JS function.
2288 __ PushFixedFrame(r4);
2289 // Adjust FP to point to saved FP.
2290 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2292 // Load the operand stack size.
2293 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2294 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2297 // If we are sending a value and there is no operand stack, we can jump back in directly.
2299 if (resume_mode == JSGeneratorObject::NEXT) {
2301 __ cmp(r3, Operand(0));
2302 __ b(ne, &slow_resume);
2303 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2305 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2306 if (FLAG_enable_embedded_constant_pool) {
2307 // Load the new code object's constant pool pointer.
2308 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2311 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2314 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2315 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2318 __ bind(&slow_resume);
2321 // Otherwise, we push holes for the operand stack and call the runtime to fix
2322 // up the stack and the handlers.
2323 Label push_operand_holes, call_resume;
2324 __ bind(&push_operand_holes);
2325 __ sub(r3, r3, Operand(1), SetCC);
2326 __ b(mi, &call_resume);
2328 __ b(&push_operand_holes);
2329 __ bind(&call_resume);
2330 DCHECK(!result_register().is(r1));
2331 __ Push(r1, result_register());
2332 __ Push(Smi::FromInt(resume_mode));
2333 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2334 // Not reached: the runtime call returns elsewhere.
2335 __ stop("not-reached");
2338 context()->Plug(result_register());
2342 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2346 const int instance_size = 5 * kPointerSize;
2347 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2350 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
2353 __ bind(&gc_required);
2354 __ Push(Smi::FromInt(instance_size));
2355 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2356 __ ldr(context_register(),
2357 MemOperand(fp, StandardFrameConstants::kContextOffset));
2359 __ bind(&allocated);
2360 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2361 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2362 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2364 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2365 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2366 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2367 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2368 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2370 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2372 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2374 // Only the value field needs a write barrier, as the other values are in the root set.
2376 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2377 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2381 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2382 SetExpressionPosition(prop);
2383 Literal* key = prop->key()->AsLiteral();
2384 DCHECK(!prop->IsSuperAccess());
2386 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2387 __ mov(LoadDescriptor::SlotRegister(),
2388 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2389 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2393 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2394 // Stack: receiver, home_object.
2395 SetExpressionPosition(prop);
2396 Literal* key = prop->key()->AsLiteral();
2397 DCHECK(!key->value()->IsSmi());
2398 DCHECK(prop->IsSuperAccess());
2400 __ Push(key->value());
2401 __ Push(Smi::FromInt(language_mode()));
2402 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2406 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2407 SetExpressionPosition(prop);
2408 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2409 __ mov(LoadDescriptor::SlotRegister(),
2410 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2415 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2416 // Stack: receiver, home_object, key.
2417 SetExpressionPosition(prop);
2418 __ Push(Smi::FromInt(language_mode()));
2419 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2423 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2425 Expression* left_expr,
2426 Expression* right_expr) {
2427 Label done, smi_case, stub_call;
2429 Register scratch1 = r2;
2430 Register scratch2 = r3;
2432 // Get the arguments.
2434 Register right = r0;
2437 // Perform combined smi check on both operands.
2438 __ orr(scratch1, left, Operand(right));
2439 STATIC_ASSERT(kSmiTag == 0);
2440 JumpPatchSite patch_site(masm_);
2441 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2443 __ bind(&stub_call);
2445 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2446 CallIC(code, expr->BinaryOperationFeedbackId());
2447 patch_site.EmitPatchInfo();
2451 // Smi case. This code works the same way as the smi-smi case in the
2452 // type-recording binary operation stub; see that stub for the details.
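// A minimal JS sketch ('a' and 'b' are placeholders): when both operands are
// smis the operation is handled by the inline code below; otherwise the
// patch site branches to &stub_call and the BinaryOpIC takes over.
//   var c = a | b;   // likewise +, -, *, %, <<, >>, >>>, & and ^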
2455 __ GetLeastBitsFromSmi(scratch1, right, 5);
2456 __ mov(right, Operand(left, ASR, scratch1));
2457 __ bic(right, right, Operand(kSmiTagMask));
2460 __ SmiUntag(scratch1, left);
2461 __ GetLeastBitsFromSmi(scratch2, right, 5);
2462 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2463 __ TrySmiTag(right, scratch1, &stub_call);
2467 __ SmiUntag(scratch1, left);
2468 __ GetLeastBitsFromSmi(scratch2, right, 5);
2469 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2470 __ tst(scratch1, Operand(0xc0000000));
2471 __ b(ne, &stub_call);
2472 __ SmiTag(right, scratch1);
2476 __ add(scratch1, left, Operand(right), SetCC);
2477 __ b(vs, &stub_call);
2478 __ mov(right, scratch1);
2481 __ sub(scratch1, left, Operand(right), SetCC);
2482 __ b(vs, &stub_call);
2483 __ mov(right, scratch1);
2486 __ SmiUntag(ip, right);
2487 __ smull(scratch1, scratch2, left, ip);
2488 __ mov(ip, Operand(scratch1, ASR, 31));
2489 __ cmp(ip, Operand(scratch2));
2490 __ b(ne, &stub_call);
2491 __ cmp(scratch1, Operand::Zero());
2492 __ mov(right, Operand(scratch1), LeaveCC, ne);
2494 __ add(scratch2, right, Operand(left), SetCC);
2495 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2496 __ b(mi, &stub_call);
2500 __ orr(right, left, Operand(right));
2502 case Token::BIT_AND:
2503 __ and_(right, left, Operand(right));
2505 case Token::BIT_XOR:
2506 __ eor(right, left, Operand(right));
2513 context()->Plug(r0);
2517 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2518 int* used_store_slots) {
2519 // Constructor is in r0.
2520 DCHECK(lit != NULL);
2523 // No access check is needed here since the constructor is created by the class literal.
2525 Register scratch = r1;
2527 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2530 for (int i = 0; i < lit->properties()->length(); i++) {
2531 ObjectLiteral::Property* property = lit->properties()->at(i);
2532 Expression* value = property->value();
2534 if (property->is_static()) {
2535 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2537 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2540 EmitPropertyKey(property, lit->GetIdForProperty(i));
2542 // The static prototype property is read-only. We handle the non-computed
2543 // property name case in the parser. Since this is the only case where we
2544 // need to check for an own read-only property, we special-case it here so
2545 // that we do not need to perform the check for every property.
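// A minimal JS sketch of the case guarded here: a static member whose computed
// name evaluates to "prototype" must be rejected at definition time.
//   class C { static ["proto" + "type"]() {} }   // throws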
2546 if (property->is_static() && property->is_computed_name()) {
2547 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2551 VisitForStackValue(value);
2552 EmitSetHomeObjectIfNeeded(value, 2,
2553 lit->SlotForHomeObject(value, used_store_slots));
2555 switch (property->kind()) {
2556 case ObjectLiteral::Property::CONSTANT:
2557 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2558 case ObjectLiteral::Property::PROTOTYPE:
2560 case ObjectLiteral::Property::COMPUTED:
2561 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2564 case ObjectLiteral::Property::GETTER:
2565 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2567 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2570 case ObjectLiteral::Property::SETTER:
2571 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2573 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2582 __ CallRuntime(Runtime::kToFastProperties, 1);
2585 __ CallRuntime(Runtime::kToFastProperties, 1);
2587 if (is_strong(language_mode())) {
2589 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2592 // TODO(conradw): It would be more efficient to define the properties with
2593 // the right attributes the first time round.
2594 // Freeze the prototype.
2595 __ CallRuntime(Runtime::kObjectFreeze, 1);
2596 // Freeze the constructor.
2597 __ CallRuntime(Runtime::kObjectFreeze, 1);
2602 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2605 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2606 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2607 CallIC(code, expr->BinaryOperationFeedbackId());
2608 patch_site.EmitPatchInfo();
2609 context()->Plug(r0);
2613 void FullCodeGenerator::EmitAssignment(Expression* expr,
2614 FeedbackVectorICSlot slot) {
2615 DCHECK(expr->IsValidReferenceExpressionOrThis());
2617 Property* prop = expr->AsProperty();
2618 LhsKind assign_type = Property::GetAssignType(prop);
2620 switch (assign_type) {
2622 Variable* var = expr->AsVariableProxy()->var();
2623 EffectContext context(this);
2624 EmitVariableAssignment(var, Token::ASSIGN, slot);
2627 case NAMED_PROPERTY: {
2628 __ push(r0); // Preserve value.
2629 VisitForAccumulatorValue(prop->obj());
2630 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2631 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2632 __ mov(StoreDescriptor::NameRegister(),
2633 Operand(prop->key()->AsLiteral()->value()));
2634 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2638 case NAMED_SUPER_PROPERTY: {
2640 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2641 VisitForAccumulatorValue(
2642 prop->obj()->AsSuperPropertyReference()->home_object());
2643 // stack: value, this; r0: home_object
2644 Register scratch = r2;
2645 Register scratch2 = r3;
2646 __ mov(scratch, result_register()); // home_object
2647 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2648 __ ldr(scratch2, MemOperand(sp, 0)); // this
2649 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2650 __ str(scratch, MemOperand(sp, 0)); // home_object
2651 // stack: this, home_object; r0: value
2652 EmitNamedSuperPropertyStore(prop);
2655 case KEYED_SUPER_PROPERTY: {
2657 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2659 prop->obj()->AsSuperPropertyReference()->home_object());
2660 VisitForAccumulatorValue(prop->key());
2661 Register scratch = r2;
2662 Register scratch2 = r3;
2663 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2664 // stack: value, this, home_object; r0: key, r3: value
2665 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2666 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2667 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2668 __ str(scratch, MemOperand(sp, kPointerSize));
2669 __ str(r0, MemOperand(sp, 0));
2670 __ Move(r0, scratch2);
2671 // stack: this, home_object, key; r0: value.
2672 EmitKeyedSuperPropertyStore(prop);
2675 case KEYED_PROPERTY: {
2676 __ push(r0); // Preserve value.
2677 VisitForStackValue(prop->obj());
2678 VisitForAccumulatorValue(prop->key());
2679 __ Move(StoreDescriptor::NameRegister(), r0);
2680 __ Pop(StoreDescriptor::ValueRegister(),
2681 StoreDescriptor::ReceiverRegister());
2682 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2684 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2689 context()->Plug(r0);
2693 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2694 Variable* var, MemOperand location) {
2695 __ str(result_register(), location);
2696 if (var->IsContextSlot()) {
2697 // RecordWrite may destroy all its register arguments.
2698 __ mov(r3, result_register());
2699 int offset = Context::SlotOffset(var->index());
2700 __ RecordWriteContextSlot(
2701 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2706 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2707 FeedbackVectorICSlot slot) {
2708 if (var->IsUnallocated()) {
2709 // Global var, const, or let.
2710 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2711 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2712 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2715 } else if (var->IsGlobalSlot()) {
2716 // Global var, const, or let.
2717 DCHECK(var->index() > 0);
2718 DCHECK(var->IsStaticGlobalObjectProperty());
2719 // Each var occupies two slots in the context: one for reads and one for writes.
2720 const int slot = var->index() + 1;
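// (The read slot is var->index(); the write slot, used for this store, is the
// one right after it.)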
2721 const int depth = scope()->ContextChainLength(var->scope());
2722 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2723 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2724 __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
2725 Operand(var->name()));
2726 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
2727 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2730 __ Push(Smi::FromInt(slot));
2731 __ Push(var->name());
2733 __ CallRuntime(is_strict(language_mode())
2734 ? Runtime::kStoreGlobalViaContext_Strict
2735 : Runtime::kStoreGlobalViaContext_Sloppy,
2738 } else if (var->mode() == LET && op != Token::INIT_LET) {
2739 // Non-initializing assignment to let variable needs a write barrier.
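// A minimal JS sketch of what the hole check below enforces: assigning to a
// 'let' binding that is still uninitialized throws a ReferenceError.
//   { x = 1; let x; }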
2740 DCHECK(!var->IsLookupSlot());
2741 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2743 MemOperand location = VarOperand(var, r1);
2744 __ ldr(r3, location);
2745 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2747 __ mov(r3, Operand(var->name()));
2749 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2750 // Perform the assignment.
2752 EmitStoreToStackLocalOrContextSlot(var, location);
2754 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2755 // Assignment to const variable needs a write barrier.
2756 DCHECK(!var->IsLookupSlot());
2757 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2759 MemOperand location = VarOperand(var, r1);
2760 __ ldr(r3, location);
2761 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2762 __ b(ne, &const_error);
2763 __ mov(r3, Operand(var->name()));
2765 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2766 __ bind(&const_error);
2767 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2769 } else if (var->is_this() && op == Token::INIT_CONST) {
2770 // Initializing assignment to const {this} needs a write barrier.
2771 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2772 Label uninitialized_this;
2773 MemOperand location = VarOperand(var, r1);
2774 __ ldr(r3, location);
2775 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2776 __ b(eq, &uninitialized_this);
2777 __ mov(r0, Operand(var->name()));
2779 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2780 __ bind(&uninitialized_this);
2781 EmitStoreToStackLocalOrContextSlot(var, location);
2783 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2784 if (var->IsLookupSlot()) {
2785 // Assignment to var.
2786 __ push(r0); // Value.
2787 __ mov(r1, Operand(var->name()));
2788 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2789 __ Push(cp, r1, r0); // Context, name, language mode.
2790 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2792 // Assignment to var or initializing assignment to let/const in harmony mode.
2794 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2795 MemOperand location = VarOperand(var, r1);
2796 if (generate_debug_code_ && op == Token::INIT_LET) {
2797 // Check for an uninitialized let binding.
2798 __ ldr(r2, location);
2799 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2800 __ Check(eq, kLetBindingReInitialization);
2802 EmitStoreToStackLocalOrContextSlot(var, location);
2805 } else if (op == Token::INIT_CONST_LEGACY) {
2806 // Const initializers need a write barrier.
2807 DCHECK(var->mode() == CONST_LEGACY);
2808 DCHECK(!var->IsParameter()); // No const parameters.
2809 if (var->IsLookupSlot()) {
2811 __ mov(r0, Operand(var->name()));
2812 __ Push(cp, r0); // Context and name.
2813 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2815 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2817 MemOperand location = VarOperand(var, r1);
2818 __ ldr(r2, location);
2819 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2821 EmitStoreToStackLocalOrContextSlot(var, location);
2826 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2827 if (is_strict(language_mode())) {
2828 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2830 // Silently ignore store in sloppy mode.
2835 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2836 // Assignment to a property, using a named store IC.
2837 Property* prop = expr->target()->AsProperty();
2838 DCHECK(prop != NULL);
2839 DCHECK(prop->key()->IsLiteral());
2841 __ mov(StoreDescriptor::NameRegister(),
2842 Operand(prop->key()->AsLiteral()->value()));
2843 __ pop(StoreDescriptor::ReceiverRegister());
2844 if (FLAG_vector_stores) {
2845 EmitLoadStoreICSlot(expr->AssignmentSlot());
2848 CallStoreIC(expr->AssignmentFeedbackId());
2851 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2852 context()->Plug(r0);
2856 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2857 // Assignment to named property of super.
2859 // stack : receiver ('this'), home_object
2860 DCHECK(prop != NULL);
2861 Literal* key = prop->key()->AsLiteral();
2862 DCHECK(key != NULL);
2864 __ Push(key->value());
2866 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2867 : Runtime::kStoreToSuper_Sloppy),
2872 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2873 // Assignment to a keyed property of super.
2875 // stack : receiver ('this'), home_object, key
2876 DCHECK(prop != NULL);
2880 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2881 : Runtime::kStoreKeyedToSuper_Sloppy),
2886 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2887 // Assignment to a property, using a keyed store IC.
2888 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2889 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2892 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2893 if (FLAG_vector_stores) {
2894 EmitLoadStoreICSlot(expr->AssignmentSlot());
2897 CallIC(ic, expr->AssignmentFeedbackId());
2900 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2901 context()->Plug(r0);
2905 void FullCodeGenerator::VisitProperty(Property* expr) {
2906 Comment cmnt(masm_, "[ Property");
2907 SetExpressionPosition(expr);
2909 Expression* key = expr->key();
2911 if (key->IsPropertyName()) {
2912 if (!expr->IsSuperAccess()) {
2913 VisitForAccumulatorValue(expr->obj());
2914 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2915 EmitNamedPropertyLoad(expr);
2917 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2919 expr->obj()->AsSuperPropertyReference()->home_object());
2920 EmitNamedSuperPropertyLoad(expr);
2923 if (!expr->IsSuperAccess()) {
2924 VisitForStackValue(expr->obj());
2925 VisitForAccumulatorValue(expr->key());
2926 __ Move(LoadDescriptor::NameRegister(), r0);
2927 __ pop(LoadDescriptor::ReceiverRegister());
2928 EmitKeyedPropertyLoad(expr);
2930 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2932 expr->obj()->AsSuperPropertyReference()->home_object());
2933 VisitForStackValue(expr->key());
2934 EmitKeyedSuperPropertyLoad(expr);
2937 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2938 context()->Plug(r0);
2942 void FullCodeGenerator::CallIC(Handle<Code> code,
2943 TypeFeedbackId ast_id) {
2945 // All calls must have a predictable size in full-codegen code to ensure that
2946 // the debugger can patch them correctly.
2947 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2948 NEVER_INLINE_TARGET_ADDRESS);
2952 // Code common for calls using the IC.
2953 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2954 Expression* callee = expr->expression();
2956 CallICState::CallType call_type =
2957 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2959 // Get the target function.
2960 if (call_type == CallICState::FUNCTION) {
2961 { StackValueContext context(this);
2962 EmitVariableLoad(callee->AsVariableProxy());
2963 PrepareForBailout(callee, NO_REGISTERS);
2965 // Push undefined as receiver. This is patched in the method prologue if it
2966 // is a sloppy mode method.
2967 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2970 // Load the function from the receiver.
2971 DCHECK(callee->IsProperty());
2972 DCHECK(!callee->AsProperty()->IsSuperAccess());
2973 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2974 EmitNamedPropertyLoad(callee->AsProperty());
2975 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2976 // Push the target function under the receiver.
2977 __ ldr(ip, MemOperand(sp, 0));
2979 __ str(r0, MemOperand(sp, kPointerSize));
2982 EmitCall(expr, call_type);
2986 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2987 Expression* callee = expr->expression();
2988 DCHECK(callee->IsProperty());
2989 Property* prop = callee->AsProperty();
2990 DCHECK(prop->IsSuperAccess());
2991 SetExpressionPosition(prop);
2993 Literal* key = prop->key()->AsLiteral();
2994 DCHECK(!key->value()->IsSmi());
2995 // Load the function from the receiver.
2996 const Register scratch = r1;
2997 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2998 VisitForStackValue(super_ref->home_object());
2999 VisitForAccumulatorValue(super_ref->this_var());
3002 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3004 __ Push(key->value());
3005 __ Push(Smi::FromInt(language_mode()));
3009 // - this (receiver)
3010 // - this (receiver) <-- LoadFromSuper will pop here and below.
3014 __ CallRuntime(Runtime::kLoadFromSuper, 4);
3016 // Replace home_object with target function.
3017 __ str(r0, MemOperand(sp, kPointerSize));
3020 // - target function
3021 // - this (receiver)
3022 EmitCall(expr, CallICState::METHOD);
3026 // Code common for calls using the IC.
3027 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3030 VisitForAccumulatorValue(key);
3032 Expression* callee = expr->expression();
3034 // Load the function from the receiver.
3035 DCHECK(callee->IsProperty());
3036 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3037 __ Move(LoadDescriptor::NameRegister(), r0);
3038 EmitKeyedPropertyLoad(callee->AsProperty());
3039 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3041 // Push the target function under the receiver.
3042 __ ldr(ip, MemOperand(sp, 0));
3044 __ str(r0, MemOperand(sp, kPointerSize));
3046 EmitCall(expr, CallICState::METHOD);
3050 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3051 Expression* callee = expr->expression();
3052 DCHECK(callee->IsProperty());
3053 Property* prop = callee->AsProperty();
3054 DCHECK(prop->IsSuperAccess());
3056 SetExpressionPosition(prop);
3057 // Load the function from the receiver.
3058 const Register scratch = r1;
3059 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3060 VisitForStackValue(super_ref->home_object());
3061 VisitForAccumulatorValue(super_ref->this_var());
3064 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3066 VisitForStackValue(prop->key());
3067 __ Push(Smi::FromInt(language_mode()));
3071 // - this (receiver)
3072 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3076 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3078 // Replace home_object with target function.
3079 __ str(r0, MemOperand(sp, kPointerSize));
3082 // - target function
3083 // - this (receiver)
3084 EmitCall(expr, CallICState::METHOD);
3088 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3089 // Load the arguments.
3090 ZoneList<Expression*>* args = expr->arguments();
3091 int arg_count = args->length();
3092 for (int i = 0; i < arg_count; i++) {
3093 VisitForStackValue(args->at(i));
3096 SetCallPosition(expr, arg_count);
3097 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3098 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3099 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3100 // Don't assign a type feedback id to the IC, since type feedback is provided
3101 // by the vector above.
3104 RecordJSReturnSite(expr);
3105 // Restore context register.
3106 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3107 context()->DropAndPlug(1, r0);
3111 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3112 // r4: copy of the first argument or undefined if it doesn't exist.
3113 if (arg_count > 0) {
3114 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
3116 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3119 // r3: the receiver of the enclosing function.
3120 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3122 // r2: language mode.
3123 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3125 // r1: the start position of the scope the call resides in.
3126 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3128 // Do the runtime call.
3129 __ Push(r4, r3, r2, r1);
3130 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3134 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3135 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
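// A minimal JS sketch of a callee living in a lookup slot (assuming a
// sloppy-mode direct eval introduced it): 'g' must be resolved at runtime,
// together with the base object supplied by any enclosing 'with'.
//   eval("function g() {}");
//   g();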
3136 VariableProxy* callee = expr->expression()->AsVariableProxy();
3137 if (callee->var()->IsLookupSlot()) {
3139 SetExpressionPosition(callee);
3140 // Generate code for loading from variables potentially shadowed
3141 // by eval-introduced variables.
3142 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3145 // Call the runtime to find the function to call (returned in r0)
3146 // and the object holding it (returned in r1).
3147 DCHECK(!context_register().is(r2));
3148 __ mov(r2, Operand(callee->name()));
3149 __ Push(context_register(), r2);
3150 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3151 __ Push(r0, r1); // Function, receiver.
3152 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3154 // If fast case code has been generated, emit code to push the
3155 // function and receiver and have the slow path jump around this code.
3157 if (done.is_linked()) {
3163 // The receiver is implicitly the global receiver. Indicate this
3164 // by passing the hole to the call function stub.
3165 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3170 VisitForStackValue(callee);
3171 // refEnv.WithBaseObject()
3172 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3173 __ push(r2); // Reserved receiver slot.
3178 void FullCodeGenerator::VisitCall(Call* expr) {
3180 // We want to verify that RecordJSReturnSite gets called on all paths
3181 // through this function. Avoid early returns.
3182 expr->return_is_recorded_ = false;
3185 Comment cmnt(masm_, "[ Call");
3186 Expression* callee = expr->expression();
3187 Call::CallType call_type = expr->GetCallType(isolate());
3189 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3190 // In a call to eval, we first call
3191 // Runtime::kResolvePossiblyDirectEval to resolve the function we need
3192 // to call. Then we call the resolved function using the given arguments.
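// A minimal JS sketch of such a call ('f' and 'x' are placeholders):
//   function f(x) { return eval("x + 1"); }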
3193 ZoneList<Expression*>* args = expr->arguments();
3194 int arg_count = args->length();
3196 PushCalleeAndWithBaseObject(expr);
3198 // Push the arguments.
3199 for (int i = 0; i < arg_count; i++) {
3200 VisitForStackValue(args->at(i));
3203 // Push a copy of the function (found below the arguments) and resolve eval.
3205 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3207 EmitResolvePossiblyDirectEval(arg_count);
3209 // Touch up the stack with the resolved function.
3210 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3212 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3214 // Record source position for debugger.
3215 SetCallPosition(expr, arg_count);
3216 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3217 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3219 RecordJSReturnSite(expr);
3220 // Restore context register.
3221 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3222 context()->DropAndPlug(1, r0);
3223 } else if (call_type == Call::GLOBAL_CALL) {
3224 EmitCallWithLoadIC(expr);
3226 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3227 // Call to a lookup slot (dynamically introduced variable).
3228 PushCalleeAndWithBaseObject(expr);
3230 } else if (call_type == Call::PROPERTY_CALL) {
3231 Property* property = callee->AsProperty();
3232 bool is_named_call = property->key()->IsPropertyName();
3233 if (property->IsSuperAccess()) {
3234 if (is_named_call) {
3235 EmitSuperCallWithLoadIC(expr);
3237 EmitKeyedSuperCallWithLoadIC(expr);
3240 VisitForStackValue(property->obj());
3241 if (is_named_call) {
3242 EmitCallWithLoadIC(expr);
3244 EmitKeyedCallWithLoadIC(expr, property->key());
3247 } else if (call_type == Call::SUPER_CALL) {
3248 EmitSuperConstructorCall(expr);
3250 DCHECK(call_type == Call::OTHER_CALL);
3251 // Call to an arbitrary expression not handled specially above.
3252 VisitForStackValue(callee);
3253 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3255 // Emit function call.
3260 // RecordJSReturnSite should have been called.
3261 DCHECK(expr->return_is_recorded_);
3266 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3267 Comment cmnt(masm_, "[ CallNew");
3268 // According to ECMA-262, section 11.2.2, page 44, the function
3269 // expression in new calls must be evaluated before the arguments.
3272 // Push constructor on the stack. If it's not a function it's used as
3273 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is discarded.
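// A minimal JS sketch of a 'new' expression compiled here (with 'Point' as a
// placeholder constructor; super() calls go through EmitSuperConstructorCall
// instead):
//   var p = new Point(1, 2);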
3275 DCHECK(!expr->expression()->IsSuperPropertyReference());
3276 VisitForStackValue(expr->expression());
3278 // Push the arguments ("left-to-right") on the stack.
3279 ZoneList<Expression*>* args = expr->arguments();
3280 int arg_count = args->length();
3281 for (int i = 0; i < arg_count; i++) {
3282 VisitForStackValue(args->at(i));
3285 // Call the construct call builtin that handles allocation and
3286 // constructor invocation.
3287 SetConstructCallPosition(expr);
3289 // Load function and argument count into r1 and r0.
3290 __ mov(r0, Operand(arg_count));
3291 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3293 // Record call targets in unoptimized code.
3294 if (FLAG_pretenuring_call_new) {
3295 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3296 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3297 expr->CallNewFeedbackSlot().ToInt() + 1);
3300 __ Move(r2, FeedbackVector());
3301 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3303 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3304 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3305 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3306 context()->Plug(r0);
3310 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3311 SuperCallReference* super_call_ref =
3312 expr->expression()->AsSuperCallReference();
3313 DCHECK_NOT_NULL(super_call_ref);
3315 EmitLoadSuperConstructor(super_call_ref);
3316 __ push(result_register());
3318 // Push the arguments ("left-to-right") on the stack.
3319 ZoneList<Expression*>* args = expr->arguments();
3320 int arg_count = args->length();
3321 for (int i = 0; i < arg_count; i++) {
3322 VisitForStackValue(args->at(i));
3325 // Call the construct call builtin that handles allocation and
3326 // constructor invocation.
3327 SetConstructCallPosition(expr);
3329 // Load original constructor into r4.
3330 VisitForAccumulatorValue(super_call_ref->new_target_var());
3331 __ mov(r4, result_register());
3333 // Load function and argument count into r1 and r0.
3334 __ mov(r0, Operand(arg_count));
3335 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3337 // Record call targets in unoptimized code.
3338 if (FLAG_pretenuring_call_new) {
3340 /* TODO(dslomov): support pretenuring.
3341 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3342 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3343 expr->CallNewFeedbackSlot().ToInt() + 1);
3347 __ Move(r2, FeedbackVector());
3348 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3350 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3351 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3353 RecordJSReturnSite(expr);
3355 context()->Plug(r0);
3359 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3360 ZoneList<Expression*>* args = expr->arguments();
3361 DCHECK(args->length() == 1);
3363 VisitForAccumulatorValue(args->at(0));
3365 Label materialize_true, materialize_false;
3366 Label* if_true = NULL;
3367 Label* if_false = NULL;
3368 Label* fall_through = NULL;
3369 context()->PrepareTest(&materialize_true, &materialize_false,
3370 &if_true, &if_false, &fall_through);
3372 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3374 Split(eq, if_true, if_false, fall_through);
3376 context()->Plug(if_true, if_false);
3380 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3381 ZoneList<Expression*>* args = expr->arguments();
3382 DCHECK(args->length() == 1);
3384 VisitForAccumulatorValue(args->at(0));
3386 Label materialize_true, materialize_false;
3387 Label* if_true = NULL;
3388 Label* if_false = NULL;
3389 Label* fall_through = NULL;
3390 context()->PrepareTest(&materialize_true, &materialize_false,
3391 &if_true, &if_false, &fall_through);
3393 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3394 __ NonNegativeSmiTst(r0);
3395 Split(eq, if_true, if_false, fall_through);
3397 context()->Plug(if_true, if_false);
3401 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3402 ZoneList<Expression*>* args = expr->arguments();
3403 DCHECK(args->length() == 1);
3405 VisitForAccumulatorValue(args->at(0));
3407 Label materialize_true, materialize_false;
3408 Label* if_true = NULL;
3409 Label* if_false = NULL;
3410 Label* fall_through = NULL;
3411 context()->PrepareTest(&materialize_true, &materialize_false,
3412 &if_true, &if_false, &fall_through);
3414 __ JumpIfSmi(r0, if_false);
3415 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3418 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3419 // Undetectable objects behave like undefined when tested with typeof.
3420 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3421 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3423 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3424 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3426 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3427 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3428 Split(le, if_true, if_false, fall_through);
3430 context()->Plug(if_true, if_false);
3434 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3435 ZoneList<Expression*>* args = expr->arguments();
3436 DCHECK(args->length() == 1);
3438 VisitForAccumulatorValue(args->at(0));
3440 Label materialize_true, materialize_false;
3441 Label* if_true = NULL;
3442 Label* if_false = NULL;
3443 Label* fall_through = NULL;
3444 context()->PrepareTest(&materialize_true, &materialize_false,
3445 &if_true, &if_false, &fall_through);
3447 __ JumpIfSmi(r0, if_false);
3448 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3449 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3450 Split(ge, if_true, if_false, fall_through);
3452 context()->Plug(if_true, if_false);
3456 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3457 ZoneList<Expression*>* args = expr->arguments();
3458 DCHECK(args->length() == 1);
3460 VisitForAccumulatorValue(args->at(0));
3462 Label materialize_true, materialize_false;
3463 Label* if_true = NULL;
3464 Label* if_false = NULL;
3465 Label* fall_through = NULL;
3466 context()->PrepareTest(&materialize_true, &materialize_false,
3467 &if_true, &if_false, &fall_through);
3469 __ JumpIfSmi(r0, if_false);
3470 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3471 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3472 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3473 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3474 Split(ne, if_true, if_false, fall_through);
3476 context()->Plug(if_true, if_false);
3480 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3481 CallRuntime* expr) {
3482 ZoneList<Expression*>* args = expr->arguments();
3483 DCHECK(args->length() == 1);
3485 VisitForAccumulatorValue(args->at(0));
3487 Label materialize_true, materialize_false, skip_lookup;
3488 Label* if_true = NULL;
3489 Label* if_false = NULL;
3490 Label* fall_through = NULL;
3491 context()->PrepareTest(&materialize_true, &materialize_false,
3492 &if_true, &if_false, &fall_through);
3494 __ AssertNotSmi(r0);
3496 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3497 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3498 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3499 __ b(ne, &skip_lookup);
3501 // Check for fast case object. Generate false result for slow case object.
3502 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3503 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3504 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3508 // Look for valueOf name in the descriptor array, and indicate false if
3509 // found. Since we omit an enumeration index check, if it is added via a
3510 // transition that shares its descriptor array, this is a false positive.
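// A minimal JS sketch of what makes a wrapper unsafe (placeholder names): an
// own 'valueOf' shows up in the descriptor array walked below.
//   var s = new String("abc");
//   s.valueOf = function() { return "patched"; };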
3511 Label entry, loop, done;
3513 // Skip loop if no descriptors are valid.
3514 __ NumberOfOwnDescriptors(r3, r1);
3515 __ cmp(r3, Operand::Zero());
3518 __ LoadInstanceDescriptors(r1, r4);
3519 // r4: descriptor array.
3520 // r3: valid entries in the descriptor array.
3521 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3523 // Calculate location of the first key name.
3524 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3525 // Calculate the end of the descriptor array.
3527 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3529 // Loop through all the keys in the descriptor array. If one of these is the
3530 // string "valueOf", the result is false.
3531 // The use of ip to store the valueOf string assumes that it is not otherwise
3532 // used in the loop below.
3533 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3536 __ ldr(r3, MemOperand(r4, 0));
3539 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3541 __ cmp(r4, Operand(r2));
3546 // Set the bit in the map to indicate that there is no local valueOf field.
3547 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3548 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3549 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3551 __ bind(&skip_lookup);
3553 // If a valueOf property is not found on the object, check that its
3554 // prototype is the unmodified String prototype. If not, the result is false.
3555 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3556 __ JumpIfSmi(r2, if_false);
3557 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3558 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3559 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3560 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3562 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3563 Split(eq, if_true, if_false, fall_through);
3565 context()->Plug(if_true, if_false);
3569 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3570 ZoneList<Expression*>* args = expr->arguments();
3571 DCHECK(args->length() == 1);
3573 VisitForAccumulatorValue(args->at(0));
3575 Label materialize_true, materialize_false;
3576 Label* if_true = NULL;
3577 Label* if_false = NULL;
3578 Label* fall_through = NULL;
3579 context()->PrepareTest(&materialize_true, &materialize_false,
3580 &if_true, &if_false, &fall_through);
3582 __ JumpIfSmi(r0, if_false);
3583 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3584 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3585 Split(eq, if_true, if_false, fall_through);
3587 context()->Plug(if_true, if_false);
3591 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3592 ZoneList<Expression*>* args = expr->arguments();
3593 DCHECK(args->length() == 1);
3595 VisitForAccumulatorValue(args->at(0));
3597 Label materialize_true, materialize_false;
3598 Label* if_true = NULL;
3599 Label* if_false = NULL;
3600 Label* fall_through = NULL;
3601 context()->PrepareTest(&materialize_true, &materialize_false,
3602 &if_true, &if_false, &fall_through);
3604 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
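// -0.0 is the only heap number whose exponent/sign word is 0x80000000 while
// its mantissa word is zero, which is exactly what the two compares below
// test.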
3605 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3606 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3607 __ cmp(r2, Operand(0x80000000));
3608 __ cmp(r1, Operand(0x00000000), eq);
3610 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3611 Split(eq, if_true, if_false, fall_through);
3613 context()->Plug(if_true, if_false);
3617 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3618 ZoneList<Expression*>* args = expr->arguments();
3619 DCHECK(args->length() == 1);
3621 VisitForAccumulatorValue(args->at(0));
3623 Label materialize_true, materialize_false;
3624 Label* if_true = NULL;
3625 Label* if_false = NULL;
3626 Label* fall_through = NULL;
3627 context()->PrepareTest(&materialize_true, &materialize_false,
3628 &if_true, &if_false, &fall_through);
3630 __ JumpIfSmi(r0, if_false);
3631 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3632 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3633 Split(eq, if_true, if_false, fall_through);
3635 context()->Plug(if_true, if_false);
3639 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3640 ZoneList<Expression*>* args = expr->arguments();
3641 DCHECK(args->length() == 1);
3643 VisitForAccumulatorValue(args->at(0));
3645 Label materialize_true, materialize_false;
3646 Label* if_true = NULL;
3647 Label* if_false = NULL;
3648 Label* fall_through = NULL;
3649 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3650 &if_false, &fall_through);
3652 __ JumpIfSmi(r0, if_false);
3653 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
3654 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3655 Split(eq, if_true, if_false, fall_through);
3657 context()->Plug(if_true, if_false);
3661 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3662 ZoneList<Expression*>* args = expr->arguments();
3663 DCHECK(args->length() == 1);
3665 VisitForAccumulatorValue(args->at(0));
3667 Label materialize_true, materialize_false;
3668 Label* if_true = NULL;
3669 Label* if_false = NULL;
3670 Label* fall_through = NULL;
3671 context()->PrepareTest(&materialize_true, &materialize_false,
3672 &if_true, &if_false, &fall_through);
3674 __ JumpIfSmi(r0, if_false);
3675 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3676 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3677 Split(eq, if_true, if_false, fall_through);
3679 context()->Plug(if_true, if_false);
3683 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3684 ZoneList<Expression*>* args = expr->arguments();
3685 DCHECK(args->length() == 1);
3687 VisitForAccumulatorValue(args->at(0));
3689 Label materialize_true, materialize_false;
3690 Label* if_true = NULL;
3691 Label* if_false = NULL;
3692 Label* fall_through = NULL;
3693 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3694 &if_false, &fall_through);
3696 __ JumpIfSmi(r0, if_false);
3698 Register type_reg = r2;
3699 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3700 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3701 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3702 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
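// The sub above biases the instance type so that a single unsigned 'ls'
// comparison checks whether it lies in [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].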
3703 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3704 Split(ls, if_true, if_false, fall_through);
3706 context()->Plug(if_true, if_false);
3710 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3711 DCHECK(expr->arguments()->length() == 0);
3713 Label materialize_true, materialize_false;
3714 Label* if_true = NULL;
3715 Label* if_false = NULL;
3716 Label* fall_through = NULL;
3717 context()->PrepareTest(&materialize_true, &materialize_false,
3718 &if_true, &if_false, &fall_through);
3720 // Get the frame pointer for the calling frame.
3721 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3723 // Skip the arguments adaptor frame if it exists.
3724 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3725 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3726 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
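// The load above is conditional on eq: r2 only advances to the adaptor's
// caller frame pointer when the calling frame really is an arguments adaptor.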
3728 // Check the marker in the calling frame.
3729 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3730 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3731 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3732 Split(eq, if_true, if_false, fall_through);
3734 context()->Plug(if_true, if_false);
3738 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3739 ZoneList<Expression*>* args = expr->arguments();
3740 DCHECK(args->length() == 2);
3742 // Load the two objects into registers and perform the comparison.
3743 VisitForStackValue(args->at(0));
3744 VisitForAccumulatorValue(args->at(1));
3746 Label materialize_true, materialize_false;
3747 Label* if_true = NULL;
3748 Label* if_false = NULL;
3749 Label* fall_through = NULL;
3750 context()->PrepareTest(&materialize_true, &materialize_false,
3751 &if_true, &if_false, &fall_through);
3755 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3756 Split(eq, if_true, if_false, fall_through);
3758 context()->Plug(if_true, if_false);
3762 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3763 ZoneList<Expression*>* args = expr->arguments();
3764 DCHECK(args->length() == 1);
3766 // ArgumentsAccessStub expects the key in r1 and the formal
3767 // parameter count in r0.
3768 VisitForAccumulatorValue(args->at(0));
3770 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3771 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3773 context()->Plug(r0);
3777 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3778 DCHECK(expr->arguments()->length() == 0);
3780 // Get the number of formal parameters.
3781 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3783 // Check if the calling frame is an arguments adaptor frame.
3784 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3785 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3786 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3788 // Arguments adaptor case: Read the arguments length from the adaptor frame.
3790 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
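// Conditional on eq: r0 keeps the formal parameter count unless an arguments
// adaptor frame supplied an actual argument count.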
3792 context()->Plug(r0);
3796 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3797 ZoneList<Expression*>* args = expr->arguments();
3798 DCHECK(args->length() == 1);
3799 Label done, null, function, non_function_constructor;
3801 VisitForAccumulatorValue(args->at(0));
3803 // If the object is a smi, we return null.
3804 __ JumpIfSmi(r0, &null);
3806 // Check that the object is a JS object but take special care of JS
3807 // functions to make sure they have 'Function' as their class.
3808 // Assume that there are only two callable types, and one of them is at
3809 // either end of the type range for JS object types. Saves extra comparisons.
3810 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3811 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3812 // Map is now in r0.
3814 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3815 FIRST_SPEC_OBJECT_TYPE + 1);
3816 __ b(eq, &function);
3818 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3819 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3820 LAST_SPEC_OBJECT_TYPE - 1);
3821 __ b(eq, &function);
3822 // Assume that there is no larger type.
3823 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3825 // Check if the constructor in the map is a JS function.
3826 Register instance_type = r2;
3827 __ GetMapConstructor(r0, r0, r1, instance_type);
3828 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3829 __ b(ne, &non_function_constructor);
3831 // r0 now contains the constructor function. Grab the
3832 // instance class name from there.
3833 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3834 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3837 // Functions have class 'Function'.
3839 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3842 // Objects with a non-function constructor have class 'Object'.
3843 __ bind(&non_function_constructor);
3844 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3847 // Non-JS objects have class null.
3849 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3854 context()->Plug(r0);
3858 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3859 ZoneList<Expression*>* args = expr->arguments();
3860 DCHECK(args->length() == 1);
3861 VisitForAccumulatorValue(args->at(0)); // Load the object.
3864 // If the object is a smi return the object.
3865 __ JumpIfSmi(r0, &done);
3866 // If the object is not a value type, return the object.
3867 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3868 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
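// The load is predicated on eq, so r0 is only replaced with the wrapped value
// when the object really is a JSValue wrapper.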
3871 context()->Plug(r0);
3875 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3876 ZoneList<Expression*>* args = expr->arguments();
3877 DCHECK_EQ(1, args->length());
3879 VisitForAccumulatorValue(args->at(0));
3881 Label materialize_true, materialize_false;
3882 Label* if_true = nullptr;
3883 Label* if_false = nullptr;
3884 Label* fall_through = nullptr;
3885 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3886 &if_false, &fall_through);
3888 __ JumpIfSmi(r0, if_false);
3889 __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE);
3890 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3891 Split(eq, if_true, if_false, fall_through);
3893 context()->Plug(if_true, if_false);
3897 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3898 ZoneList<Expression*>* args = expr->arguments();
3899 DCHECK(args->length() == 2);
3900 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3901 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3903 VisitForAccumulatorValue(args->at(0)); // Load the object.
3905 Register object = r0;
3906 Register result = r0;
3907 Register scratch0 = r9;
3908 Register scratch1 = r1;
3910 if (index->value() == 0) {
3911 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3913 Label runtime, done;
3914 if (index->value() < JSDate::kFirstUncachedField) {
3915 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3916 __ mov(scratch1, Operand(stamp));
3917 __ ldr(scratch1, MemOperand(scratch1));
3918 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3919 __ cmp(scratch1, scratch0);
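// A matching date cache stamp means the cached fields stored in the JSDate
// object are still valid and can be read directly; otherwise we fall back to
// the C function call below.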
3921 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3922 kPointerSize * index->value()));
3926 __ PrepareCallCFunction(2, scratch1);
3927 __ mov(r1, Operand(index));
3928 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3932 context()->Plug(result);
3936 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3937 ZoneList<Expression*>* args = expr->arguments();
3938 DCHECK_EQ(3, args->length());
3940 Register string = r0;
3941 Register index = r1;
3942 Register value = r2;
3944 VisitForStackValue(args->at(0)); // index
3945 VisitForStackValue(args->at(1)); // value
3946 VisitForAccumulatorValue(args->at(2)); // string
3947 __ Pop(index, value);
3949 if (FLAG_debug_code) {
3951 __ Check(eq, kNonSmiValue);
3953 __ Check(eq, kNonSmiIndex);
3954 __ SmiUntag(index, index);
3955 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3956 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3957 __ SmiTag(index, index);
3960 __ SmiUntag(value, value);
3963 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
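// The index is still a smi; shifting it right by kSmiTagSize as part of the
// address computation untags it, yielding a byte offset into the string.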
3964 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3965 context()->Plug(string);
3969 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3970 ZoneList<Expression*>* args = expr->arguments();
3971 DCHECK_EQ(3, args->length());
3973 Register string = r0;
3974 Register index = r1;
3975 Register value = r2;
3977 VisitForStackValue(args->at(0)); // index
3978 VisitForStackValue(args->at(1)); // value
3979 VisitForAccumulatorValue(args->at(2)); // string
3980 __ Pop(index, value);
3982 if (FLAG_debug_code) {
3984 __ Check(eq, kNonSmiValue);
3986 __ Check(eq, kNonSmiIndex);
3987 __ SmiUntag(index, index);
3988 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3989 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3990 __ SmiTag(index, index);
3993 __ SmiUntag(value, value);
3996 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3997 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
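// With a one-bit smi tag, the smi-tagged index is already twice the element
// index, which is exactly the byte offset of a two-byte character.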
3998 __ strh(value, MemOperand(ip, index));
3999 context()->Plug(string);
4003 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4004 ZoneList<Expression*>* args = expr->arguments();
4005 DCHECK(args->length() == 2);
4006 VisitForStackValue(args->at(0)); // Load the object.
4007 VisitForAccumulatorValue(args->at(1)); // Load the value.
4008 __ pop(r1); // r0 = value. r1 = object.
4011 // If the object is a smi, return the value.
4012 __ JumpIfSmi(r1, &done);
4014 // If the object is not a value type, return the value.
4015 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
4019 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
4020 // Update the write barrier. Save the value as it will be
4021 // overwritten by the write barrier code and is needed afterward.
4023 __ RecordWriteField(
4024 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
4027 context()->Plug(r0);
4031 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4032 ZoneList<Expression*>* args = expr->arguments();
4033 DCHECK_EQ(args->length(), 1);
4034 // Load the argument into r0 and call the stub.
4035 VisitForAccumulatorValue(args->at(0));
4037 NumberToStringStub stub(isolate());
4039 context()->Plug(r0);
4043 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4044 ZoneList<Expression*>* args = expr->arguments();
4045 DCHECK(args->length() == 1);
4046 VisitForAccumulatorValue(args->at(0));
4049 StringCharFromCodeGenerator generator(r0, r1);
4050 generator.GenerateFast(masm_);
4053 NopRuntimeCallHelper call_helper;
4054 generator.GenerateSlow(masm_, call_helper);
4057 context()->Plug(r1);
4061 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4062 ZoneList<Expression*>* args = expr->arguments();
4063 DCHECK(args->length() == 2);
4064 VisitForStackValue(args->at(0));
4065 VisitForAccumulatorValue(args->at(1));
4067 Register object = r1;
4068 Register index = r0;
4069 Register result = r3;
4073 Label need_conversion;
4074 Label index_out_of_range;
4076 StringCharCodeAtGenerator generator(object,
4081 &index_out_of_range,
4082 STRING_INDEX_IS_NUMBER);
4083 generator.GenerateFast(masm_);
4086 __ bind(&index_out_of_range);
4087 // When the index is out of range, the spec requires us to return NaN.
4089 __ LoadRoot(result, Heap::kNanValueRootIndex);
4092 __ bind(&need_conversion);
4093 // Load the undefined value into the result register, which will
4094 // trigger conversion.
4095 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4098 NopRuntimeCallHelper call_helper;
4099 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4102 context()->Plug(result);
4106 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4107 ZoneList<Expression*>* args = expr->arguments();
4108 DCHECK(args->length() == 2);
4109 VisitForStackValue(args->at(0));
4110 VisitForAccumulatorValue(args->at(1));
4112 Register object = r1;
4113 Register index = r0;
4114 Register scratch = r3;
4115 Register result = r0;
4119 Label need_conversion;
4120 Label index_out_of_range;
4122 StringCharAtGenerator generator(object,
4128 &index_out_of_range,
4129 STRING_INDEX_IS_NUMBER);
4130 generator.GenerateFast(masm_);
4133 __ bind(&index_out_of_range);
4134 // When the index is out of range, the spec requires us to return
4135 // the empty string.
4136 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4139 __ bind(&need_conversion);
4140 // Move smi zero into the result register, which will trigger conversion.
4142 __ mov(result, Operand(Smi::FromInt(0)));
4145 NopRuntimeCallHelper call_helper;
4146 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4149 context()->Plug(result);
4153 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4154 ZoneList<Expression*>* args = expr->arguments();
4155 DCHECK_EQ(2, args->length());
4156 VisitForStackValue(args->at(0));
4157 VisitForAccumulatorValue(args->at(1));
4160 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4162 context()->Plug(r0);
4166 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4167 ZoneList<Expression*>* args = expr->arguments();
4168 DCHECK(args->length() >= 2);
4170 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4171 for (int i = 0; i < arg_count + 1; i++) {
4172 VisitForStackValue(args->at(i));
4174 VisitForAccumulatorValue(args->last()); // Function.
4176 Label runtime, done;
4177 // Check for non-function argument (including proxy).
4178 __ JumpIfSmi(r0, &runtime);
4179 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4182 // InvokeFunction requires the function in r1. Move it in there.
4183 __ mov(r1, result_register());
4184 ParameterCount count(arg_count);
4185 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
4186 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4191 __ CallRuntime(Runtime::kCall, args->length());
4194 context()->Plug(r0);
4198 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4199 ZoneList<Expression*>* args = expr->arguments();
4200 DCHECK(args->length() == 2);
4203 VisitForStackValue(args->at(0));
4206 VisitForStackValue(args->at(1));
4207 __ CallRuntime(Runtime::kGetPrototype, 1);
4208 __ Push(result_register());
4210 // Load original constructor into r4.
4211 __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
4213 // Check if the calling frame is an arguments adaptor frame.
4214 Label adaptor_frame, args_set_up, runtime;
4215 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4216 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4217 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4218 __ b(eq, &adaptor_frame);
4219 // The default constructor has no arguments, so no adaptor frame means no arguments.
4220 __ mov(r0, Operand::Zero());
4223 // Copy arguments from adaptor frame.
4225 __ bind(&adaptor_frame);
4226 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4227 __ SmiUntag(r1, r1);
4230 // Get arguments pointer in r2.
4231 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
4232 __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
4235 // Pre-decrement r2 with kPointerSize on each iteration.
4236 // Pre-decrement in order to skip receiver.
4237 __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
4239 __ sub(r1, r1, Operand(1));
4240 __ cmp(r1, Operand::Zero());
4244 __ bind(&args_set_up);
4245 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
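// sp + r0 * kPointerSize points just past the copied arguments, i.e. at the
// prototype pushed above, which is the super constructor to call.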
4246 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
4248 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4249 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4253 context()->Plug(result_register());
4257 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4258 RegExpConstructResultStub stub(isolate());
4259 ZoneList<Expression*>* args = expr->arguments();
4260 DCHECK(args->length() == 3);
4261 VisitForStackValue(args->at(0));
4262 VisitForStackValue(args->at(1));
4263 VisitForAccumulatorValue(args->at(2));
4267 context()->Plug(r0);
4271 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4272 ZoneList<Expression*>* args = expr->arguments();
4273 DCHECK_EQ(2, args->length());
4274 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4275 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4277 Handle<FixedArray> jsfunction_result_caches(
4278 isolate()->native_context()->jsfunction_result_caches());
4279 if (jsfunction_result_caches->length() <= cache_id) {
4280 __ Abort(kAttemptToUseUndefinedCache);
4281 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4282 context()->Plug(r0);
4286 VisitForAccumulatorValue(args->at(1));
4289 Register cache = r1;
4290 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4291 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4292 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4294 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4297 Label done, not_found;
4298 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4299 // r2 now holds finger offset as a smi.
4300 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4301 // r3 now points to the start of fixed array elements.
4302 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
4303 // Note side effect of PreIndex: r3 now points to the key of the pair.
4305 __ b(ne, &not_found);
4307 __ ldr(r0, MemOperand(r3, kPointerSize));
4310 __ bind(&not_found);
4311 // Call runtime to perform the lookup.
4312 __ Push(cache, key);
4313 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4316 context()->Plug(r0);
4320 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4321 ZoneList<Expression*>* args = expr->arguments();
4322 VisitForAccumulatorValue(args->at(0));
4324 Label materialize_true, materialize_false;
4325 Label* if_true = NULL;
4326 Label* if_false = NULL;
4327 Label* fall_through = NULL;
4328 context()->PrepareTest(&materialize_true, &materialize_false,
4329 &if_true, &if_false, &fall_through);
4331 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4332 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
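// The tested bits are clear (eq) exactly when the hash field contains a
// cached array index.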
4333 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4334 Split(eq, if_true, if_false, fall_through);
4336 context()->Plug(if_true, if_false);
4340 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4341 ZoneList<Expression*>* args = expr->arguments();
4342 DCHECK(args->length() == 1);
4343 VisitForAccumulatorValue(args->at(0));
4345 __ AssertString(r0);
4347 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4348 __ IndexFromHash(r0, r0);
4350 context()->Plug(r0);
4354 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4355 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4356 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4357 one_char_separator_loop_entry, long_separator_loop;
4358 ZoneList<Expression*>* args = expr->arguments();
4359 DCHECK(args->length() == 2);
4360 VisitForStackValue(args->at(1));
4361 VisitForAccumulatorValue(args->at(0));
4363 // All aliases of the same register have disjoint lifetimes.
4364 Register array = r0;
4365 Register elements = no_reg; // Will be r0.
4366 Register result = no_reg; // Will be r0.
4367 Register separator = r1;
4368 Register array_length = r2;
4369 Register result_pos = no_reg; // Will be r2
4370 Register string_length = r3;
4371 Register string = r4;
4372 Register element = r5;
4373 Register elements_end = r6;
4374 Register scratch = r9;
4376 // Separator operand is on the stack.
4379 // Check that the array is a JSArray.
4380 __ JumpIfSmi(array, &bailout);
4381 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
4384 // Check that the array has fast elements.
4385 __ CheckFastElements(scratch, array_length, &bailout);
4387 // If the array has length zero, return the empty string.
4388 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4389 __ SmiUntag(array_length, SetCC);
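// SmiUntag with SetCC also sets the flags, so a zero-length array falls
// through here to return the empty string.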
4390 __ b(ne, &non_trivial_array);
4391 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
4394 __ bind(&non_trivial_array);
4396 // Get the FixedArray containing array's elements.
4398 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4399 array = no_reg; // End of array's live range.
4401 // Check that all array elements are sequential one-byte strings, and
4402 // accumulate the sum of their lengths, as a smi-encoded value.
4403 __ mov(string_length, Operand::Zero());
4405 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4406 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4407 // Loop condition: while (element < elements_end).
4408 // Live values in registers:
4409 // elements: Fixed array of strings.
4410 // array_length: Length of the fixed array of strings (not smi)
4411 // separator: Separator string
4412 // string_length: Accumulated sum of string lengths (smi).
4413 // element: Current array element.
4414 // elements_end: Array end.
4415 if (generate_debug_code_) {
4416 __ cmp(array_length, Operand::Zero());
4417 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4420 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4421 __ JumpIfSmi(string, &bailout);
4422 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
4423 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4424 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4425 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4426 __ add(string_length, string_length, Operand(scratch), SetCC);
4428 __ cmp(element, elements_end);
4431 // If array_length is 1, return elements[0], a string.
4432 __ cmp(array_length, Operand(1));
4433 __ b(ne, &not_size_one_array);
4434 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4437 __ bind(&not_size_one_array);
4439 // Live values in registers:
4440 // separator: Separator string
4441 // array_length: Length of the array.
4442 // string_length: Sum of string lengths (smi).
4443 // elements: FixedArray of strings.
4445 // Check that the separator is a flat one-byte string.
4446 __ JumpIfSmi(separator, &bailout);
4447 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4448 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4449 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4451 // Add (separator length times array_length) - separator length to the
4452 // string_length to get the length of the result string. array_length is not
4453 // a smi but the other values are, so the result is a smi.
4454 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4455 __ sub(string_length, string_length, Operand(scratch));
4456 __ smull(scratch, ip, array_length, scratch);
4457 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are zero.
4459 __ cmp(ip, Operand::Zero());
4461 __ tst(scratch, Operand(0x80000000));
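// The low word of the product must also have a clear sign bit, otherwise the
// accumulated length would not fit in a smi.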
4463 __ add(string_length, string_length, Operand(scratch), SetCC);
4465 __ SmiUntag(string_length);
4467 // Get first element in the array to free up the elements register to be used for the result.
4470 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4471 result = elements; // End of live range for elements.
4473 // Live values in registers:
4474 // element: First array element
4475 // separator: Separator string
4476 // string_length: Length of result string (not smi)
4477 // array_length: Length of the array.
4478 __ AllocateOneByteString(result, string_length, scratch,
4479 string, // used as scratch
4480 elements_end, // used as scratch
4482 // Prepare for looping. Set up elements_end to point to the end of the array. Set
4483 // result_pos to the position in the result where the first character will be written.
4485 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4486 result_pos = array_length; // End of live range for array_length.
4487 array_length = no_reg;
4490 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4492 // Check the length of the separator.
4493 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4494 __ cmp(scratch, Operand(Smi::FromInt(1)));
4495 __ b(eq, &one_char_separator);
4496 __ b(gt, &long_separator);
4498 // Empty separator case
4499 __ bind(&empty_separator_loop);
4500 // Live values in registers:
4501 // result_pos: the position to which we are currently copying characters.
4502 // element: Current array element.
4503 // elements_end: Array end.
4505 // Copy next array element to the result.
4506 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4507 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4508 __ SmiUntag(string_length);
4511 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4512 __ CopyBytes(string, result_pos, string_length, scratch);
4513 __ cmp(element, elements_end);
4514 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4515 DCHECK(result.is(r0));
4518 // One-character separator case
4519 __ bind(&one_char_separator);
4520 // Replace separator with its one-byte character value.
4521 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4522 // Jump into the loop after the code that copies the separator, so the first
4523 // element is not preceded by a separator.
4524 __ jmp(&one_char_separator_loop_entry);
4526 __ bind(&one_char_separator_loop);
4527 // Live values in registers:
4528 // result_pos: the position to which we are currently copying characters.
4529 // element: Current array element.
4530 // elements_end: Array end.
4531 // separator: Single separator one-byte char (in lower byte).
4533 // Copy the separator character to the result.
4534 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4536 // Copy next array element to the result.
4537 __ bind(&one_char_separator_loop_entry);
4538 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4539 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4540 __ SmiUntag(string_length);
4543 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4544 __ CopyBytes(string, result_pos, string_length, scratch);
4545 __ cmp(element, elements_end);
4546 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4547 DCHECK(result.is(r0));
4550 // Long separator case (separator is more than one character). Entry is at the
4551 // label long_separator below.
4552 __ bind(&long_separator_loop);
4553 // Live values in registers:
4554 // result_pos: the position to which we are currently copying characters.
4555 // element: Current array element.
4556 // elements_end: Array end.
4557 // separator: Separator string.
4559 // Copy the separator to the result.
4560 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4561 __ SmiUntag(string_length);
4564 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4565 __ CopyBytes(string, result_pos, string_length, scratch);
4567 __ bind(&long_separator);
4568 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4569 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4570 __ SmiUntag(string_length);
4573 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4574 __ CopyBytes(string, result_pos, string_length, scratch);
4575 __ cmp(element, elements_end);
4576 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4577 DCHECK(result.is(r0));
4581 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4583 context()->Plug(r0);
4587 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4588 DCHECK(expr->arguments()->length() == 0);
4589 ExternalReference debug_is_active =
4590 ExternalReference::debug_is_active_address(isolate());
4591 __ mov(ip, Operand(debug_is_active));
4592 __ ldrb(r0, MemOperand(ip));
4594 context()->Plug(r0);
4598 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4599 // Push the builtins object as the receiver.
4600 Register receiver = LoadDescriptor::ReceiverRegister();
4601 __ ldr(receiver, GlobalObjectOperand());
4602 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4605 // Load the function from the receiver.
4606 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4607 __ mov(LoadDescriptor::SlotRegister(),
4608 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4609 CallLoadIC(NOT_INSIDE_TYPEOF);
4613 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4614 ZoneList<Expression*>* args = expr->arguments();
4615 int arg_count = args->length();
4617 SetCallPosition(expr, arg_count);
4618 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4619 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
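// The target function was pushed below the receiver and the arguments, so it
// sits arg_count + 1 slots above the stack pointer.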
4624 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4625 ZoneList<Expression*>* args = expr->arguments();
4626 int arg_count = args->length();
4628 if (expr->is_jsruntime()) {
4629 Comment cmnt(masm_, "[ CallRuntime");
4630 EmitLoadJSRuntimeFunction(expr);
4632 // Push the target function under the receiver.
4633 __ ldr(ip, MemOperand(sp, 0));
4635 __ str(r0, MemOperand(sp, kPointerSize));
4637 // Push the arguments ("left-to-right").
4638 for (int i = 0; i < arg_count; i++) {
4639 VisitForStackValue(args->at(i));
4642 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4643 EmitCallJSRuntimeFunction(expr);
4645 // Restore context register.
4646 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4648 context()->DropAndPlug(1, r0);
4651 const Runtime::Function* function = expr->function();
4652 switch (function->function_id) {
4653 #define CALL_INTRINSIC_GENERATOR(Name) \
4654 case Runtime::kInline##Name: { \
4655 Comment cmnt(masm_, "[ Inline" #Name); \
4656 return Emit##Name(expr); \
4658 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4659 #undef CALL_INTRINSIC_GENERATOR
4661 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4662 // Push the arguments ("left-to-right").
4663 for (int i = 0; i < arg_count; i++) {
4664 VisitForStackValue(args->at(i));
4667 // Call the C runtime function.
4668 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4669 __ CallRuntime(expr->function(), arg_count);
4670 context()->Plug(r0);
4677 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4678 switch (expr->op()) {
4679 case Token::DELETE: {
4680 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4681 Property* property = expr->expression()->AsProperty();
4682 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4684 if (property != NULL) {
4685 VisitForStackValue(property->obj());
4686 VisitForStackValue(property->key());
4687 __ mov(r1, Operand(Smi::FromInt(language_mode())));
4689 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4690 context()->Plug(r0);
4691 } else if (proxy != NULL) {
4692 Variable* var = proxy->var();
4693 // Delete of an unqualified identifier is disallowed in strict mode but
4694 // "delete this" is allowed.
4695 bool is_this = var->HasThisName(isolate());
4696 DCHECK(is_sloppy(language_mode()) || is_this);
4697 if (var->IsUnallocatedOrGlobalSlot()) {
4698 __ ldr(r2, GlobalObjectOperand());
4699 __ mov(r1, Operand(var->name()));
4700 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4701 __ Push(r2, r1, r0);
4702 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4703 context()->Plug(r0);
4704 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4705 // Result of deleting non-global, non-dynamic variables is false.
4706 // The subexpression does not have side effects.
4707 context()->Plug(is_this);
4709 // Non-global variable. Call the runtime to try to delete from the
4710 // context where the variable was introduced.
4711 DCHECK(!context_register().is(r2));
4712 __ mov(r2, Operand(var->name()));
4713 __ Push(context_register(), r2);
4714 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4715 context()->Plug(r0);
4718 // Result of deleting non-property, non-variable reference is true.
4719 // The subexpression may have side effects.
4720 VisitForEffect(expr->expression());
4721 context()->Plug(true);
4727 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4728 VisitForEffect(expr->expression());
4729 context()->Plug(Heap::kUndefinedValueRootIndex);
4734 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4735 if (context()->IsEffect()) {
4736 // Unary NOT has no side effects so it's only necessary to visit the
4737 // subexpression. Match the optimizing compiler by not branching.
4738 VisitForEffect(expr->expression());
4739 } else if (context()->IsTest()) {
4740 const TestContext* test = TestContext::cast(context());
4741 // The labels are swapped for the recursive call.
4742 VisitForControl(expr->expression(),
4743 test->false_label(),
4745 test->fall_through());
4746 context()->Plug(test->true_label(), test->false_label());
4748 // We handle value contexts explicitly rather than simply visiting
4749 // for control and plugging the control flow into the context,
4750 // because we need to prepare a pair of extra administrative AST ids
4751 // for the optimizing compiler.
4752 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4753 Label materialize_true, materialize_false, done;
4754 VisitForControl(expr->expression(),
4758 __ bind(&materialize_true);
4759 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4760 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4761 if (context()->IsStackValue()) __ push(r0);
4763 __ bind(&materialize_false);
4764 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4765 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4766 if (context()->IsStackValue()) __ push(r0);
4772 case Token::TYPEOF: {
4773 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4775 AccumulatorValueContext context(this);
4776 VisitForTypeofValue(expr->expression());
4779 TypeofStub typeof_stub(isolate());
4780 __ CallStub(&typeof_stub);
4781 context()->Plug(r0);
4791 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4792 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4794 Comment cmnt(masm_, "[ CountOperation");
4796 Property* prop = expr->expression()->AsProperty();
4797 LhsKind assign_type = Property::GetAssignType(prop);
4799 // Evaluate expression and get value.
4800 if (assign_type == VARIABLE) {
4801 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4802 AccumulatorValueContext context(this);
4803 EmitVariableLoad(expr->expression()->AsVariableProxy());
4805 // Reserve space for result of postfix operation.
4806 if (expr->is_postfix() && !context()->IsEffect()) {
4807 __ mov(ip, Operand(Smi::FromInt(0)));
4810 switch (assign_type) {
4811 case NAMED_PROPERTY: {
4812 // Put the object both on the stack and in the register.
4813 VisitForStackValue(prop->obj());
4814 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4815 EmitNamedPropertyLoad(prop);
4819 case NAMED_SUPER_PROPERTY: {
4820 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4821 VisitForAccumulatorValue(
4822 prop->obj()->AsSuperPropertyReference()->home_object());
4823 __ Push(result_register());
4824 const Register scratch = r1;
4825 __ ldr(scratch, MemOperand(sp, kPointerSize));
4827 __ Push(result_register());
4828 EmitNamedSuperPropertyLoad(prop);
4832 case KEYED_SUPER_PROPERTY: {
4833 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4835 prop->obj()->AsSuperPropertyReference()->home_object());
4836 VisitForAccumulatorValue(prop->key());
4837 __ Push(result_register());
4838 const Register scratch = r1;
4839 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4841 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4843 __ Push(result_register());
4844 EmitKeyedSuperPropertyLoad(prop);
4848 case KEYED_PROPERTY: {
4849 VisitForStackValue(prop->obj());
4850 VisitForStackValue(prop->key());
4851 __ ldr(LoadDescriptor::ReceiverRegister(),
4852 MemOperand(sp, 1 * kPointerSize));
4853 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4854 EmitKeyedPropertyLoad(prop);
4863 // We need a second deoptimization point after loading the value
4864 // in case evaluating the property load may have a side effect.
4865 if (assign_type == VARIABLE) {
4866 PrepareForBailout(expr->expression(), TOS_REG);
4868 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4871 // Inline smi case if we are in a loop.
4872 Label stub_call, done;
4873 JumpPatchSite patch_site(masm_);
4875 int count_value = expr->op() == Token::INC ? 1 : -1;
4876 if (ShouldInlineSmiCase(expr->op())) {
4878 patch_site.EmitJumpIfNotSmi(r0, &slow);
4880 // Save result for postfix expressions.
4881 if (expr->is_postfix()) {
4882 if (!context()->IsEffect()) {
4883 // Save the result on the stack. If we have a named or keyed property
4884 // we store the result under the receiver that is currently on top of the stack.
4886 switch (assign_type) {
4890 case NAMED_PROPERTY:
4891 __ str(r0, MemOperand(sp, kPointerSize));
4893 case NAMED_SUPER_PROPERTY:
4894 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4896 case KEYED_PROPERTY:
4897 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4899 case KEYED_SUPER_PROPERTY:
4900 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4906 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
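// SetCC makes the smi addition set the overflow flag; on overflow the
// addition is undone below and the BinaryOpIC stub is called instead.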
4908 // Call stub. Undo operation first.
4909 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4913 if (!is_strong(language_mode())) {
4914 ToNumberStub convert_stub(isolate());
4915 __ CallStub(&convert_stub);
4916 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4919 // Save result for postfix expressions.
4920 if (expr->is_postfix()) {
4921 if (!context()->IsEffect()) {
4922 // Save the result on the stack. If we have a named or keyed property
4923 // we store the result under the receiver that is currently on top of the stack.
4925 switch (assign_type) {
4929 case NAMED_PROPERTY:
4930 __ str(r0, MemOperand(sp, kPointerSize));
4932 case NAMED_SUPER_PROPERTY:
4933 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4935 case KEYED_PROPERTY:
4936 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4938 case KEYED_SUPER_PROPERTY:
4939 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4946 __ bind(&stub_call);
4948 __ mov(r0, Operand(Smi::FromInt(count_value)));
4950 SetExpressionPosition(expr);
4952 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4953 strength(language_mode())).code();
4954 CallIC(code, expr->CountBinOpFeedbackId());
4955 patch_site.EmitPatchInfo();
4958 if (is_strong(language_mode())) {
4959 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4961 // Store the value returned in r0.
4962 switch (assign_type) {
4964 if (expr->is_postfix()) {
4965 { EffectContext context(this);
4966 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4967 Token::ASSIGN, expr->CountSlot());
4968 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4971 // For all contexts except EffectContext we have the result on
4972 // top of the stack.
4973 if (!context()->IsEffect()) {
4974 context()->PlugTOS();
4977 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4978 Token::ASSIGN, expr->CountSlot());
4979 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4980 context()->Plug(r0);
4983 case NAMED_PROPERTY: {
4984 __ mov(StoreDescriptor::NameRegister(),
4985 Operand(prop->key()->AsLiteral()->value()));
4986 __ pop(StoreDescriptor::ReceiverRegister());
4987 if (FLAG_vector_stores) {
4988 EmitLoadStoreICSlot(expr->CountSlot());
4991 CallStoreIC(expr->CountStoreFeedbackId());
4993 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4994 if (expr->is_postfix()) {
4995 if (!context()->IsEffect()) {
4996 context()->PlugTOS();
4999 context()->Plug(r0);
5003 case NAMED_SUPER_PROPERTY: {
5004 EmitNamedSuperPropertyStore(prop);
5005 if (expr->is_postfix()) {
5006 if (!context()->IsEffect()) {
5007 context()->PlugTOS();
5010 context()->Plug(r0);
5014 case KEYED_SUPER_PROPERTY: {
5015 EmitKeyedSuperPropertyStore(prop);
5016 if (expr->is_postfix()) {
5017 if (!context()->IsEffect()) {
5018 context()->PlugTOS();
5021 context()->Plug(r0);
5025 case KEYED_PROPERTY: {
5026 __ Pop(StoreDescriptor::ReceiverRegister(),
5027 StoreDescriptor::NameRegister());
5029 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5030 if (FLAG_vector_stores) {
5031 EmitLoadStoreICSlot(expr->CountSlot());
5034 CallIC(ic, expr->CountStoreFeedbackId());
5036 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5037 if (expr->is_postfix()) {
5038 if (!context()->IsEffect()) {
5039 context()->PlugTOS();
5042 context()->Plug(r0);
5050 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5051 Expression* sub_expr,
5052 Handle<String> check) {
5053 Label materialize_true, materialize_false;
5054 Label* if_true = NULL;
5055 Label* if_false = NULL;
5056 Label* fall_through = NULL;
5057 context()->PrepareTest(&materialize_true, &materialize_false,
5058 &if_true, &if_false, &fall_through);
5060 { AccumulatorValueContext context(this);
5061 VisitForTypeofValue(sub_expr);
5063 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5065 Factory* factory = isolate()->factory();
5066 if (String::Equals(check, factory->number_string())) {
5067 __ JumpIfSmi(r0, if_true);
5068 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5069 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5071 Split(eq, if_true, if_false, fall_through);
5072 } else if (String::Equals(check, factory->string_string())) {
5073 __ JumpIfSmi(r0, if_false);
5074 // Check for undetectable objects => false.
5075 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
5077 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5078 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5079 Split(eq, if_true, if_false, fall_through);
5080 } else if (String::Equals(check, factory->symbol_string())) {
5081 __ JumpIfSmi(r0, if_false);
5082 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
5083 Split(eq, if_true, if_false, fall_through);
5084 } else if (String::Equals(check, factory->float32x4_string())) {
5085 __ JumpIfSmi(r0, if_false);
5086 __ CompareObjectType(r0, r0, r1, FLOAT32X4_TYPE);
5087 Split(eq, if_true, if_false, fall_through);
5088 } else if (String::Equals(check, factory->boolean_string())) {
5089 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
5091 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
5092 Split(eq, if_true, if_false, fall_through);
5093 } else if (String::Equals(check, factory->undefined_string())) {
5094 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
5096 __ JumpIfSmi(r0, if_false);
5097 // Check for undetectable objects => true.
5098 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5099 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5100 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5101 Split(ne, if_true, if_false, fall_through);
5103 } else if (String::Equals(check, factory->function_string())) {
5104 __ JumpIfSmi(r0, if_false);
5105 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5106 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
5108 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
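// typeof reports "function" for both ordinary JS functions and function
// proxies.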
5109 Split(eq, if_true, if_false, fall_through);
5110 } else if (String::Equals(check, factory->object_string())) {
5111 __ JumpIfSmi(r0, if_false);
5112 __ CompareRoot(r0, Heap::kNullValueRootIndex);
5114 // Check for JS objects => true.
5115 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5117 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5119 // Check for undetectable objects => false.
5120 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5121 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5122 Split(eq, if_true, if_false, fall_through);
5124 if (if_false != fall_through) __ jmp(if_false);
5126 context()->Plug(if_true, if_false);
5130 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5131 Comment cmnt(masm_, "[ CompareOperation");
5132 SetExpressionPosition(expr);
5134 // First we try a fast inlined version of the compare when one of
5135 // the operands is a literal.
5136 if (TryLiteralCompare(expr)) return;
5138 // Always perform the comparison for its control flow. Pack the result
5139 // into the expression's context after the comparison is performed.
5140 Label materialize_true, materialize_false;
5141 Label* if_true = NULL;
5142 Label* if_false = NULL;
5143 Label* fall_through = NULL;
5144 context()->PrepareTest(&materialize_true, &materialize_false,
5145 &if_true, &if_false, &fall_through);
5147 Token::Value op = expr->op();
5148 VisitForStackValue(expr->left());
5151 VisitForStackValue(expr->right());
5152 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5153 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5154 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5156 Split(eq, if_true, if_false, fall_through);
5159 case Token::INSTANCEOF: {
5160 VisitForStackValue(expr->right());
5161 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5163 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5164 // The stub returns 0 for true.
5166 Split(eq, if_true, if_false, fall_through);
5171 VisitForAccumulatorValue(expr->right());
5172 Condition cond = CompareIC::ComputeCondition(op);
5175 bool inline_smi_code = ShouldInlineSmiCase(op);
5176 JumpPatchSite patch_site(masm_);
5177 if (inline_smi_code) {
5179 __ orr(r2, r0, Operand(r1));
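// Or-ing the two operands lets a single smi check cover both of them.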
5180 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
5182 Split(cond, if_true, if_false, NULL);
5183 __ bind(&slow_case);
5186 Handle<Code> ic = CodeFactory::CompareIC(
5187 isolate(), op, strength(language_mode())).code();
5188 CallIC(ic, expr->CompareOperationFeedbackId());
5189 patch_site.EmitPatchInfo();
5190 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5191 __ cmp(r0, Operand::Zero());
5192 Split(cond, if_true, if_false, fall_through);
5196 // Convert the result of the comparison into one expected for this
5197 // expression's context.
5198 context()->Plug(if_true, if_false);
5202 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5203 Expression* sub_expr,
5205 Label materialize_true, materialize_false;
5206 Label* if_true = NULL;
5207 Label* if_false = NULL;
5208 Label* fall_through = NULL;
5209 context()->PrepareTest(&materialize_true, &materialize_false,
5210 &if_true, &if_false, &fall_through);
5212 VisitForAccumulatorValue(sub_expr);
5213 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5214 if (expr->op() == Token::EQ_STRICT) {
5215 Heap::RootListIndex nil_value = nil == kNullValue ?
5216 Heap::kNullValueRootIndex :
5217 Heap::kUndefinedValueRootIndex;
5218 __ LoadRoot(r1, nil_value);
5220 Split(eq, if_true, if_false, fall_through);
5222 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5223 CallIC(ic, expr->CompareOperationFeedbackId());
5224 __ cmp(r0, Operand(0));
5225 Split(ne, if_true, if_false, fall_through);
5227 context()->Plug(if_true, if_false);
5231 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5232 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5233 context()->Plug(r0);
5237 Register FullCodeGenerator::result_register() {
5242 Register FullCodeGenerator::context_register() {
5247 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5248 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5249 __ str(value, MemOperand(fp, frame_offset));
5253 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5254 __ ldr(dst, ContextOperand(cp, context_index));
5258 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5259 Scope* declaration_scope = scope()->DeclarationScope();
5260 if (declaration_scope->is_script_scope() ||
5261 declaration_scope->is_module_scope()) {
5262 // Contexts nested in the native context have a canonical empty function
5263 // as their closure, not the anonymous closure containing the global
5264 // code. Pass a smi sentinel and let the runtime look up the empty function.
5266 __ mov(ip, Operand(Smi::FromInt(0)));
5267 } else if (declaration_scope->is_eval_scope()) {
5268 // Contexts created by a call to eval have the same closure as the
5269 // context calling eval, not the anonymous closure containing the eval
5270 // code. Fetch it from the context.
5271 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5273 DCHECK(declaration_scope->is_function_scope());
5274 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5280 // ----------------------------------------------------------------------------
5281 // Non-local control flow support.
5283 void FullCodeGenerator::EnterFinallyBlock() {
5284 DCHECK(!result_register().is(r1));
5285 // Store result register while executing finally block.
5286 __ push(result_register());
5287 // Cook the return address in the link register onto the stack (smi-encoded Code* delta).
5288 __ sub(r1, lr, Operand(masm_->CodeObject()));
5291 // Store the cooked return address while executing finally block.
5294 // Store pending message while executing finally block.
5295 ExternalReference pending_message_obj =
5296 ExternalReference::address_of_pending_message_obj(isolate());
5297 __ mov(ip, Operand(pending_message_obj));
5298 __ ldr(r1, MemOperand(ip));
5301 ClearPendingMessage();
5305 void FullCodeGenerator::ExitFinallyBlock() {
5306 DCHECK(!result_register().is(r1));
5307 // Restore pending message from stack.
5309 ExternalReference pending_message_obj =
5310 ExternalReference::address_of_pending_message_obj(isolate());
5311 __ mov(ip, Operand(pending_message_obj));
5312 __ str(r1, MemOperand(ip));
5314 // Restore result register from stack.
5317 // Uncook return address and return.
5318 __ pop(result_register());
5320 __ add(pc, r1, Operand(masm_->CodeObject()));
5324 void FullCodeGenerator::ClearPendingMessage() {
5325 DCHECK(!result_register().is(r1));
5326 ExternalReference pending_message_obj =
5327 ExternalReference::address_of_pending_message_obj(isolate());
5328 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
5329 __ mov(ip, Operand(pending_message_obj));
5330 __ str(r1, MemOperand(ip));
5334 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5335 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5336 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5337 Operand(SmiFromSlot(slot)));
5344 static Address GetInterruptImmediateLoadAddress(Address pc) {
5345 Address load_address = pc - 2 * Assembler::kInstrSize;
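// pc points just past the back edge's 'blx ip', so by default the ip load is
// the instruction immediately before the blx.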
5346 if (!FLAG_enable_embedded_constant_pool) {
5347 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
5348 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
5349 // This is an extended constant pool lookup.
5350 if (CpuFeatures::IsSupported(ARMv7)) {
5351 load_address -= 2 * Assembler::kInstrSize;
5352 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5353 DCHECK(Assembler::IsMovT(
5354 Memory::int32_at(load_address + Assembler::kInstrSize)));
5356 load_address -= 4 * Assembler::kInstrSize;
5357 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5358 DCHECK(Assembler::IsOrrImmed(
5359 Memory::int32_at(load_address + Assembler::kInstrSize)));
5360 DCHECK(Assembler::IsOrrImmed(
5361 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5362 DCHECK(Assembler::IsOrrImmed(
5363 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
5365 } else if (CpuFeatures::IsSupported(ARMv7) &&
5366 Assembler::IsMovT(Memory::int32_at(load_address))) {
5367 // This is a movw / movt immediate load.
5368 load_address -= Assembler::kInstrSize;
5369 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5370 } else if (!CpuFeatures::IsSupported(ARMv7) &&
5371 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
5372 // This is a mov / orr immediate load.
5373 load_address -= 3 * Assembler::kInstrSize;
5374 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5375 DCHECK(Assembler::IsOrrImmed(
5376 Memory::int32_at(load_address + Assembler::kInstrSize)));
5377 DCHECK(Assembler::IsOrrImmed(
5378 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5380 // This is a small constant pool lookup.
5381 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
5383 return load_address;
5387 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5389 BackEdgeState target_state,
5390 Code* replacement_code) {
5391 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5392 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5393 CodePatcher patcher(branch_address, 1);
5394 switch (target_state) {
5397 // <decrement profiling counter>
5399 // ; load interrupt stub address into ip - either of (for ARMv7):
5400 // ; <small cp load> | <extended cp load> | <immediate load>
5401 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5402 // | movt ip, #imm | movt ip, #imm
5403 // | ldr ip, [pp, ip]
5404 // ; or (for ARMv6):
5405 // ; <small cp load> | <extended cp load> | <immediate load>
5406 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5407 // | orr ip, ip, #imm | orr ip, ip, #imm
5408 // | orr ip, ip, #imm | orr ip, ip, #imm
5409 // | orr ip, ip, #imm | orr ip, ip, #imm
5411 // <reset profiling counter>
5414 // Calculate branch offset to the ok-label - this is the difference
5415 // between the branch address and |pc| (which points at <blx ip>) plus
5416 // kProfileCounterResetSequence instructions
5417 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
5418 kProfileCounterResetSequenceLength;
5419 patcher.masm()->b(branch_offset, pl);
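// Reinstate the conditional branch so the interrupt call is skipped while the
// decremented profiling counter is still non-negative (pl).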
5422 case ON_STACK_REPLACEMENT:
5423 case OSR_AFTER_STACK_CHECK:
5424 // <decrement profiling counter>
5426 // ; load on-stack replacement address into ip - either of (for ARMv7):
5427 // ; <small cp load> | <extended cp load> | <immediate load>
5428 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5429 // | movt ip, #imm | movt ip, #imm
5430 // | ldr ip, [pp, ip]
5431 // ; or (for ARMv6):
5432 // ; <small cp load> | <extended cp load> | <immediate load>
5433 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5434 // | orr ip, ip, #imm | orr ip, ip, #imm
5435 // | orr ip, ip, #imm | orr ip, ip, #imm
5436 // | orr ip, ip, #imm | orr ip, ip, #imm
5438 // <reset profiling counter>
5440 patcher.masm()->nop();
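// A nop instead of the branch means execution always falls through to the
// call of the replacement builtin, forcing on-stack replacement.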
5444 // Replace the call address.
5445 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
5446 replacement_code->entry());
5448 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5449 unoptimized_code, pc_immediate_load_address, replacement_code);
5453 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5455 Code* unoptimized_code,
5457 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
5459 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5460 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5461 Address interrupt_address = Assembler::target_address_at(
5462 pc_immediate_load_address, unoptimized_code);
5464 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5465 DCHECK(interrupt_address ==
5466 isolate->builtins()->InterruptCheck()->entry());
5470 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5472 if (interrupt_address ==
5473 isolate->builtins()->OnStackReplacement()->entry()) {
5474 return ON_STACK_REPLACEMENT;
5477 DCHECK(interrupt_address ==
5478 isolate->builtins()->OsrAfterStackCheck()->entry());
5479 return OSR_AFTER_STACK_CHECK;
5483 } // namespace internal
5486 #endif // V8_TARGET_ARCH_ARM