// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw
// 12-bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
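//
// Illustrative encoding (not from the original source): with kOff12Mask ==
// 0xfff, a delta of 5000 from the marker back to the patchable code is
// emitted as "cmp r1, #905", since 5000 / 0xfff == 1 (the register code) and
// 5000 % 0xfff == 905 (the immediate); the patcher recovers the delta as
// 1 * 0xfff + 905 == 5000.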
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
      info_emitted_ = true;
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
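//
// Illustrative entry layout (a sketch, not from the original source): for a
// function with two formal parameters, on entry sp points at the last pushed
// argument, so
//   sp[0 * kPointerSize] : argument 2
//   sp[1 * kPointerSize] : argument 1
//   sp[2 * kPointerSize] : receiver
// which is why the code below loads the receiver from
// MemOperand(sp, num_parameters * kPointerSize).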
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
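  // Worked example (illustrative, not from the original source): with
  // locals_count == 100 and kMaxPushes == 32, the loop above runs
  // 100 / 32 == 3 iterations of 32 pushes each, and the 100 % 32 == 4
  // remaining pushes are emitted straight-line, initializing all 100 stack
  // slots to undefined.
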
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    SetVar(this_function_var, r1, r0, r2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    // new.target is parameter -2.
    int offset = 2 * kPointerSize +
                 (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ ldr(r0, MemOperand(fp, offset));
    SetVar(new_target_var, r0, r2, r3);
  }
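  // Illustrative arithmetic (not from the original source): with two formal
  // parameters and kPointerSize == 4, offset == 2 * 4 + (2 + 1) * 4 == 20,
  // i.e. new.target sits one pointer slot above the receiver on the caller's
  // side of the frame (the 2 * kPointerSize skips the saved fp and lr).
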
  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r2, Operand(Smi::FromInt(num_parameters)));
    __ mov(r1, Operand(Smi::FromInt(rest_index)));
    __ mov(r0, Operand(Smi::FromInt(language_mode())));
    __ Push(r3, r2, r1, r0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

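// Worked example for the weight computation above (illustrative, not from the
// original source): assuming kCodeSizeMultiplier were 149, a back edge
// spanning distance == 1024 bytes of generated code would yield
// weight == Min(kMaxBackEdgeWeight, Max(1, 1024 / 149)) == 6, so larger loop
// bodies drain the profiling counter faster per back edge taken.
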
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      int32_t sp_delta = arg_count * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}

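// For example (restating the cases above): Split(eq, t, f, fall) with
// f == fall emits a single "beq t" and falls through to the false case; with
// t == fall it emits the negated "bne f"; when neither label is the
// fall-through it emits both a conditional and an unconditional branch.
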
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

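// Worked example (illustrative, assuming var->index() is the 0-based
// parameter index): for parameter 0 of a two-parameter function,
// offset == -0 * kPointerSize + (2 + 1) * kPointerSize, i.e. three slots
// above fp, just below the receiver on the caller's side of the frame;
// stack locals start at kLocal0Offset and grow towards lower addresses.
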
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             language_mode()).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
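  // Loop stack layout at this point (a sketch derived from the loads below,
  // with kPointerSize == 4):
  //   sp[0]  : current index (smi)
  //   sp[4]  : array length (smi)
  //   sp[8]  : array of keys (enum cache or fixed array)
  //   sp[12] : expected map (or smi 0 in the proxy slow case)
  //   sp[16] : the enumerated object itself
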
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ ldr(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      __ mov(LoadDescriptor::SlotRegister(),
             Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      CallGlobalLoadIC(var->name());
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
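  // Illustrative arithmetic (not from the original source): for literal
  // index 3 with kPointerSize == 4, literal_offset is
  // FixedArray::kHeaderSize + 12; FieldMemOperand then subtracts the
  // kHeapObjectTag bit that tagged heap pointers carry.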
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r0);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on the stack
      result_saved = true;
    }

    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ mov(r0, Operand(Smi::FromInt(NONE)));
            __ push(r0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}


1877 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1878 Comment cmnt(masm_, "[ ArrayLiteral");
1880 expr->BuildConstantElements(isolate());
1882 Handle<FixedArray> constant_elements = expr->constant_elements();
1883 bool has_fast_elements =
1884 IsFastObjectElementsKind(expr->constant_elements_kind());
1885 Handle<FixedArrayBase> constant_elements_values(
1886 FixedArrayBase::cast(constant_elements->get(1)));
1888 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1889 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1890 // If the only customer of allocation sites is transitioning, then
1891 // we can turn it off if we don't have anywhere else to transition to.
1892 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1895 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1896 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1897 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1898 __ mov(r1, Operand(constant_elements));
1899 if (MustCreateArrayLiteralWithRuntime(expr)) {
1900 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1901 __ Push(r3, r2, r1, r0);
1902 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1904 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1907 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1909 bool result_saved = false; // Is the result saved to the stack?
1910 ZoneList<Expression*>* subexprs = expr->values();
1911 int length = subexprs->length();
1913 // Emit code to evaluate all the non-constant subexpressions and to store
1914 // them into the newly cloned array.
1915 int array_index = 0;
1916 for (; array_index < length; array_index++) {
1917 Expression* subexpr = subexprs->at(array_index);
1918 if (subexpr->IsSpread()) break;
1920 // If the subexpression is a literal or a simple materialized literal it
1921 // is already set in the cloned array.
1922 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1924 if (!result_saved) {
1926 __ Push(Smi::FromInt(expr->literal_index()));
1927 result_saved = true;
1929 VisitForAccumulatorValue(subexpr);
1931 if (has_fast_elements) {
1932 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1933 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1934 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1935 __ str(result_register(), FieldMemOperand(r1, offset));
1936 // Update the write barrier for the array store.
1937 __ RecordWriteField(r1, offset, result_register(), r2,
1938 kLRHasBeenSaved, kDontSaveFPRegs,
1939 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1941 __ mov(r3, Operand(Smi::FromInt(array_index)));
1942 StoreArrayLiteralElementStub stub(isolate());
1946 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1949 // In case the array literal contains spread expressions it has two parts. The
1950 // first part is the "static" array, which has a literal index and is handled
1951 // above. The second part is everything after the first spread expression
1952 // (inclusive); these elements get appended to the array. Note that the number
1953 // of elements an iterable produces is unknown ahead of time.
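// For example, in [a, b, ...iter, c] the elements a and b are stored into the
// cloned literal above, while ...iter and c are appended one by one at runtime.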
1954 if (array_index < length && result_saved) {
1955 __ pop(); // literal index
1957 result_saved = false;
1959 for (; array_index < length; array_index++) {
1960 Expression* subexpr = subexprs->at(array_index);
1963 if (subexpr->IsSpread()) {
1964 VisitForStackValue(subexpr->AsSpread()->expression());
1965 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1967 VisitForStackValue(subexpr);
1968 __ CallRuntime(Runtime::kAppendElement, 2);
1971 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1975 __ pop(); // literal index
1976 context()->PlugTOS();
1978 context()->Plug(r0);
1983 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1984 DCHECK(expr->target()->IsValidReferenceExpression());
1986 Comment cmnt(masm_, "[ Assignment");
1988 Property* property = expr->target()->AsProperty();
1989 LhsKind assign_type = Property::GetAssignType(property);
1991 // Evaluate LHS expression.
1992 switch (assign_type) {
1994 // Nothing to do here.
1996 case NAMED_PROPERTY:
1997 if (expr->is_compound()) {
1998 // We need the receiver both on the stack and in the register.
1999 VisitForStackValue(property->obj());
2000 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2002 VisitForStackValue(property->obj());
2005 case NAMED_SUPER_PROPERTY:
2007 property->obj()->AsSuperPropertyReference()->this_var());
2008 VisitForAccumulatorValue(
2009 property->obj()->AsSuperPropertyReference()->home_object());
2010 __ Push(result_register());
2011 if (expr->is_compound()) {
2012 const Register scratch = r1;
2013 __ ldr(scratch, MemOperand(sp, kPointerSize));
2015 __ Push(result_register());
2018 case KEYED_SUPER_PROPERTY:
2020 property->obj()->AsSuperPropertyReference()->this_var());
2022 property->obj()->AsSuperPropertyReference()->home_object());
2023 VisitForAccumulatorValue(property->key());
2024 __ Push(result_register());
2025 if (expr->is_compound()) {
2026 const Register scratch = r1;
2027 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
2029 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
2031 __ Push(result_register());
2034 case KEYED_PROPERTY:
2035 if (expr->is_compound()) {
2036 VisitForStackValue(property->obj());
2037 VisitForStackValue(property->key());
2038 __ ldr(LoadDescriptor::ReceiverRegister(),
2039 MemOperand(sp, 1 * kPointerSize));
2040 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2042 VisitForStackValue(property->obj());
2043 VisitForStackValue(property->key());
2048 // For compound assignments we need another deoptimization point after the
2049 // variable/property load.
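// For example, compiling 'o.x += 1' must be able to deoptimize right after
// the load of o.x, before the addition and the store are performed.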
2050 if (expr->is_compound()) {
2051 { AccumulatorValueContext context(this);
2052 switch (assign_type) {
2054 EmitVariableLoad(expr->target()->AsVariableProxy());
2055 PrepareForBailout(expr->target(), TOS_REG);
2057 case NAMED_PROPERTY:
2058 EmitNamedPropertyLoad(property);
2059 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2061 case NAMED_SUPER_PROPERTY:
2062 EmitNamedSuperPropertyLoad(property);
2063 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2065 case KEYED_SUPER_PROPERTY:
2066 EmitKeyedSuperPropertyLoad(property);
2067 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2069 case KEYED_PROPERTY:
2070 EmitKeyedPropertyLoad(property);
2071 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2076 Token::Value op = expr->binary_op();
2077 __ push(r0); // Left operand goes on the stack.
2078 VisitForAccumulatorValue(expr->value());
2080 SetSourcePosition(expr->position() + 1);
2081 AccumulatorValueContext context(this);
2082 if (ShouldInlineSmiCase(op)) {
2083 EmitInlineSmiBinaryOp(expr->binary_operation(),
2088 EmitBinaryOp(expr->binary_operation(), op);
2091 // Deoptimization point in case the binary operation may have side effects.
2092 PrepareForBailout(expr->binary_operation(), TOS_REG);
2094 VisitForAccumulatorValue(expr->value());
2097 // Record source position before possible IC call.
2098 SetSourcePosition(expr->position());
2101 switch (assign_type) {
2103 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2104 expr->op(), expr->AssignmentSlot());
2105 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2106 context()->Plug(r0);
2108 case NAMED_PROPERTY:
2109 EmitNamedPropertyAssignment(expr);
2111 case NAMED_SUPER_PROPERTY:
2112 EmitNamedSuperPropertyStore(property);
2113 context()->Plug(r0);
2115 case KEYED_SUPER_PROPERTY:
2116 EmitKeyedSuperPropertyStore(property);
2117 context()->Plug(r0);
2119 case KEYED_PROPERTY:
2120 EmitKeyedPropertyAssignment(expr);
2126 void FullCodeGenerator::VisitYield(Yield* expr) {
2127 Comment cmnt(masm_, "[ Yield");
2128 // Evaluate yielded value first; the initial iterator definition depends on
2129 // this. It stays on the stack while we update the iterator.
2130 VisitForStackValue(expr->expression());
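// Yield kinds, roughly: kInitial is the implicit first yield that returns the
// generator object itself, kSuspend is an ordinary 'yield e', kFinal produces
// the final { value, done: true } result, and kDelegating is 'yield* e'.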
2132 switch (expr->yield_kind()) {
2133 case Yield::kSuspend:
2134 // Pop value from top-of-stack slot; box result into result register.
2135 EmitCreateIteratorResult(false);
2136 __ push(result_register());
2138 case Yield::kInitial: {
2139 Label suspend, continuation, post_runtime, resume;
2143 __ bind(&continuation);
2147 VisitForAccumulatorValue(expr->generator_object());
2148 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2149 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2150 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2151 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2153 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2154 kLRHasBeenSaved, kDontSaveFPRegs);
2155 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2157 __ b(eq, &post_runtime);
2158 __ push(r0); // generator object
2159 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2160 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2161 __ bind(&post_runtime);
2162 __ pop(result_register());
2163 EmitReturnSequence();
2166 context()->Plug(result_register());
2170 case Yield::kFinal: {
2171 VisitForAccumulatorValue(expr->generator_object());
2172 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2173 __ str(r1, FieldMemOperand(result_register(),
2174 JSGeneratorObject::kContinuationOffset));
2175 // Pop value from top-of-stack slot, box result into result register.
2176 EmitCreateIteratorResult(true);
2177 EmitUnwindBeforeReturn();
2178 EmitReturnSequence();
2182 case Yield::kDelegating: {
2183 VisitForStackValue(expr->generator_object());
2185 // Initial stack layout is as follows:
2186 // [sp + 1 * kPointerSize] iter
2187 // [sp + 0 * kPointerSize] g
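// What follows implements, roughly, this desugaring of 'yield* iterable':
//   let received = undefined;
//   while (true) {
//     let result = iter[f](received);  // f is 'next', or 'throw' on exception
//     if (result.done) break;
//     received = yield result.value;
//   }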
2189 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2190 Label l_next, l_call, l_loop;
2191 Register load_receiver = LoadDescriptor::ReceiverRegister();
2192 Register load_name = LoadDescriptor::NameRegister();
2194 // Initial send value is undefined.
2195 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2198 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2200 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2201 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2202 __ Push(load_name, r3, r0); // "throw", iter, except
2205 // try { received = %yield result }
2206 // Shuffle the received result above a try handler and yield it without re-boxing.
2209 __ pop(r0); // result
2210 EnterTryBlock(expr->index(), &l_catch);
2211 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2212 __ push(r0); // result
2214 __ bind(&l_continuation);
2216 __ bind(&l_suspend);
2217 const int generator_object_depth = kPointerSize + try_block_size;
2218 __ ldr(r0, MemOperand(sp, generator_object_depth));
2220 __ Push(Smi::FromInt(expr->index())); // handler-index
2221 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2222 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2223 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2224 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2226 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2227 kLRHasBeenSaved, kDontSaveFPRegs);
2228 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2229 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2230 __ pop(r0); // result
2231 EmitReturnSequence();
2232 __ bind(&l_resume); // received in r0
2233 ExitTryBlock(expr->index());
2235 // receiver = iter; f = 'next'; arg = received;
2238 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2239 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2240 __ Push(load_name, r3, r0); // "next", iter, received
2242 // result = receiver[f](arg);
2244 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2245 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2246 __ mov(LoadDescriptor::SlotRegister(),
2247 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2248 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2249 CallIC(ic, TypeFeedbackId::None());
2251 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2252 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2255 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2256 __ Drop(1); // The function is still on the stack; drop it.
2258 // if (!result.done) goto l_try;
2260 __ Move(load_receiver, r0);
2262 __ push(load_receiver); // save result
2263 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2264 __ mov(LoadDescriptor::SlotRegister(),
2265 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2266 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done
2267 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2269 __ cmp(r0, Operand(0));
2273 __ pop(load_receiver); // result
2274 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2275 __ mov(LoadDescriptor::SlotRegister(),
2276 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2277 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value
2278 context()->DropAndPlug(2, r0); // drop iter and g
2285 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2287 JSGeneratorObject::ResumeMode resume_mode) {
2288 // The value stays in r0, and is ultimately read by the resumed generator, as
2289 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2290 // is read to throw the value when the resumed generator is already closed.
2291 // r1 will hold the generator object until the activation has been resumed.
2292 VisitForStackValue(generator);
2293 VisitForAccumulatorValue(value);
2296 // Load suspended function and context.
2297 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2298 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2300 // Load receiver and store as the first argument.
2301 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2304 // Push holes for the rest of the arguments to the generator function.
2305 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2307 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2308 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2309 Label push_argument_holes, push_frame;
2310 __ bind(&push_argument_holes);
2311 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2312 __ b(mi, &push_frame);
2314 __ jmp(&push_argument_holes);
2316 // Enter a new JavaScript frame, and initialize its slots as they were when
2317 // the generator was suspended.
2318 Label resume_frame, done;
2319 __ bind(&push_frame);
2320 __ bl(&resume_frame);
2322 __ bind(&resume_frame);
2323 // lr = return address.
2324 // fp = caller's frame pointer.
2325 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2326 // cp = callee's context,
2327 // r4 = callee's JS function.
2328 __ PushFixedFrame(r4);
2329 // Adjust FP to point to saved FP.
2330 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2332 // Load the operand stack size.
2333 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2334 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2337 // If we are sending a value and there is no operand stack, we can jump back in directly.
2339 if (resume_mode == JSGeneratorObject::NEXT) {
2341 __ cmp(r3, Operand(0));
2342 __ b(ne, &slow_resume);
2343 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2345 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2346 if (FLAG_enable_embedded_constant_pool) {
2347 // Load the new code object's constant pool pointer.
2348 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2351 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2354 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2355 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2358 __ bind(&slow_resume);
2361 // Otherwise, we push holes for the operand stack and call the runtime to fix
2362 // up the stack and the handlers.
2363 Label push_operand_holes, call_resume;
2364 __ bind(&push_operand_holes);
2365 __ sub(r3, r3, Operand(1), SetCC);
2366 __ b(mi, &call_resume);
2368 __ b(&push_operand_holes);
2369 __ bind(&call_resume);
2370 DCHECK(!result_register().is(r1));
2371 __ Push(r1, result_register());
2372 __ Push(Smi::FromInt(resume_mode));
2373 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2374 // Not reached: the runtime call returns elsewhere.
2375 __ stop("not-reached");
2378 context()->Plug(result_register());
2382 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
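// Allocates and fills an iterator result object, i.e. an object of the shape
// { value: <top of stack>, done: <done> }, using the native context's
// iterator result map.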
2386 const int instance_size = 5 * kPointerSize;
2387 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2390 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
2393 __ bind(&gc_required);
2394 __ Push(Smi::FromInt(instance_size));
2395 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2396 __ ldr(context_register(),
2397 MemOperand(fp, StandardFrameConstants::kContextOffset));
2399 __ bind(&allocated);
2400 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2401 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2402 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2404 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2405 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2406 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2407 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2408 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2410 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2412 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2414 // Only the value field needs a write barrier, as the other values are in the root set.
2416 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2417 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2421 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2422 SetSourcePosition(prop->position());
2423 Literal* key = prop->key()->AsLiteral();
2424 DCHECK(!prop->IsSuperAccess());
2426 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2427 __ mov(LoadDescriptor::SlotRegister(),
2428 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2429 CallLoadIC(NOT_CONTEXTUAL);
2433 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2434 // Stack: receiver, home_object.
2435 SetSourcePosition(prop->position());
2436 Literal* key = prop->key()->AsLiteral();
2437 DCHECK(!key->value()->IsSmi());
2438 DCHECK(prop->IsSuperAccess());
2440 __ Push(key->value());
2441 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2445 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2446 SetSourcePosition(prop->position());
2447 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2448 __ mov(LoadDescriptor::SlotRegister(),
2449 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2454 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2455 // Stack: receiver, home_object, key.
2456 SetSourcePosition(prop->position());
2458 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2462 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2464 Expression* left_expr,
2465 Expression* right_expr) {
2466 Label done, smi_case, stub_call;
2468 Register scratch1 = r2;
2469 Register scratch2 = r3;
2471 // Get the arguments.
2473 Register right = r0;
2476 // Perform combined smi check on both operands.
2477 __ orr(scratch1, left, Operand(right));
2478 STATIC_ASSERT(kSmiTag == 0);
2479 JumpPatchSite patch_site(masm_);
2480 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2482 __ bind(&stub_call);
2483 Handle<Code> code = CodeFactory::BinaryOpIC(
2484 isolate(), op, language_mode()).code();
2485 CallIC(code, expr->BinaryOperationFeedbackId());
2486 patch_site.EmitPatchInfo();
2490 // Smi case. This code works the same way as the smi-smi case in the
2491 // type-recording binary operation stub (BinaryOpIC).
2494 __ GetLeastBitsFromSmi(scratch1, right, 5);
2495 __ mov(right, Operand(left, ASR, scratch1));
2496 __ bic(right, right, Operand(kSmiTagMask));
2499 __ SmiUntag(scratch1, left);
2500 __ GetLeastBitsFromSmi(scratch2, right, 5);
2501 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2502 __ TrySmiTag(right, scratch1, &stub_call);
2506 __ SmiUntag(scratch1, left);
2507 __ GetLeastBitsFromSmi(scratch2, right, 5);
2508 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
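// The shifted value is interpreted as unsigned, so if either of the top two
// bits is set it cannot be represented as a non-negative smi; fall back to
// the stub in that case.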
2509 __ tst(scratch1, Operand(0xc0000000));
2510 __ b(ne, &stub_call);
2511 __ SmiTag(right, scratch1);
2515 __ add(scratch1, left, Operand(right), SetCC);
2516 __ b(vs, &stub_call);
2517 __ mov(right, scratch1);
2520 __ sub(scratch1, left, Operand(right), SetCC);
2521 __ b(vs, &stub_call);
2522 __ mov(right, scratch1);
2525 __ SmiUntag(ip, right);
2526 __ smull(scratch1, scratch2, left, ip);
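// smull yields a 64-bit product in (scratch1, scratch2). The product fits in
// 32 bits iff the high word equals the sign extension of the low word. A zero
// low word still needs the sign check below, since the result might be -0.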
2527 __ mov(ip, Operand(scratch1, ASR, 31));
2528 __ cmp(ip, Operand(scratch2));
2529 __ b(ne, &stub_call);
2530 __ cmp(scratch1, Operand::Zero());
2531 __ mov(right, Operand(scratch1), LeaveCC, ne);
2533 __ add(scratch2, right, Operand(left), SetCC);
2534 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2535 __ b(mi, &stub_call);
2539 __ orr(right, left, Operand(right));
2541 case Token::BIT_AND:
2542 __ and_(right, left, Operand(right));
2544 case Token::BIT_XOR:
2545 __ eor(right, left, Operand(right));
2552 context()->Plug(r0);
2556 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2557 // Constructor is in r0.
2558 DCHECK(lit != NULL);
2561 // No access check is needed here since the constructor is created by the class literal.
2563 Register scratch = r1;
2565 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2568 // store_slot_index points to the vector IC slot for the next store IC used.
2569 // ClassLiteral::ComputeFeedbackRequirements controls the allocation of slots
2570 // and must be updated if the number of store ICs emitted here changes.
2571 int store_slot_index = 0;
2572 for (int i = 0; i < lit->properties()->length(); i++) {
2573 ObjectLiteral::Property* property = lit->properties()->at(i);
2574 Expression* value = property->value();
2576 if (property->is_static()) {
2577 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2579 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2582 EmitPropertyKey(property, lit->GetIdForProperty(i));
2584 // The static prototype property is read-only. We handle the non-computed
2585 // property name case in the parser. Since this is the only case where we
2586 // need to check for an own read-only property, we special-case it so that we
2587 // do not need to perform the check for every property.
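// For example, 'class C { static ["prototype"]() {} }' must throw at runtime,
// while the non-computed form 'static prototype() {}' is already rejected by
// the parser.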
2588 if (property->is_static() && property->is_computed_name()) {
2589 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2593 VisitForStackValue(value);
2594 EmitSetHomeObjectIfNeeded(value, 2,
2595 lit->SlotForHomeObject(value, &store_slot_index));
2597 switch (property->kind()) {
2598 case ObjectLiteral::Property::CONSTANT:
2599 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2600 case ObjectLiteral::Property::PROTOTYPE:
2602 case ObjectLiteral::Property::COMPUTED:
2603 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2606 case ObjectLiteral::Property::GETTER:
2607 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2609 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2612 case ObjectLiteral::Property::SETTER:
2613 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2615 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2624 __ CallRuntime(Runtime::kToFastProperties, 1);
2627 __ CallRuntime(Runtime::kToFastProperties, 1);
2629 // Verify that compilation exactly consumed the number of store ic slots that
2630 // the ClassLiteral node had to offer.
2631 DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
2635 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2637 Handle<Code> code = CodeFactory::BinaryOpIC(
2638 isolate(), op, language_mode()).code();
2639 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2640 CallIC(code, expr->BinaryOperationFeedbackId());
2641 patch_site.EmitPatchInfo();
2642 context()->Plug(r0);
2646 void FullCodeGenerator::EmitAssignment(Expression* expr,
2647 FeedbackVectorICSlot slot) {
2648 DCHECK(expr->IsValidReferenceExpression());
2650 Property* prop = expr->AsProperty();
2651 LhsKind assign_type = Property::GetAssignType(prop);
2653 switch (assign_type) {
2655 Variable* var = expr->AsVariableProxy()->var();
2656 EffectContext context(this);
2657 EmitVariableAssignment(var, Token::ASSIGN, slot);
2660 case NAMED_PROPERTY: {
2661 __ push(r0); // Preserve value.
2662 VisitForAccumulatorValue(prop->obj());
2663 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2664 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2665 __ mov(StoreDescriptor::NameRegister(),
2666 Operand(prop->key()->AsLiteral()->value()));
2667 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2671 case NAMED_SUPER_PROPERTY: {
2673 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2674 VisitForAccumulatorValue(
2675 prop->obj()->AsSuperPropertyReference()->home_object());
2676 // stack: value, this; r0: home_object
2677 Register scratch = r2;
2678 Register scratch2 = r3;
2679 __ mov(scratch, result_register()); // home_object
2680 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2681 __ ldr(scratch2, MemOperand(sp, 0)); // this
2682 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2683 __ str(scratch, MemOperand(sp, 0)); // home_object
2684 // stack: this, home_object; r0: value
2685 EmitNamedSuperPropertyStore(prop);
2688 case KEYED_SUPER_PROPERTY: {
2690 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2692 prop->obj()->AsSuperPropertyReference()->home_object());
2693 VisitForAccumulatorValue(prop->key());
2694 Register scratch = r2;
2695 Register scratch2 = r3;
2696 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2697 // stack: value, this, home_object; r0: key, r3: value
2698 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2699 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2700 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2701 __ str(scratch, MemOperand(sp, kPointerSize));
2702 __ str(r0, MemOperand(sp, 0));
2703 __ Move(r0, scratch2);
2704 // stack: this, home_object, key; r0: value.
2705 EmitKeyedSuperPropertyStore(prop);
2708 case KEYED_PROPERTY: {
2709 __ push(r0); // Preserve value.
2710 VisitForStackValue(prop->obj());
2711 VisitForAccumulatorValue(prop->key());
2712 __ Move(StoreDescriptor::NameRegister(), r0);
2713 __ Pop(StoreDescriptor::ValueRegister(),
2714 StoreDescriptor::ReceiverRegister());
2715 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2717 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2722 context()->Plug(r0);
2726 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2727 Variable* var, MemOperand location) {
2728 __ str(result_register(), location);
2729 if (var->IsContextSlot()) {
2730 // RecordWrite may destroy all its register arguments.
2731 __ mov(r3, result_register());
2732 int offset = Context::SlotOffset(var->index());
2733 __ RecordWriteContextSlot(
2734 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2739 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2740 FeedbackVectorICSlot slot) {
2741 if (var->IsUnallocated()) {
2742 // Global var, const, or let.
2743 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2744 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2745 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2748 } else if (var->mode() == LET && op != Token::INIT_LET) {
2749 // Non-initializing assignment to let variable needs a write barrier.
2750 DCHECK(!var->IsLookupSlot());
2751 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
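// An assignment observed before the 'let' binding has been initialized, e.g.
// 'x = 1; let x;', must throw a ReferenceError; the hole value marks the
// uninitialized binding.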
2753 MemOperand location = VarOperand(var, r1);
2754 __ ldr(r3, location);
2755 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2757 __ mov(r3, Operand(var->name()));
2759 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2760 // Perform the assignment.
2762 EmitStoreToStackLocalOrContextSlot(var, location);
2764 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2765 // Assignment to const variable needs a write barrier.
2766 DCHECK(!var->IsLookupSlot());
2767 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2769 MemOperand location = VarOperand(var, r1);
2770 __ ldr(r3, location);
2771 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2772 __ b(ne, &const_error);
2773 __ mov(r3, Operand(var->name()));
2775 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2776 __ bind(&const_error);
2777 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2779 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2780 if (var->IsLookupSlot()) {
2781 // Assignment to var.
2782 __ push(r0); // Value.
2783 __ mov(r1, Operand(var->name()));
2784 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2785 __ Push(cp, r1, r0); // Context, name, language mode.
2786 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2788 // Assignment to var or initializing assignment to let/const in harmony mode.
2790 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2791 MemOperand location = VarOperand(var, r1);
2792 if (generate_debug_code_ && op == Token::INIT_LET) {
2793 // Check for an uninitialized let binding.
2794 __ ldr(r2, location);
2795 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2796 __ Check(eq, kLetBindingReInitialization);
2798 EmitStoreToStackLocalOrContextSlot(var, location);
2801 } else if (op == Token::INIT_CONST_LEGACY) {
2802 // Const initializers need a write barrier.
2803 DCHECK(var->mode() == CONST_LEGACY);
2804 DCHECK(!var->IsParameter()); // No const parameters.
2805 if (var->IsLookupSlot()) {
2807 __ mov(r0, Operand(var->name()));
2808 __ Push(cp, r0); // Context and name.
2809 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2811 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2813 MemOperand location = VarOperand(var, r1);
2814 __ ldr(r2, location);
2815 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2817 EmitStoreToStackLocalOrContextSlot(var, location);
2822 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2823 if (is_strict(language_mode())) {
2824 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2826 // Silently ignore store in sloppy mode.
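// For example, in sloppy mode 'const c = 1; c = 2;' leaves c unchanged and
// does not throw, whereas strict mode code throws here.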
2831 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2832 // Assignment to a property, using a named store IC.
2833 Property* prop = expr->target()->AsProperty();
2834 DCHECK(prop != NULL);
2835 DCHECK(prop->key()->IsLiteral());
2837 // Record source code position before IC call.
2838 SetSourcePosition(expr->position());
2839 __ mov(StoreDescriptor::NameRegister(),
2840 Operand(prop->key()->AsLiteral()->value()));
2841 __ pop(StoreDescriptor::ReceiverRegister());
2842 if (FLAG_vector_stores) {
2843 EmitLoadStoreICSlot(expr->AssignmentSlot());
2846 CallStoreIC(expr->AssignmentFeedbackId());
2849 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2850 context()->Plug(r0);
2854 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2855 // Assignment to named property of super.
2857 // stack : receiver ('this'), home_object
2858 DCHECK(prop != NULL);
2859 Literal* key = prop->key()->AsLiteral();
2860 DCHECK(key != NULL);
2862 __ Push(key->value());
2864 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2865 : Runtime::kStoreToSuper_Sloppy),
2870 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2871 // Assignment to keyed property of super.
2873 // stack : receiver ('this'), home_object, key
2874 DCHECK(prop != NULL);
2878 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2879 : Runtime::kStoreKeyedToSuper_Sloppy),
2884 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2885 // Assignment to a property, using a keyed store IC.
2887 // Record source code position before IC call.
2888 SetSourcePosition(expr->position());
2889 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2890 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2893 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2894 if (FLAG_vector_stores) {
2895 EmitLoadStoreICSlot(expr->AssignmentSlot());
2898 CallIC(ic, expr->AssignmentFeedbackId());
2901 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2902 context()->Plug(r0);
2906 void FullCodeGenerator::VisitProperty(Property* expr) {
2907 Comment cmnt(masm_, "[ Property");
2908 Expression* key = expr->key();
2910 if (key->IsPropertyName()) {
2911 if (!expr->IsSuperAccess()) {
2912 VisitForAccumulatorValue(expr->obj());
2913 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2914 EmitNamedPropertyLoad(expr);
2916 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2918 expr->obj()->AsSuperPropertyReference()->home_object());
2919 EmitNamedSuperPropertyLoad(expr);
2922 if (!expr->IsSuperAccess()) {
2923 VisitForStackValue(expr->obj());
2924 VisitForAccumulatorValue(expr->key());
2925 __ Move(LoadDescriptor::NameRegister(), r0);
2926 __ pop(LoadDescriptor::ReceiverRegister());
2927 EmitKeyedPropertyLoad(expr);
2929 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2931 expr->obj()->AsSuperPropertyReference()->home_object());
2932 VisitForStackValue(expr->key());
2933 EmitKeyedSuperPropertyLoad(expr);
2936 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2937 context()->Plug(r0);
2941 void FullCodeGenerator::CallIC(Handle<Code> code,
2942 TypeFeedbackId ast_id) {
2944 // All calls must have a predictable size in full-codegen code to ensure that
2945 // the debugger can patch them correctly.
2946 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2947 NEVER_INLINE_TARGET_ADDRESS);
2951 // Code common for calls using the IC.
2952 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2953 Expression* callee = expr->expression();
2955 CallICState::CallType call_type =
2956 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2958 // Get the target function.
2959 if (call_type == CallICState::FUNCTION) {
2960 { StackValueContext context(this);
2961 EmitVariableLoad(callee->AsVariableProxy());
2962 PrepareForBailout(callee, NO_REGISTERS);
2964 // Push undefined as receiver. This is patched in the method prologue if it
2965 // is a sloppy mode method.
2966 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2969 // Load the function from the receiver.
2970 DCHECK(callee->IsProperty());
2971 DCHECK(!callee->AsProperty()->IsSuperAccess());
2972 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2973 EmitNamedPropertyLoad(callee->AsProperty());
2974 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2975 // Push the target function under the receiver.
2976 __ ldr(ip, MemOperand(sp, 0));
2978 __ str(r0, MemOperand(sp, kPointerSize));
2981 EmitCall(expr, call_type);
2985 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2986 Expression* callee = expr->expression();
2987 DCHECK(callee->IsProperty());
2988 Property* prop = callee->AsProperty();
2989 DCHECK(prop->IsSuperAccess());
2991 SetSourcePosition(prop->position());
2992 Literal* key = prop->key()->AsLiteral();
2993 DCHECK(!key->value()->IsSmi());
2994 // Load the function from the receiver.
2995 const Register scratch = r1;
2996 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2997 VisitForStackValue(super_ref->home_object());
2998 VisitForAccumulatorValue(super_ref->this_var());
3001 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3003 __ Push(key->value());
3007 // - this (receiver)
3008 // - this (receiver) <-- LoadFromSuper will pop here and below.
3011 __ CallRuntime(Runtime::kLoadFromSuper, 3);
3013 // Replace home_object with target function.
3014 __ str(r0, MemOperand(sp, kPointerSize));
3017 // - target function
3018 // - this (receiver)
3019 EmitCall(expr, CallICState::METHOD);
3023 // Code common for calls using the IC.
3024 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3027 VisitForAccumulatorValue(key);
3029 Expression* callee = expr->expression();
3031 // Load the function from the receiver.
3032 DCHECK(callee->IsProperty());
3033 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3034 __ Move(LoadDescriptor::NameRegister(), r0);
3035 EmitKeyedPropertyLoad(callee->AsProperty());
3036 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3038 // Push the target function under the receiver.
3039 __ ldr(ip, MemOperand(sp, 0));
3041 __ str(r0, MemOperand(sp, kPointerSize));
3043 EmitCall(expr, CallICState::METHOD);
3047 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3048 Expression* callee = expr->expression();
3049 DCHECK(callee->IsProperty());
3050 Property* prop = callee->AsProperty();
3051 DCHECK(prop->IsSuperAccess());
3053 SetSourcePosition(prop->position());
3054 // Load the function from the receiver.
3055 const Register scratch = r1;
3056 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3057 VisitForStackValue(super_ref->home_object());
3058 VisitForAccumulatorValue(super_ref->this_var());
3061 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3063 VisitForStackValue(prop->key());
3067 // - this (receiver)
3068 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3071 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3073 // Replace home_object with target function.
3074 __ str(r0, MemOperand(sp, kPointerSize));
3077 // - target function
3078 // - this (receiver)
3079 EmitCall(expr, CallICState::METHOD);
3083 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3084 // Load the arguments.
3085 ZoneList<Expression*>* args = expr->arguments();
3086 int arg_count = args->length();
3087 { PreservePositionScope scope(masm()->positions_recorder());
3088 for (int i = 0; i < arg_count; i++) {
3089 VisitForStackValue(args->at(i));
3093 // Record source position of the IC call.
3094 SetSourcePosition(expr->position());
3095 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3096 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3097 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3098 // Don't assign a type feedback id to the IC, since type feedback is provided
3099 // by the vector above.
3102 RecordJSReturnSite(expr);
3103 // Restore context register.
3104 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3105 context()->DropAndPlug(1, r0);
3109 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3110 // r5: copy of the first argument or undefined if it doesn't exist.
3111 if (arg_count > 0) {
3112 __ ldr(r5, MemOperand(sp, arg_count * kPointerSize));
3114 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3117 // r4: the receiver of the enclosing function.
3118 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3120 // r3: the receiver of the enclosing function.
3121 Variable* this_var = scope()->LookupThis();
3122 DCHECK_NOT_NULL(this_var);
3123 __ ldr(r3, VarOperand(this_var, r3));
3125 // r2: language mode.
3126 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3128 // r1: the start position of the scope the call resides in.
3129 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3131 // Do the runtime call.
3133 __ Push(r4, r3, r2, r1);
3134 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3138 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3139 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3140 Variable* this_var = super_ref->this_var()->var();
3141 GetVar(r1, this_var);
3142 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
3143 Label uninitialized_this;
3144 __ b(eq, &uninitialized_this);
3145 __ mov(r0, Operand(this_var->name()));
3147 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3148 __ bind(&uninitialized_this);
3150 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3154 void FullCodeGenerator::VisitCall(Call* expr) {
3156 // We want to verify that RecordJSReturnSite gets called on all paths
3157 // through this function. Avoid early returns.
3158 expr->return_is_recorded_ = false;
3161 Comment cmnt(masm_, "[ Call");
3162 Expression* callee = expr->expression();
3163 Call::CallType call_type = expr->GetCallType(isolate());
3165 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3166 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
3167 // to resolve the function we need to call and the receiver of the
3168 // call. Then we call the resolved function using the given arguments.
3170 ZoneList<Expression*>* args = expr->arguments();
3171 int arg_count = args->length();
3173 { PreservePositionScope pos_scope(masm()->positions_recorder());
3174 VisitForStackValue(callee);
3175 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3176 __ push(r2); // Reserved receiver slot.
3178 // Push the arguments.
3179 for (int i = 0; i < arg_count; i++) {
3180 VisitForStackValue(args->at(i));
3183 // Push a copy of the function (found below the arguments) and resolve eval.
3185 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3187 EmitResolvePossiblyDirectEval(arg_count);
3189 // The runtime call returns a pair of values in r0 (function) and
3190 // r1 (receiver). Touch up the stack with the right values.
3191 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3192 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
3194 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3197 // Record source position for debugger.
3198 SetSourcePosition(expr->position());
3199 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3200 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3202 RecordJSReturnSite(expr);
3203 // Restore context register.
3204 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3205 context()->DropAndPlug(1, r0);
3206 } else if (call_type == Call::GLOBAL_CALL) {
3207 EmitCallWithLoadIC(expr);
3209 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3210 // Call to a lookup slot (dynamically introduced variable).
3211 VariableProxy* proxy = callee->AsVariableProxy();
3214 { PreservePositionScope scope(masm()->positions_recorder());
3215 // Generate code for loading from variables potentially shadowed
3216 // by eval-introduced variables.
3217 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3221 // Call the runtime to find the function to call (returned in r0)
3222 // and the object holding it (returned in r1).
3223 DCHECK(!context_register().is(r2));
3224 __ mov(r2, Operand(proxy->name()));
3225 __ Push(context_register(), r2);
3226 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3227 __ Push(r0, r1); // Function, receiver.
3228 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3230 // If fast case code has been generated, emit code to push the
3231 // function and receiver and have the slow path jump around this code.
3233 if (done.is_linked()) {
3239 // The receiver is implicitly the global receiver. Indicate this
3240 // by passing undefined to the call function stub.
3241 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3246 // The receiver is either the global receiver or an object found
3247 // by LoadContextSlot.
3249 } else if (call_type == Call::PROPERTY_CALL) {
3250 Property* property = callee->AsProperty();
3251 bool is_named_call = property->key()->IsPropertyName();
3252 if (property->IsSuperAccess()) {
3253 if (is_named_call) {
3254 EmitSuperCallWithLoadIC(expr);
3256 EmitKeyedSuperCallWithLoadIC(expr);
3260 PreservePositionScope scope(masm()->positions_recorder());
3261 VisitForStackValue(property->obj());
3263 if (is_named_call) {
3264 EmitCallWithLoadIC(expr);
3266 EmitKeyedCallWithLoadIC(expr, property->key());
3269 } else if (call_type == Call::SUPER_CALL) {
3270 EmitSuperConstructorCall(expr);
3272 DCHECK(call_type == Call::OTHER_CALL);
3273 // Call to an arbitrary expression not handled specially above.
3274 { PreservePositionScope scope(masm()->positions_recorder());
3275 VisitForStackValue(callee);
3277 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3279 // Emit function call.
3284 // RecordJSReturnSite should have been called.
3285 DCHECK(expr->return_is_recorded_);
3290 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3291 Comment cmnt(masm_, "[ CallNew");
3292 // According to ECMA-262, section 11.2.2, page 44, the function
3293 // expression in new calls must be evaluated before the arguments.
3296 // Push constructor on the stack. If it's not a function it's used as
3297 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
3299 DCHECK(!expr->expression()->IsSuperPropertyReference());
3300 VisitForStackValue(expr->expression());
3302 // Push the arguments ("left-to-right") on the stack.
3303 ZoneList<Expression*>* args = expr->arguments();
3304 int arg_count = args->length();
3305 for (int i = 0; i < arg_count; i++) {
3306 VisitForStackValue(args->at(i));
3309 // Call the construct call builtin that handles allocation and
3310 // constructor invocation.
3311 SetSourcePosition(expr->position());
3313 // Load function and argument count into r1 and r0.
3314 __ mov(r0, Operand(arg_count));
3315 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3317 // Record call targets in unoptimized code.
3318 if (FLAG_pretenuring_call_new) {
3319 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3320 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3321 expr->CallNewFeedbackSlot().ToInt() + 1);
3324 __ Move(r2, FeedbackVector());
3325 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3327 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3328 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3329 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3330 context()->Plug(r0);
3334 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3335 SuperCallReference* super_call_ref =
3336 expr->expression()->AsSuperCallReference();
3337 DCHECK_NOT_NULL(super_call_ref);
3339 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3340 VisitForStackValue(new_target_proxy);
3342 EmitLoadSuperConstructor(super_call_ref);
3343 __ push(result_register());
3345 // Push the arguments ("left-to-right") on the stack.
3346 ZoneList<Expression*>* args = expr->arguments();
3347 int arg_count = args->length();
3348 for (int i = 0; i < arg_count; i++) {
3349 VisitForStackValue(args->at(i));
3352 // Call the construct call builtin that handles allocation and
3353 // constructor invocation.
3354 SetSourcePosition(expr->position());
3356 // Load function and argument count into r1 and r0.
3357 __ mov(r0, Operand(arg_count));
3358 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3360 // Record call targets in unoptimized code.
3361 if (FLAG_pretenuring_call_new) {
3363 /* TODO(dslomov): support pretenuring.
3364 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3365 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3366 expr->CallNewFeedbackSlot().ToInt() + 1);
3370 __ Move(r2, FeedbackVector());
3371 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3373 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3374 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3378 RecordJSReturnSite(expr);
3380 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3381 context()->Plug(r0);
3385 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3386 ZoneList<Expression*>* args = expr->arguments();
3387 DCHECK(args->length() == 1);
3389 VisitForAccumulatorValue(args->at(0));
3391 Label materialize_true, materialize_false;
3392 Label* if_true = NULL;
3393 Label* if_false = NULL;
3394 Label* fall_through = NULL;
3395 context()->PrepareTest(&materialize_true, &materialize_false,
3396 &if_true, &if_false, &fall_through);
3398 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3400 Split(eq, if_true, if_false, fall_through);
3402 context()->Plug(if_true, if_false);
3406 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3407 ZoneList<Expression*>* args = expr->arguments();
3408 DCHECK(args->length() == 1);
3410 VisitForAccumulatorValue(args->at(0));
3412 Label materialize_true, materialize_false;
3413 Label* if_true = NULL;
3414 Label* if_false = NULL;
3415 Label* fall_through = NULL;
3416 context()->PrepareTest(&materialize_true, &materialize_false,
3417 &if_true, &if_false, &fall_through);
3419 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3420 __ NonNegativeSmiTst(r0);
3421 Split(eq, if_true, if_false, fall_through);
3423 context()->Plug(if_true, if_false);
3427 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3428 ZoneList<Expression*>* args = expr->arguments();
3429 DCHECK(args->length() == 1);
3431 VisitForAccumulatorValue(args->at(0));
3433 Label materialize_true, materialize_false;
3434 Label* if_true = NULL;
3435 Label* if_false = NULL;
3436 Label* fall_through = NULL;
3437 context()->PrepareTest(&materialize_true, &materialize_false,
3438 &if_true, &if_false, &fall_through);
3440 __ JumpIfSmi(r0, if_false);
3441 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3444 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3445 // Undetectable objects behave like undefined when tested with typeof.
3446 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3447 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3449 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3450 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3452 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3453 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3454 Split(le, if_true, if_false, fall_through);
3456 context()->Plug(if_true, if_false);
3460 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3461 ZoneList<Expression*>* args = expr->arguments();
3462 DCHECK(args->length() == 1);
3464 VisitForAccumulatorValue(args->at(0));
3466 Label materialize_true, materialize_false;
3467 Label* if_true = NULL;
3468 Label* if_false = NULL;
3469 Label* fall_through = NULL;
3470 context()->PrepareTest(&materialize_true, &materialize_false,
3471 &if_true, &if_false, &fall_through);
3473 __ JumpIfSmi(r0, if_false);
3474 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3475 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3476 Split(ge, if_true, if_false, fall_through);
3478 context()->Plug(if_true, if_false);
3482 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3483 ZoneList<Expression*>* args = expr->arguments();
3484 DCHECK(args->length() == 1);
3486 VisitForAccumulatorValue(args->at(0));
3488 Label materialize_true, materialize_false;
3489 Label* if_true = NULL;
3490 Label* if_false = NULL;
3491 Label* fall_through = NULL;
3492 context()->PrepareTest(&materialize_true, &materialize_false,
3493 &if_true, &if_false, &fall_through);
3495 __ JumpIfSmi(r0, if_false);
3496 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3497 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3498 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3499 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3500 Split(ne, if_true, if_false, fall_through);
3502 context()->Plug(if_true, if_false);
3506 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3507 CallRuntime* expr) {
3508 ZoneList<Expression*>* args = expr->arguments();
3509 DCHECK(args->length() == 1);
3511 VisitForAccumulatorValue(args->at(0));
3513 Label materialize_true, materialize_false, skip_lookup;
3514 Label* if_true = NULL;
3515 Label* if_false = NULL;
3516 Label* fall_through = NULL;
3517 context()->PrepareTest(&materialize_true, &materialize_false,
3518 &if_true, &if_false, &fall_through);
3520 __ AssertNotSmi(r0);
3522 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3523 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3524 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3525 __ b(ne, &skip_lookup);
3527 // Check for fast case object. Generate false result for slow case object.
3528 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3529 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3530 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3534 // Look for valueOf name in the descriptor array, and indicate false if
3535 // found. Since we omit an enumeration index check, if it is added via a
3536 // transition that shares its descriptor array, this is a false positive.
3537 Label entry, loop, done;
3539 // Skip loop if no descriptors are valid.
3540 __ NumberOfOwnDescriptors(r3, r1);
3541 __ cmp(r3, Operand::Zero());
3544 __ LoadInstanceDescriptors(r1, r4);
3545 // r4: descriptor array.
3546 // r3: valid entries in the descriptor array.
3547 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3549 // Calculate location of the first key name.
3550 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3551 // Calculate the end of the descriptor array.
3553 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3555 // Loop through all the keys in the descriptor array. If one of these is
3556 // the string "valueOf", the result is false.
3557 // The use of ip to store the valueOf string assumes that it is not otherwise
3558 // used in the loop below.
3559 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3562 __ ldr(r3, MemOperand(r4, 0));
3565 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3567 __ cmp(r4, Operand(r2));
3572 // Set the bit in the map to indicate that there is no local valueOf field.
3573 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3574 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3575 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3577 __ bind(&skip_lookup);
3579 // If a valueOf property is not found on the object, check that its
3580 // prototype is the unmodified String prototype. If not, the result is false.
3581 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3582 __ JumpIfSmi(r2, if_false);
3583 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3584 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3585 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3586 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3588 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3589 Split(eq, if_true, if_false, fall_through);
3591 context()->Plug(if_true, if_false);
3595 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3596 ZoneList<Expression*>* args = expr->arguments();
3597 DCHECK(args->length() == 1);
3599 VisitForAccumulatorValue(args->at(0));
3601 Label materialize_true, materialize_false;
3602 Label* if_true = NULL;
3603 Label* if_false = NULL;
3604 Label* fall_through = NULL;
3605 context()->PrepareTest(&materialize_true, &materialize_false,
3606 &if_true, &if_false, &fall_through);
3608 __ JumpIfSmi(r0, if_false);
3609 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3610 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3611 Split(eq, if_true, if_false, fall_through);
3613 context()->Plug(if_true, if_false);
3617 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3618 ZoneList<Expression*>* args = expr->arguments();
3619 DCHECK(args->length() == 1);
3621 VisitForAccumulatorValue(args->at(0));
3623 Label materialize_true, materialize_false;
3624 Label* if_true = NULL;
3625 Label* if_false = NULL;
3626 Label* fall_through = NULL;
3627 context()->PrepareTest(&materialize_true, &materialize_false,
3628 &if_true, &if_false, &fall_through);
3630 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3631 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3632 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
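// -0.0 is the only heap number whose upper word is 0x80000000 (just the sign
// bit set) and whose lower word is zero.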
3633 __ cmp(r2, Operand(0x80000000));
3634 __ cmp(r1, Operand(0x00000000), eq);
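// Editor's note: IEEE-754 -0.0 is the 64-bit pattern 0x80000000'00000000, so
// the value is minus zero exactly when the exponent word is 0x80000000 and
// the mantissa word is 0x00000000. The second cmp is predicated on eq, so eq
// survives both checks only for -0.0 (plain 0.0 already fails the first
// compare).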
3636 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3637 Split(eq, if_true, if_false, fall_through);
3639 context()->Plug(if_true, if_false);
3643 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3644 ZoneList<Expression*>* args = expr->arguments();
3645 DCHECK(args->length() == 1);
3647 VisitForAccumulatorValue(args->at(0));
3649 Label materialize_true, materialize_false;
3650 Label* if_true = NULL;
3651 Label* if_false = NULL;
3652 Label* fall_through = NULL;
3653 context()->PrepareTest(&materialize_true, &materialize_false,
3654 &if_true, &if_false, &fall_through);
3656 __ JumpIfSmi(r0, if_false);
3657 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3658 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3659 Split(eq, if_true, if_false, fall_through);
3661 context()->Plug(if_true, if_false);
3665 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3666 ZoneList<Expression*>* args = expr->arguments();
3667 DCHECK(args->length() == 1);
3669 VisitForAccumulatorValue(args->at(0));
3671 Label materialize_true, materialize_false;
3672 Label* if_true = NULL;
3673 Label* if_false = NULL;
3674 Label* fall_through = NULL;
3675 context()->PrepareTest(&materialize_true, &materialize_false,
3676 &if_true, &if_false, &fall_through);
3678 __ JumpIfSmi(r0, if_false);
3679 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3681 Split(eq, if_true, if_false, fall_through);
3683 context()->Plug(if_true, if_false);
3687 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3688 ZoneList<Expression*>* args = expr->arguments();
3689 DCHECK(args->length() == 1);
3691 VisitForAccumulatorValue(args->at(0));
3693 Label materialize_true, materialize_false;
3694 Label* if_true = NULL;
3695 Label* if_false = NULL;
3696 Label* fall_through = NULL;
3697 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3698 &if_false, &fall_through);
3700 __ JumpIfSmi(r0, if_false);
3701 Register map = r1;
3702 Register type_reg = r2;
3703 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3704 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3705 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3706 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
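// Editor's note: the sub/cmp pair above is the usual unsigned range-check
// idiom: after subtracting FIRST_JS_PROXY_TYPE, every type in
// [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE] maps to a small unsigned value,
// so the single ls (unsigned lower-or-same) branch below covers both bounds.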
3707 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3708 Split(ls, if_true, if_false, fall_through);
3710 context()->Plug(if_true, if_false);
3714 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3715 DCHECK(expr->arguments()->length() == 0);
3717 Label materialize_true, materialize_false;
3718 Label* if_true = NULL;
3719 Label* if_false = NULL;
3720 Label* fall_through = NULL;
3721 context()->PrepareTest(&materialize_true, &materialize_false,
3722 &if_true, &if_false, &fall_through);
3724 // Get the frame pointer for the calling frame.
3725 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3727 // Skip the arguments adaptor frame if it exists.
3728 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3729 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3730 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
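// Editor's note: the eq-predicated ldr above replaces a branch: the caller
// frame pointer is re-loaded from one frame further out only when the cmp
// found an arguments adaptor frame; otherwise r2 is left untouched.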
3732 // Check the marker in the calling frame.
3733 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3734 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3735 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3736 Split(eq, if_true, if_false, fall_through);
3738 context()->Plug(if_true, if_false);
3742 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3743 ZoneList<Expression*>* args = expr->arguments();
3744 DCHECK(args->length() == 2);
3746 // Load the two objects into registers and perform the comparison.
3747 VisitForStackValue(args->at(0));
3748 VisitForAccumulatorValue(args->at(1));
3750 Label materialize_true, materialize_false;
3751 Label* if_true = NULL;
3752 Label* if_false = NULL;
3753 Label* fall_through = NULL;
3754 context()->PrepareTest(&materialize_true, &materialize_false,
3755 &if_true, &if_false, &fall_through);
3759 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3760 Split(eq, if_true, if_false, fall_through);
3762 context()->Plug(if_true, if_false);
3766 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3767 ZoneList<Expression*>* args = expr->arguments();
3768 DCHECK(args->length() == 1);
3770 // ArgumentsAccessStub expects the key in r1 and the formal
3771 // parameter count in r0.
3772 VisitForAccumulatorValue(args->at(0));
3773 __ mov(r1, r0);
3774 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3775 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3776 __ CallStub(&stub);
3777 context()->Plug(r0);
3781 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3782 DCHECK(expr->arguments()->length() == 0);
3784 // Get the number of formal parameters.
3785 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3787 // Check if the calling frame is an arguments adaptor frame.
3788 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3789 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3790 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3792 // Arguments adaptor case: Read the arguments length from the
3793 // adaptor frame.
3794 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3796 context()->Plug(r0);
3800 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3801 ZoneList<Expression*>* args = expr->arguments();
3802 DCHECK(args->length() == 1);
3803 Label done, null, function, non_function_constructor;
3805 VisitForAccumulatorValue(args->at(0));
3807 // If the object is a smi, we return null.
3808 __ JumpIfSmi(r0, &null);
3810 // Check that the object is a JS object but take special care of JS
3811 // functions to make sure they have 'Function' as their class.
3812 // Assume that there are only two callable types, and one of them is at
3813 // either end of the type range for JS object types. Saves extra comparisons.
3814 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3815 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3816 // Map is now in r0.
3817 __ b(lt, &null);
3818 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3819 FIRST_SPEC_OBJECT_TYPE + 1);
3820 __ b(eq, &function);
3822 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3823 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3824 LAST_SPEC_OBJECT_TYPE - 1);
3825 __ b(eq, &function);
3826 // Assume that there is no larger type.
3827 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
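// Editor's note: because the two callable types sit at the very ends of the
// spec-object instance-type range (as the STATIC_ASSERTs pin down), the two
// equality checks above plus the initial range check are enough to classify
// every callable as 'Function' without loading any bit fields.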
3829 // Check if the constructor in the map is a JS function.
3830 Register instance_type = r2;
3831 __ GetMapConstructor(r0, r0, r1, instance_type);
3832 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3833 __ b(ne, &non_function_constructor);
3835 // r0 now contains the constructor function. Grab the
3836 // instance class name from there.
3837 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3838 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3839 __ b(&done);
3841 // Functions have class 'Function'.
3842 __ bind(&function);
3843 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3844 __ jmp(&done);
3846 // Objects with a non-function constructor have class 'Object'.
3847 __ bind(&non_function_constructor);
3848 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3849 __ jmp(&done);
3851 // Non-JS objects have class null.
3852 __ bind(&null);
3853 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3856 // All done.
3857 __ bind(&done);
3858 context()->Plug(r0);
3862 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3863 // Load the arguments on the stack and call the stub.
3864 SubStringStub stub(isolate());
3865 ZoneList<Expression*>* args = expr->arguments();
3866 DCHECK(args->length() == 3);
3867 VisitForStackValue(args->at(0));
3868 VisitForStackValue(args->at(1));
3869 VisitForStackValue(args->at(2));
3870 __ CallStub(&stub);
3871 context()->Plug(r0);
3875 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3876 // Load the arguments on the stack and call the stub.
3877 RegExpExecStub stub(isolate());
3878 ZoneList<Expression*>* args = expr->arguments();
3879 DCHECK(args->length() == 4);
3880 VisitForStackValue(args->at(0));
3881 VisitForStackValue(args->at(1));
3882 VisitForStackValue(args->at(2));
3883 VisitForStackValue(args->at(3));
3884 __ CallStub(&stub);
3885 context()->Plug(r0);
3889 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3890 ZoneList<Expression*>* args = expr->arguments();
3891 DCHECK(args->length() == 1);
3892 VisitForAccumulatorValue(args->at(0)); // Load the object.
3894 Label done;
3895 // If the object is a smi, return the object.
3896 __ JumpIfSmi(r0, &done);
3897 // If the object is not a value type, return the object.
3898 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3899 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3901 __ bind(&done);
3902 context()->Plug(r0);
3906 void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
3907 ZoneList<Expression*>* args = expr->arguments();
3908 DCHECK_EQ(1, args->length());
3910 VisitForAccumulatorValue(args->at(0)); // Load the object.
3912 Label done, not_date_object;
3913 Register object = r0;
3914 Register result = r0;
3915 Register scratch0 = r9;
3917 __ JumpIfSmi(object, &not_date_object);
3918 __ CompareObjectType(object, scratch0, scratch0, JS_DATE_TYPE);
3919 __ b(eq, &done);
3920 __ bind(&not_date_object);
3921 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3923 __ bind(&done);
3924 context()->Plug(result);
3928 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3929 ZoneList<Expression*>* args = expr->arguments();
3930 DCHECK(args->length() == 2);
3931 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3932 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3934 VisitForAccumulatorValue(args->at(0)); // Load the object.
3936 Register object = r0;
3937 Register result = r0;
3938 Register scratch0 = r9;
3939 Register scratch1 = r1;
3941 if (index->value() == 0) {
3942 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3943 } else {
3944 Label runtime, done;
3945 if (index->value() < JSDate::kFirstUncachedField) {
3946 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3947 __ mov(scratch1, Operand(stamp));
3948 __ ldr(scratch1, MemOperand(scratch1));
3949 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3950 __ cmp(scratch1, scratch0);
3951 __ b(ne, &runtime);
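// Editor's note: JSDate objects cache computed fields (year, month, and so
// on), and the isolate-wide date cache stamp is bumped whenever that cache is
// invalidated; a stamp match therefore means the per-object cached field is
// still valid and can be read directly below.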
3952 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3953 kPointerSize * index->value()));
3954 __ jmp(&done);
3955 }
3956 __ bind(&runtime);
3957 __ PrepareCallCFunction(2, scratch1);
3958 __ mov(r1, Operand(index));
3959 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3960 __ bind(&done);
3961 }
3963 context()->Plug(result);
3967 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3968 ZoneList<Expression*>* args = expr->arguments();
3969 DCHECK_EQ(3, args->length());
3971 Register string = r0;
3972 Register index = r1;
3973 Register value = r2;
3975 VisitForStackValue(args->at(0)); // index
3976 VisitForStackValue(args->at(1)); // value
3977 VisitForAccumulatorValue(args->at(2)); // string
3978 __ Pop(index, value);
3980 if (FLAG_debug_code) {
3981 __ SmiTst(value);
3982 __ Check(eq, kNonSmiValue);
3983 __ SmiTst(index);
3984 __ Check(eq, kNonSmiIndex);
3985 __ SmiUntag(index, index);
3986 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3987 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3988 __ SmiTag(index, index);
3989 }
3991 __ SmiUntag(value, value);
3992 __ add(ip,
3993 string,
3994 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
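// Editor's note: index is still smi-tagged here (tag size 1, tag 0), i.e. it
// holds 2 * i, and the LSR #kSmiTagSize in the address below untags it on the
// fly, so no separate SmiUntag of the index is needed for one-byte stores.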
3995 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3996 context()->Plug(string);
4000 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
4001 ZoneList<Expression*>* args = expr->arguments();
4002 DCHECK_EQ(3, args->length());
4004 Register string = r0;
4005 Register index = r1;
4006 Register value = r2;
4008 VisitForStackValue(args->at(0)); // index
4009 VisitForStackValue(args->at(1)); // value
4010 VisitForAccumulatorValue(args->at(2)); // string
4011 __ Pop(index, value);
4013 if (FLAG_debug_code) {
4014 __ SmiTst(value);
4015 __ Check(eq, kNonSmiValue);
4016 __ SmiTst(index);
4017 __ Check(eq, kNonSmiIndex);
4018 __ SmiUntag(index, index);
4019 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4020 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
4021 __ SmiTag(index, index);
4022 }
4024 __ SmiUntag(value, value);
4025 __ add(ip,
4026 string,
4027 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4028 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
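// Editor's note: for two-byte characters the byte offset is 2 * i, which is
// exactly the smi encoding of i (guarded by the STATIC_ASSERT above), so the
// tagged index can be used as the offset unchanged.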
4029 __ strh(value, MemOperand(ip, index));
4030 context()->Plug(string);
4035 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
4036 // Load the arguments on the stack and call the runtime function.
4037 ZoneList<Expression*>* args = expr->arguments();
4038 DCHECK(args->length() == 2);
4039 VisitForStackValue(args->at(0));
4040 VisitForStackValue(args->at(1));
4041 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4042 __ CallStub(&stub);
4043 context()->Plug(r0);
4047 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4048 ZoneList<Expression*>* args = expr->arguments();
4049 DCHECK(args->length() == 2);
4050 VisitForStackValue(args->at(0)); // Load the object.
4051 VisitForAccumulatorValue(args->at(1)); // Load the value.
4052 __ pop(r1); // r0 = value. r1 = object.
4054 Label done;
4055 // If the object is a smi, return the value.
4056 __ JumpIfSmi(r1, &done);
4058 // If the object is not a value type, return the value.
4059 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
4060 __ b(ne, &done);
4062 // Store the value.
4063 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
4064 // Update the write barrier. Save the value as it will be
4065 // overwritten by the write barrier code and is needed afterward.
4066 __ mov(r2, r0);
4067 __ RecordWriteField(
4068 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
4070 __ bind(&done);
4071 context()->Plug(r0);
4075 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4076 ZoneList<Expression*>* args = expr->arguments();
4077 DCHECK_EQ(args->length(), 1);
4078 // Load the argument into r0 and call the stub.
4079 VisitForAccumulatorValue(args->at(0));
4081 NumberToStringStub stub(isolate());
4082 __ CallStub(&stub);
4083 context()->Plug(r0);
4087 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4088 ZoneList<Expression*>* args = expr->arguments();
4089 DCHECK(args->length() == 1);
4090 VisitForAccumulatorValue(args->at(0));
4092 Label done;
4093 StringCharFromCodeGenerator generator(r0, r1);
4094 generator.GenerateFast(masm_);
4095 __ jmp(&done);
4097 NopRuntimeCallHelper call_helper;
4098 generator.GenerateSlow(masm_, call_helper);
4100 __ bind(&done);
4101 context()->Plug(r1);
4105 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4106 ZoneList<Expression*>* args = expr->arguments();
4107 DCHECK(args->length() == 2);
4108 VisitForStackValue(args->at(0));
4109 VisitForAccumulatorValue(args->at(1));
4111 Register object = r1;
4112 Register index = r0;
4113 Register result = r3;
4115 __ pop(object);
4117 Label need_conversion;
4118 Label index_out_of_range;
4119 Label done;
4120 StringCharCodeAtGenerator generator(object,
4121 index,
4122 result,
4123 &need_conversion,
4124 &need_conversion,
4125 &index_out_of_range,
4126 STRING_INDEX_IS_NUMBER);
4127 generator.GenerateFast(masm_);
4128 __ jmp(&done);
4130 __ bind(&index_out_of_range);
4131 // When the index is out of range, the spec requires us to return
4132 // NaN.
4133 __ LoadRoot(result, Heap::kNanValueRootIndex);
4134 __ jmp(&done);
4136 __ bind(&need_conversion);
4137 // Load the undefined value into the result register, which will
4138 // trigger conversion.
4139 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4140 __ jmp(&done);
4142 NopRuntimeCallHelper call_helper;
4143 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4145 __ bind(&done);
4146 context()->Plug(result);
4150 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4151 ZoneList<Expression*>* args = expr->arguments();
4152 DCHECK(args->length() == 2);
4153 VisitForStackValue(args->at(0));
4154 VisitForAccumulatorValue(args->at(1));
4156 Register object = r1;
4157 Register index = r0;
4158 Register scratch = r3;
4159 Register result = r0;
4161 __ pop(object);
4163 Label need_conversion;
4164 Label index_out_of_range;
4165 Label done;
4166 StringCharAtGenerator generator(object,
4167 index,
4168 scratch,
4169 result,
4170 &need_conversion,
4171 &need_conversion,
4172 &index_out_of_range,
4173 STRING_INDEX_IS_NUMBER);
4174 generator.GenerateFast(masm_);
4175 __ jmp(&done);
4177 __ bind(&index_out_of_range);
4178 // When the index is out of range, the spec requires us to return
4179 // the empty string.
4180 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4181 __ jmp(&done);
4183 __ bind(&need_conversion);
4184 // Move smi zero into the result register, which will trigger
4185 // conversion.
4186 __ mov(result, Operand(Smi::FromInt(0)));
4187 __ jmp(&done);
4189 NopRuntimeCallHelper call_helper;
4190 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4192 __ bind(&done);
4193 context()->Plug(result);
4197 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4198 ZoneList<Expression*>* args = expr->arguments();
4199 DCHECK_EQ(2, args->length());
4200 VisitForStackValue(args->at(0));
4201 VisitForAccumulatorValue(args->at(1));
4204 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4205 __ CallStub(&stub);
4206 context()->Plug(r0);
4210 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4211 ZoneList<Expression*>* args = expr->arguments();
4212 DCHECK_EQ(2, args->length());
4213 VisitForStackValue(args->at(0));
4214 VisitForStackValue(args->at(1));
4216 StringCompareStub stub(isolate());
4217 __ CallStub(&stub);
4218 context()->Plug(r0);
4222 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4223 ZoneList<Expression*>* args = expr->arguments();
4224 DCHECK(args->length() >= 2);
4226 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4227 for (int i = 0; i < arg_count + 1; i++) {
4228 VisitForStackValue(args->at(i));
4229 }
4230 VisitForAccumulatorValue(args->last()); // Function.
4232 Label runtime, done;
4233 // Check for non-function argument (including proxy).
4234 __ JumpIfSmi(r0, &runtime);
4235 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4236 __ b(ne, &runtime);
4238 // InvokeFunction requires the function in r1. Move it in there.
4239 __ mov(r1, result_register());
4240 ParameterCount count(arg_count);
4241 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
4242 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4243 __ jmp(&done);
4245 __ bind(&runtime);
4246 __ push(r0);
4247 __ CallRuntime(Runtime::kCall, args->length());
4248 __ bind(&done);
4250 context()->Plug(r0);
4254 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4255 ZoneList<Expression*>* args = expr->arguments();
4256 DCHECK(args->length() == 2);
4259 VisitForStackValue(args->at(0));
4262 VisitForStackValue(args->at(1));
4263 __ CallRuntime(Runtime::kGetPrototype, 1);
4264 __ Push(result_register());
4266 // Check if the calling frame is an arguments adaptor frame.
4267 Label adaptor_frame, args_set_up, runtime;
4268 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4269 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4270 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4271 __ b(eq, &adaptor_frame);
4272 // The default constructor has no arguments, so no adaptor frame means no args.
4273 __ mov(r0, Operand::Zero());
4274 __ b(&args_set_up);
4276 // Copy arguments from adaptor frame.
4278 __ bind(&adaptor_frame);
4279 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4280 __ SmiUntag(r1, r1);
4282 // Subtract 1 from arguments count, for new.target.
4283 __ sub(r1, r1, Operand(1));
4284 __ mov(r0, r1);
4286 // Get arguments pointer in r2.
4287 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
4288 __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
4289 Label loop;
4290 __ bind(&loop);
4291 // Pre-decrement r2 with kPointerSize on each iteration.
4292 // Pre-decrement in order to skip receiver.
4293 __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
4294 __ Push(r3);
4295 __ sub(r1, r1, Operand(1));
4296 __ cmp(r1, Operand::Zero());
4297 __ b(ne, &loop);
4300 __ bind(&args_set_up);
4301 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4302 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
4304 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4305 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4309 context()->Plug(result_register());
4313 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4314 RegExpConstructResultStub stub(isolate());
4315 ZoneList<Expression*>* args = expr->arguments();
4316 DCHECK(args->length() == 3);
4317 VisitForStackValue(args->at(0));
4318 VisitForStackValue(args->at(1));
4319 VisitForAccumulatorValue(args->at(2));
4320 __ pop(r1);
4321 __ pop(r2);
4322 __ CallStub(&stub);
4323 context()->Plug(r0);
4327 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4328 ZoneList<Expression*>* args = expr->arguments();
4329 DCHECK_EQ(2, args->length());
4330 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4331 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4333 Handle<FixedArray> jsfunction_result_caches(
4334 isolate()->native_context()->jsfunction_result_caches());
4335 if (jsfunction_result_caches->length() <= cache_id) {
4336 __ Abort(kAttemptToUseUndefinedCache);
4337 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4338 context()->Plug(r0);
4339 return;
4340 }
4342 VisitForAccumulatorValue(args->at(1));
4344 Register key = r0;
4345 Register cache = r1;
4346 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4347 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4348 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4349 __ ldr(cache,
4350 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4353 Label done, not_found;
4354 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4355 // r2 now holds finger offset as a smi.
4356 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4357 // r3 now points to the start of fixed array elements.
4358 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
4359 // Note side effect of PreIndex: r3 now points to the key of the pair.
4360 __ cmp(key, r2);
4361 __ b(ne, &not_found);
4363 __ ldr(r0, MemOperand(r3, kPointerSize));
4364 __ b(&done);
4366 __ bind(&not_found);
4367 // Call runtime to perform the lookup.
4368 __ Push(cache, key);
4369 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4371 __ bind(&done);
4372 context()->Plug(r0);
4376 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4377 ZoneList<Expression*>* args = expr->arguments();
4378 VisitForAccumulatorValue(args->at(0));
4380 Label materialize_true, materialize_false;
4381 Label* if_true = NULL;
4382 Label* if_false = NULL;
4383 Label* fall_through = NULL;
4384 context()->PrepareTest(&materialize_true, &materialize_false,
4385 &if_true, &if_false, &fall_through);
4387 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4388 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
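// Editor's note: a string's hash field can cache a numeric array index, and
// the kContainsCachedArrayIndexMask bits are all zero exactly when such an
// index is present, which is what the tst above tests for.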
4389 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4390 Split(eq, if_true, if_false, fall_through);
4392 context()->Plug(if_true, if_false);
4396 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4397 ZoneList<Expression*>* args = expr->arguments();
4398 DCHECK(args->length() == 1);
4399 VisitForAccumulatorValue(args->at(0));
4401 __ AssertString(r0);
4403 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4404 __ IndexFromHash(r0, r0);
4406 context()->Plug(r0);
4410 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4411 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4412 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4413 one_char_separator_loop_entry, long_separator_loop;
4414 ZoneList<Expression*>* args = expr->arguments();
4415 DCHECK(args->length() == 2);
4416 VisitForStackValue(args->at(1));
4417 VisitForAccumulatorValue(args->at(0));
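// Editor's note (illustrative): this is the fast path for joining an array of
// flat one-byte strings, e.g. ["a", "b", "c"].join("-") producing "a-b-c".
// Anything unexpected (non-fast elements, non-sequential strings, smi
// overflow of the result length) jumps to &bailout, which appears to return
// undefined so the caller can fall back to the generic join.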
4419 // All aliases of the same register have disjoint lifetimes.
4420 Register array = r0;
4421 Register elements = no_reg; // Will be r0.
4422 Register result = no_reg; // Will be r0.
4423 Register separator = r1;
4424 Register array_length = r2;
4425 Register result_pos = no_reg; // Will be r2
4426 Register string_length = r3;
4427 Register string = r4;
4428 Register element = r5;
4429 Register elements_end = r6;
4430 Register scratch = r9;
4432 // Separator operand is on the stack.
4433 __ ldr(separator, MemOperand(sp));
4435 // Check that the array is a JSArray.
4436 __ JumpIfSmi(array, &bailout);
4437 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
4438 __ b(ne, &bailout);
4440 // Check that the array has fast elements.
4441 __ CheckFastElements(scratch, array_length, &bailout);
4443 // If the array has length zero, return the empty string.
4444 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4445 __ SmiUntag(array_length, SetCC);
4446 __ b(ne, &non_trivial_array);
4447 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
4448 __ b(&done);
4450 __ bind(&non_trivial_array);
4452 // Get the FixedArray containing array's elements.
4454 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4455 array = no_reg; // End of array's live range.
4457 // Check that all array elements are sequential one-byte strings, and
4458 // accumulate the sum of their lengths, as a smi-encoded value.
4459 __ mov(string_length, Operand::Zero());
4460 __ add(element,
4461 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4462 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4463 // Loop condition: while (element < elements_end).
4464 // Live values in registers:
4465 // elements: Fixed array of strings.
4466 // array_length: Length of the fixed array of strings (not smi)
4467 // separator: Separator string
4468 // string_length: Accumulated sum of string lengths (smi).
4469 // element: Current array element.
4470 // elements_end: Array end.
4471 if (generate_debug_code_) {
4472 __ cmp(array_length, Operand::Zero());
4473 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4474 }
4475 __ bind(&loop);
4476 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4477 __ JumpIfSmi(string, &bailout);
4478 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
4479 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4480 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4481 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4482 __ add(string_length, string_length, Operand(scratch), SetCC);
4483 __ b(vs, &bailout);
4484 __ cmp(element, elements_end);
4485 __ b(lt, &loop);
4487 // If array_length is 1, return elements[0], a string.
4488 __ cmp(array_length, Operand(1));
4489 __ b(ne, &not_size_one_array);
4490 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4491 __ b(&done);
4493 __ bind(&not_size_one_array);
4495 // Live values in registers:
4496 // separator: Separator string
4497 // array_length: Length of the array.
4498 // string_length: Sum of string lengths (smi).
4499 // elements: FixedArray of strings.
4501 // Check that the separator is a flat one-byte string.
4502 __ JumpIfSmi(separator, &bailout);
4503 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4504 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4505 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4507 // Add (separator length times array_length) - separator length to the
4508 // string_length to get the length of the result string. array_length is not
4509 // smi but the other values are, so the result is a smi
4510 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4511 __ sub(string_length, string_length, Operand(scratch));
4512 __ smull(scratch, ip, array_length, scratch);
4513 // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
4514 // result are zero.
4515 __ cmp(ip, Operand::Zero());
4516 __ b(ne, &bailout);
4517 __ tst(scratch, Operand(0x80000000));
4518 __ b(ne, &bailout);
4519 __ add(string_length, string_length, Operand(scratch), SetCC);
4520 __ b(vs, &bailout);
4521 __ SmiUntag(string_length);
4523 // Get first element in the array to free up the elements register to be used
4524 // for the result.
4525 __ add(element,
4526 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4527 result = elements;  // End of live range for elements.
4528 elements = no_reg;
4529 // Live values in registers:
4530 // element: First array element
4531 // separator: Separator string
4532 // string_length: Length of result string (not smi)
4533 // array_length: Length of the array.
4534 __ AllocateOneByteString(result, string_length, scratch,
4535 string,        // used as scratch
4536 elements_end,  // used as scratch
4537 &bailout);
4538 // Prepare for looping. Set up elements_end to end of the array. Set
4539 // result_pos to the position of the result where to write the first
4540 // element.
4541 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4542 result_pos = array_length; // End of live range for array_length.
4543 array_length = no_reg;
4544 __ add(result_pos,
4545 result,
4546 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4548 // Check the length of the separator.
4549 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4550 __ cmp(scratch, Operand(Smi::FromInt(1)));
4551 __ b(eq, &one_char_separator);
4552 __ b(gt, &long_separator);
4554 // Empty separator case
4555 __ bind(&empty_separator_loop);
4556 // Live values in registers:
4557 // result_pos: the position to which we are currently copying characters.
4558 // element: Current array element.
4559 // elements_end: Array end.
4561 // Copy next array element to the result.
4562 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4563 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4564 __ SmiUntag(string_length);
4565 __ add(string,
4566 string,
4567 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4568 __ CopyBytes(string, result_pos, string_length, scratch);
4569 __ cmp(element, elements_end);
4570 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4571 DCHECK(result.is(r0));
4572 __ b(&done);
4574 // One-character separator case
4575 __ bind(&one_char_separator);
4576 // Replace separator with its one-byte character value.
4577 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4578 // Jump into the loop after the code that copies the separator, so the first
4579 // element is not preceded by a separator
4580 __ jmp(&one_char_separator_loop_entry);
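// Editor's note: jumping straight to the loop entry rotates the loop so the
// separator copy is skipped exactly once, producing "a,b" rather than ",a,b"
// when joining ["a", "b"] with ",".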
4582 __ bind(&one_char_separator_loop);
4583 // Live values in registers:
4584 // result_pos: the position to which we are currently copying characters.
4585 // element: Current array element.
4586 // elements_end: Array end.
4587 // separator: Single separator one-byte char (in lower byte).
4589 // Copy the separator character to the result.
4590 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4592 // Copy next array element to the result.
4593 __ bind(&one_char_separator_loop_entry);
4594 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4595 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4596 __ SmiUntag(string_length);
4597 __ add(string,
4598 string,
4599 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4600 __ CopyBytes(string, result_pos, string_length, scratch);
4601 __ cmp(element, elements_end);
4602 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4603 DCHECK(result.is(r0));
4604 __ b(&done);
4606 // Long separator case (separator is more than one character). Entry is at the
4607 // label long_separator below.
4608 __ bind(&long_separator_loop);
4609 // Live values in registers:
4610 // result_pos: the position to which we are currently copying characters.
4611 // element: Current array element.
4612 // elements_end: Array end.
4613 // separator: Separator string.
4615 // Copy the separator to the result.
4616 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4617 __ SmiUntag(string_length);
4618 __ add(string,
4619 separator,
4620 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4621 __ CopyBytes(string, result_pos, string_length, scratch);
4623 __ bind(&long_separator);
4624 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4625 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4626 __ SmiUntag(string_length);
4627 __ add(string,
4628 string,
4629 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4630 __ CopyBytes(string, result_pos, string_length, scratch);
4631 __ cmp(element, elements_end);
4632 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4633 DCHECK(result.is(r0));
4634 __ b(&done);
4636 __ bind(&bailout);
4637 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4638 __ bind(&done);
4639 context()->Plug(r0);
4643 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4644 DCHECK(expr->arguments()->length() == 0);
4645 ExternalReference debug_is_active =
4646 ExternalReference::debug_is_active_address(isolate());
4647 __ mov(ip, Operand(debug_is_active));
4648 __ ldrb(r0, MemOperand(ip));
4649 __ SmiTag(r0);
4650 context()->Plug(r0);
4654 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4655 // Assert: expr === CallRuntime("ReflectConstruct")
4656 DCHECK_EQ(1, expr->arguments()->length());
4657 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4659 ZoneList<Expression*>* args = call->arguments();
4660 DCHECK_EQ(3, args->length());
4662 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4663 DCHECK_NOT_NULL(super_call_ref);
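// Editor's note: a spread super call (super(...args)) reaches this emitter
// already rewritten into a %ReflectConstruct call taking the super
// constructor, the arguments array, and new.target (per the assert above);
// the code below merely arranges those operands and invokes the JS builtin.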
4665 // Load ReflectConstruct function
4666 EmitLoadJSRuntimeFunction(call);
4668 // Push the target function under the receiver.
4669 __ ldr(ip, MemOperand(sp, 0));
4670 __ push(ip);
4671 __ str(r0, MemOperand(sp, kPointerSize));
4673 // Push super constructor
4674 EmitLoadSuperConstructor(super_call_ref);
4675 __ Push(result_register());
4677 // Push arguments array
4678 VisitForStackValue(args->at(1));
4681 DCHECK(args->at(2)->IsVariableProxy());
4682 VisitForStackValue(args->at(2));
4684 EmitCallJSRuntimeFunction(call);
4686 // Restore context register.
4687 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4688 context()->DropAndPlug(1, r0);
4690 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4691 EmitInitializeThisAfterSuper(super_call_ref);
4695 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4696 // Push the builtins object as the receiver.
4697 Register receiver = LoadDescriptor::ReceiverRegister();
4698 __ ldr(receiver, GlobalObjectOperand());
4699 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4700 __ push(receiver);
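// Editor's note: functions callable through %name(...) syntax from JS live as
// properties of the builtins object, so it doubles as the lookup receiver
// here and as the call receiver pushed for the eventual call.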
4702 // Load the function from the receiver.
4703 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4704 __ mov(LoadDescriptor::SlotRegister(),
4705 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4706 CallLoadIC(NOT_CONTEXTUAL);
4710 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4711 ZoneList<Expression*>* args = expr->arguments();
4712 int arg_count = args->length();
4714 // Record source position of the IC call.
4715 SetSourcePosition(expr->position());
4716 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4717 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4718 __ CallStub(&stub);
4722 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4723 ZoneList<Expression*>* args = expr->arguments();
4724 int arg_count = args->length();
4726 if (expr->is_jsruntime()) {
4727 Comment cmnt(masm_, "[ CallRuntime");
4728 EmitLoadJSRuntimeFunction(expr);
4730 // Push the target function under the receiver.
4731 __ ldr(ip, MemOperand(sp, 0));
4732 __ push(ip);
4733 __ str(r0, MemOperand(sp, kPointerSize));
4735 // Push the arguments ("left-to-right").
4736 for (int i = 0; i < arg_count; i++) {
4737 VisitForStackValue(args->at(i));
4738 }
4740 EmitCallJSRuntimeFunction(expr);
4742 // Restore context register.
4743 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4745 context()->DropAndPlug(1, r0);
4747 } else {
4748 const Runtime::Function* function = expr->function();
4749 switch (function->function_id) {
4750 #define CALL_INTRINSIC_GENERATOR(Name) \
4751 case Runtime::kInline##Name: { \
4752 Comment cmnt(masm_, "[ Inline" #Name); \
4753 return Emit##Name(expr); \
4755 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4756 #undef CALL_INTRINSIC_GENERATOR
4757 default: {
4758 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4759 // Push the arguments ("left-to-right").
4760 for (int i = 0; i < arg_count; i++) {
4761 VisitForStackValue(args->at(i));
4762 }
4764 // Call the C runtime function.
4765 __ CallRuntime(expr->function(), arg_count);
4766 context()->Plug(r0);
4773 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4774 switch (expr->op()) {
4775 case Token::DELETE: {
4776 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4777 Property* property = expr->expression()->AsProperty();
4778 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4780 if (property != NULL) {
4781 VisitForStackValue(property->obj());
4782 VisitForStackValue(property->key());
4783 __ mov(r1, Operand(Smi::FromInt(language_mode())));
4784 __ push(r1);
4785 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4786 context()->Plug(r0);
4787 } else if (proxy != NULL) {
4788 Variable* var = proxy->var();
4789 // Delete of an unqualified identifier is disallowed in strict mode
4790 // but "delete this" is allowed.
4791 DCHECK(is_sloppy(language_mode()) || var->is_this());
4792 if (var->IsUnallocated()) {
4793 __ ldr(r2, GlobalObjectOperand());
4794 __ mov(r1, Operand(var->name()));
4795 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4796 __ Push(r2, r1, r0);
4797 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4798 context()->Plug(r0);
4799 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4800 // Result of deleting non-global, non-dynamic variables is false.
4801 // The subexpression does not have side effects.
4802 context()->Plug(var->is_this());
4803 } else {
4804 // Non-global variable. Call the runtime to try to delete from the
4805 // context where the variable was introduced.
4806 DCHECK(!context_register().is(r2));
4807 __ mov(r2, Operand(var->name()));
4808 __ Push(context_register(), r2);
4809 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4810 context()->Plug(r0);
4811 }
4812 } else {
4813 // Result of deleting non-property, non-variable reference is true.
4814 // The subexpression may have side effects.
4815 VisitForEffect(expr->expression());
4816 context()->Plug(true);
4822 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4823 VisitForEffect(expr->expression());
4824 context()->Plug(Heap::kUndefinedValueRootIndex);
4829 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4830 if (context()->IsEffect()) {
4831 // Unary NOT has no side effects so it's only necessary to visit the
4832 // subexpression. Match the optimizing compiler by not branching.
4833 VisitForEffect(expr->expression());
4834 } else if (context()->IsTest()) {
4835 const TestContext* test = TestContext::cast(context());
4836 // The labels are swapped for the recursive call.
4837 VisitForControl(expr->expression(),
4838 test->false_label(),
4839 test->true_label(),
4840 test->fall_through());
4841 context()->Plug(test->true_label(), test->false_label());
4843 // We handle value contexts explicitly rather than simply visiting
4844 // for control and plugging the control flow into the context,
4845 // because we need to prepare a pair of extra administrative AST ids
4846 // for the optimizing compiler.
4847 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4848 Label materialize_true, materialize_false, done;
4849 VisitForControl(expr->expression(),
4853 __ bind(&materialize_true);
4854 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4855 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4856 if (context()->IsStackValue()) __ push(r0);
4857 __ jmp(&done);
4858 __ bind(&materialize_false);
4859 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4860 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4861 if (context()->IsStackValue()) __ push(r0);
4862 __ bind(&done);
4863 }
4864 break;
4865 }
4867 case Token::TYPEOF: {
4868 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4869 {
4870 AccumulatorValueContext context(this);
4871 VisitForTypeofValue(expr->expression());
4872 }
4873 __ mov(r3, r0);
4874 TypeofStub typeof_stub(isolate());
4875 __ CallStub(&typeof_stub);
4876 context()->Plug(r0);
4886 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4887 DCHECK(expr->expression()->IsValidReferenceExpression());
4889 Comment cmnt(masm_, "[ CountOperation");
4890 SetSourcePosition(expr->position());
4892 Property* prop = expr->expression()->AsProperty();
4893 LhsKind assign_type = Property::GetAssignType(prop);
4895 // Evaluate expression and get value.
4896 if (assign_type == VARIABLE) {
4897 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4898 AccumulatorValueContext context(this);
4899 EmitVariableLoad(expr->expression()->AsVariableProxy());
4900 } else {
4901 // Reserve space for result of postfix operation.
4902 if (expr->is_postfix() && !context()->IsEffect()) {
4903 __ mov(ip, Operand(Smi::FromInt(0)));
4904 __ push(ip);
4905 }
4906 switch (assign_type) {
4907 case NAMED_PROPERTY: {
4908 // Put the object both on the stack and in the register.
4909 VisitForStackValue(prop->obj());
4910 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4911 EmitNamedPropertyLoad(prop);
4912 break;
4913 }
4915 case NAMED_SUPER_PROPERTY: {
4916 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4917 VisitForAccumulatorValue(
4918 prop->obj()->AsSuperPropertyReference()->home_object());
4919 __ Push(result_register());
4920 const Register scratch = r1;
4921 __ ldr(scratch, MemOperand(sp, kPointerSize));
4922 __ Push(scratch);
4923 __ Push(result_register());
4924 EmitNamedSuperPropertyLoad(prop);
4925 break;
4926 }
4928 case KEYED_SUPER_PROPERTY: {
4929 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4930 VisitForStackValue(
4931 prop->obj()->AsSuperPropertyReference()->home_object());
4932 VisitForAccumulatorValue(prop->key());
4933 __ Push(result_register());
4934 const Register scratch = r1;
4935 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4936 __ Push(scratch);
4937 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4938 __ Push(scratch);
4939 __ Push(result_register());
4940 EmitKeyedSuperPropertyLoad(prop);
4941 break;
4942 }
4944 case KEYED_PROPERTY: {
4945 VisitForStackValue(prop->obj());
4946 VisitForStackValue(prop->key());
4947 __ ldr(LoadDescriptor::ReceiverRegister(),
4948 MemOperand(sp, 1 * kPointerSize));
4949 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4950 EmitKeyedPropertyLoad(prop);
4951 break;
4952 }
4954 case VARIABLE:
4955 UNREACHABLE();
4956 }
4957 }
4959 // We need a second deoptimization point after loading the value
4960 // in case evaluating the property load has a side effect.
4961 if (assign_type == VARIABLE) {
4962 PrepareForBailout(expr->expression(), TOS_REG);
4963 } else {
4964 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4965 }
4967 // Inline smi case if we are in a loop.
4968 Label stub_call, done;
4969 JumpPatchSite patch_site(masm_);
4971 int count_value = expr->op() == Token::INC ? 1 : -1;
4972 if (ShouldInlineSmiCase(expr->op())) {
4973 Label slow;
4974 patch_site.EmitJumpIfNotSmi(r0, &slow);
4976 // Save result for postfix expressions.
4977 if (expr->is_postfix()) {
4978 if (!context()->IsEffect()) {
4979 // Save the result on the stack. If we have a named or keyed property
4980 // we store the result under the receiver that is currently on top
4981 // of the stack.
4982 switch (assign_type) {
4983 case VARIABLE:
4984 __ push(r0);
4985 break;
4986 case NAMED_PROPERTY:
4987 __ str(r0, MemOperand(sp, kPointerSize));
4988 break;
4989 case NAMED_SUPER_PROPERTY:
4990 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4991 break;
4992 case KEYED_PROPERTY:
4993 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4994 break;
4995 case KEYED_SUPER_PROPERTY:
4996 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4997 break;
4998 }
4999 }
5000 }
5002 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
5003 __ b(vc, &done);
5004 // Call stub. Undo operation first.
5005 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
5006 __ jmp(&stub_call);
5007 __ bind(&slow);
5008 }
5009 ToNumberStub convert_stub(isolate());
5010 __ CallStub(&convert_stub);
5011 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5013 // Save result for postfix expressions.
5014 if (expr->is_postfix()) {
5015 if (!context()->IsEffect()) {
5016 // Save the result on the stack. If we have a named or keyed property
5017 // we store the result under the receiver that is currently on top
5018 // of the stack.
5019 switch (assign_type) {
5020 case VARIABLE:
5021 __ push(r0);
5022 break;
5023 case NAMED_PROPERTY:
5024 __ str(r0, MemOperand(sp, kPointerSize));
5025 break;
5026 case NAMED_SUPER_PROPERTY:
5027 __ str(r0, MemOperand(sp, 2 * kPointerSize));
5028 break;
5029 case KEYED_PROPERTY:
5030 __ str(r0, MemOperand(sp, 2 * kPointerSize));
5031 break;
5032 case KEYED_SUPER_PROPERTY:
5033 __ str(r0, MemOperand(sp, 3 * kPointerSize));
5034 break;
5035 }
5036 }
5037 }
5040 __ bind(&stub_call);
5041 __ mov(r1, r0);
5042 __ mov(r0, Operand(Smi::FromInt(count_value)));
5044 // Record position before stub call.
5045 SetSourcePosition(expr->position());
5047 Handle<Code> code = CodeFactory::BinaryOpIC(
5048 isolate(), Token::ADD, language_mode()).code();
5049 CallIC(code, expr->CountBinOpFeedbackId());
5050 patch_site.EmitPatchInfo();
5052 __ bind(&done);
5053 // Store the value returned in r0.
5054 switch (assign_type) {
5055 case VARIABLE:
5056 if (expr->is_postfix()) {
5057 { EffectContext context(this);
5058 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5059 Token::ASSIGN, expr->CountSlot());
5060 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5061 context.Plug(r0);
5062 }
5063 // For all contexts except the effect context we have the result on
5064 // top of the stack.
5065 if (!context()->IsEffect()) {
5066 context()->PlugTOS();
5067 }
5068 } else {
5069 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5070 Token::ASSIGN, expr->CountSlot());
5071 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5072 context()->Plug(r0);
5073 }
5074 break;
5075 case NAMED_PROPERTY: {
5076 __ mov(StoreDescriptor::NameRegister(),
5077 Operand(prop->key()->AsLiteral()->value()));
5078 __ pop(StoreDescriptor::ReceiverRegister());
5079 if (FLAG_vector_stores) {
5080 EmitLoadStoreICSlot(expr->CountSlot());
5081 CallStoreIC();
5082 } else {
5083 CallStoreIC(expr->CountStoreFeedbackId());
5084 }
5085 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5086 if (expr->is_postfix()) {
5087 if (!context()->IsEffect()) {
5088 context()->PlugTOS();
5089 }
5090 } else {
5091 context()->Plug(r0);
5092 }
5093 break;
5094 }
5095 case NAMED_SUPER_PROPERTY: {
5096 EmitNamedSuperPropertyStore(prop);
5097 if (expr->is_postfix()) {
5098 if (!context()->IsEffect()) {
5099 context()->PlugTOS();
5100 }
5101 } else {
5102 context()->Plug(r0);
5103 }
5104 break;
5105 }
5106 case KEYED_SUPER_PROPERTY: {
5107 EmitKeyedSuperPropertyStore(prop);
5108 if (expr->is_postfix()) {
5109 if (!context()->IsEffect()) {
5110 context()->PlugTOS();
5111 }
5112 } else {
5113 context()->Plug(r0);
5114 }
5115 break;
5116 }
5117 case KEYED_PROPERTY: {
5118 __ Pop(StoreDescriptor::ReceiverRegister(),
5119 StoreDescriptor::NameRegister());
5120 Handle<Code> ic =
5121 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5122 if (FLAG_vector_stores) {
5123 EmitLoadStoreICSlot(expr->CountSlot());
5124 CallIC(ic);
5125 } else {
5126 CallIC(ic, expr->CountStoreFeedbackId());
5127 }
5128 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5129 if (expr->is_postfix()) {
5130 if (!context()->IsEffect()) {
5131 context()->PlugTOS();
5132 }
5133 } else {
5134 context()->Plug(r0);
5135 }
5136 break;
5137 }
5138 }
5142 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5143 DCHECK(!context()->IsEffect());
5144 DCHECK(!context()->IsTest());
5145 VariableProxy* proxy = expr->AsVariableProxy();
5146 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5147 Comment cmnt(masm_, "[ Global variable");
5148 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5149 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5150 __ mov(LoadDescriptor::SlotRegister(),
5151 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5152 // Use a regular load, not a contextual load, to avoid a reference
5153 // error.
5154 CallLoadIC(NOT_CONTEXTUAL);
5155 PrepareForBailout(expr, TOS_REG);
5156 context()->Plug(r0);
5157 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5158 Comment cmnt(masm_, "[ Lookup slot");
5159 Label done, slow;
5161 // Generate code for loading from variables potentially shadowed
5162 // by eval-introduced variables.
5163 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5165 __ bind(&slow);
5166 __ mov(r0, Operand(proxy->name()));
5167 __ Push(cp, r0);
5168 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5169 PrepareForBailout(expr, TOS_REG);
5170 __ bind(&done);
5172 context()->Plug(r0);
5173 } else {
5174 // This expression cannot throw a reference error at the top level.
5175 VisitInDuplicateContext(expr);
5176 }
5180 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5181 Expression* sub_expr,
5182 Handle<String> check) {
5183 Label materialize_true, materialize_false;
5184 Label* if_true = NULL;
5185 Label* if_false = NULL;
5186 Label* fall_through = NULL;
5187 context()->PrepareTest(&materialize_true, &materialize_false,
5188 &if_true, &if_false, &fall_through);
5190 { AccumulatorValueContext context(this);
5191 VisitForTypeofValue(sub_expr);
5192 }
5193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5195 Factory* factory = isolate()->factory();
5196 if (String::Equals(check, factory->number_string())) {
5197 __ JumpIfSmi(r0, if_true);
5198 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5199 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5200 __ cmp(r0, ip);
5201 Split(eq, if_true, if_false, fall_through);
5202 } else if (String::Equals(check, factory->string_string())) {
5203 __ JumpIfSmi(r0, if_false);
5204 // Check for undetectable objects => false.
5205 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
5206 __ b(ge, if_false);
5207 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5208 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5209 Split(eq, if_true, if_false, fall_through);
5210 } else if (String::Equals(check, factory->symbol_string())) {
5211 __ JumpIfSmi(r0, if_false);
5212 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
5213 Split(eq, if_true, if_false, fall_through);
5214 } else if (String::Equals(check, factory->boolean_string())) {
5215 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
5216 __ b(eq, if_true);
5217 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
5218 Split(eq, if_true, if_false, fall_through);
5219 } else if (String::Equals(check, factory->undefined_string())) {
5220 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
5221 __ b(eq, if_true);
5222 __ JumpIfSmi(r0, if_false);
5223 // Check for undetectable objects => true.
5224 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5225 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5226 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5227 Split(ne, if_true, if_false, fall_through);
5229 } else if (String::Equals(check, factory->function_string())) {
5230 __ JumpIfSmi(r0, if_false);
5231 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5232 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
5233 __ b(eq, if_true);
5234 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
5235 Split(eq, if_true, if_false, fall_through);
5236 } else if (String::Equals(check, factory->object_string())) {
5237 __ JumpIfSmi(r0, if_false);
5238 __ CompareRoot(r0, Heap::kNullValueRootIndex);
5239 __ b(eq, if_true);
5240 // Check for JS objects => true.
5241 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5242 __ b(lt, if_false);
5243 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5244 __ b(gt, if_false);
5245 // Check for undetectable objects => false.
5246 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5247 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5248 Split(eq, if_true, if_false, fall_through);
5249 } else {
5250 if (if_false != fall_through) __ jmp(if_false);
5251 }
5252 context()->Plug(if_true, if_false);
5256 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5257 Comment cmnt(masm_, "[ CompareOperation");
5258 SetSourcePosition(expr->position());
5260 // First we try a fast inlined version of the compare when one of
5261 // the operands is a literal.
5262 if (TryLiteralCompare(expr)) return;
5264 // Always perform the comparison for its control flow. Pack the result
5265 // into the expression's context after the comparison is performed.
5266 Label materialize_true, materialize_false;
5267 Label* if_true = NULL;
5268 Label* if_false = NULL;
5269 Label* fall_through = NULL;
5270 context()->PrepareTest(&materialize_true, &materialize_false,
5271 &if_true, &if_false, &fall_through);
5273 Token::Value op = expr->op();
5274 VisitForStackValue(expr->left());
5275 switch (op) {
5276 case Token::IN:
5277 VisitForStackValue(expr->right());
5278 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5279 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5280 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5281 __ cmp(r0, ip);
5282 Split(eq, if_true, if_false, fall_through);
5283 break;
5285 case Token::INSTANCEOF: {
5286 VisitForStackValue(expr->right());
5287 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5288 __ CallStub(&stub);
5289 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5290 // The stub returns 0 for true.
5291 __ cmp(r0, Operand::Zero());
5292 Split(eq, if_true, if_false, fall_through);
5293 break;
5294 }
5296 default: {
5297 VisitForAccumulatorValue(expr->right());
5298 Condition cond = CompareIC::ComputeCondition(op);
5299 __ pop(r1);
5301 bool inline_smi_code = ShouldInlineSmiCase(op);
5302 JumpPatchSite patch_site(masm_);
5303 if (inline_smi_code) {
5304 Label slow_case;
5305 __ orr(r2, r0, Operand(r1));
5306 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
5307 __ cmp(r0, r1);
5308 Split(cond, if_true, if_false, NULL);
5309 __ bind(&slow_case);
5310 }
5312 // Record position and call the compare IC.
5313 SetSourcePosition(expr->position());
5314 Handle<Code> ic =
5315 CodeFactory::CompareIC(isolate(), op, language_mode()).code();
5316 CallIC(ic, expr->CompareOperationFeedbackId());
5317 patch_site.EmitPatchInfo();
5318 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5319 __ cmp(r0, Operand::Zero());
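// Editor's note: the CompareIC leaves an integer in r0 whose sign encodes the
// relation between the operands (memcmp-style: negative, zero, or positive),
// so re-testing it against zero with the original condition reproduces the
// comparison outcome.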
5320 Split(cond, if_true, if_false, fall_through);
5321 }
5322 }
5324 // Convert the result of the comparison into one expected for this
5325 // expression's context.
5326 context()->Plug(if_true, if_false);
5330 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5331 Expression* sub_expr,
5332 NilValue nil) {
5333 Label materialize_true, materialize_false;
5334 Label* if_true = NULL;
5335 Label* if_false = NULL;
5336 Label* fall_through = NULL;
5337 context()->PrepareTest(&materialize_true, &materialize_false,
5338 &if_true, &if_false, &fall_through);
5340 VisitForAccumulatorValue(sub_expr);
5341 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5342 if (expr->op() == Token::EQ_STRICT) {
5343 Heap::RootListIndex nil_value = nil == kNullValue ?
5344 Heap::kNullValueRootIndex :
5345 Heap::kUndefinedValueRootIndex;
5346 __ LoadRoot(r1, nil_value);
5347 __ cmp(r0, r1);
5348 Split(eq, if_true, if_false, fall_through);
5349 } else {
5350 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5351 CallIC(ic, expr->CompareOperationFeedbackId());
5352 __ cmp(r0, Operand(0));
5353 Split(ne, if_true, if_false, fall_through);
5354 }
5355 context()->Plug(if_true, if_false);
5359 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5360 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5361 context()->Plug(r0);
5365 Register FullCodeGenerator::result_register() {
5366 return r0;
5367 }
5370 Register FullCodeGenerator::context_register() {
5371 return cp;
5372 }
5375 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5376 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5377 __ str(value, MemOperand(fp, frame_offset));
5381 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5382 __ ldr(dst, ContextOperand(cp, context_index));
5386 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5387 Scope* declaration_scope = scope()->DeclarationScope();
5388 if (declaration_scope->is_script_scope() ||
5389 declaration_scope->is_module_scope()) {
5390 // Contexts nested in the native context have a canonical empty function
5391 // as their closure, not the anonymous closure containing the global
5392 // code. Pass a smi sentinel and let the runtime look up the empty
5393 // function.
5394 __ mov(ip, Operand(Smi::FromInt(0)));
5395 } else if (declaration_scope->is_eval_scope()) {
5396 // Contexts created by a call to eval have the same closure as the
5397 // context calling eval, not the anonymous closure containing the eval
5398 // code. Fetch it from the context.
5399 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5400 } else {
5401 DCHECK(declaration_scope->is_function_scope());
5402 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5403 }
5404 __ push(ip);
5408 // ----------------------------------------------------------------------------
5409 // Non-local control flow support.
5411 void FullCodeGenerator::EnterFinallyBlock() {
5412 DCHECK(!result_register().is(r1));
5413 // Store result register while executing finally block.
5414 __ push(result_register());
5415 // Cook return address in link register to stack (smi encoded Code* delta)
5416 __ sub(r1, lr, Operand(masm_->CodeObject()));
5417 __ SmiTag(r1);
5419 // Store the cooked return address while executing the finally block.
5420 __ push(r1);
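// Editor's note: the return address is "cooked" (pushed as a smi-tagged
// offset from the code object start) instead of raw, so the GC never sees an
// inner code pointer on the stack and the value survives code relocation;
// ExitFinallyBlock adds the code object base back before returning.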
5422 // Store pending message while executing finally block.
5423 ExternalReference pending_message_obj =
5424 ExternalReference::address_of_pending_message_obj(isolate());
5425 __ mov(ip, Operand(pending_message_obj));
5426 __ ldr(r1, MemOperand(ip));
5427 __ push(r1);
5429 ClearPendingMessage();
5433 void FullCodeGenerator::ExitFinallyBlock() {
5434 DCHECK(!result_register().is(r1));
5435 // Restore pending message from stack.
5436 __ pop(r1);
5437 ExternalReference pending_message_obj =
5438 ExternalReference::address_of_pending_message_obj(isolate());
5439 __ mov(ip, Operand(pending_message_obj));
5440 __ str(r1, MemOperand(ip));
5442 // Restore the cooked return address from the stack.
5443 __ pop(r1);
5445 // Restore the result register, uncook the return address and return.
5446 __ pop(result_register());
5447 __ SmiUntag(r1);
5448 __ add(pc, r1, Operand(masm_->CodeObject()));
5452 void FullCodeGenerator::ClearPendingMessage() {
5453 DCHECK(!result_register().is(r1));
5454 ExternalReference pending_message_obj =
5455 ExternalReference::address_of_pending_message_obj(isolate());
5456 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
5457 __ mov(ip, Operand(pending_message_obj));
5458 __ str(r1, MemOperand(ip));
5462 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5463 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5464 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5465 Operand(SmiFromSlot(slot)));
5466 }


// Walks backwards from the back edge call site at |pc| to the start of the
// instruction sequence that loads the interrupt stub or OSR builtin address
// into ip. The shape of that sequence depends on the constant pool mode and
// on whether ARMv7 instructions are available.
static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}
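

// A walk-back example with hypothetical addresses (ARMv7, extended constant
// pool case; Assembler::kInstrSize == 4), showing which instruction each
// address computed above refers to:
//
//   0x1000  movw ip, #imm_low     <- final load_address (0x1008 - 2 * 4)
//   0x1004  movt ip, #imm_high
//   0x1008  ldr  ip, [pp, ip]     <- initial load_address (pc - 2 * 4)
//   0x100c  blx  ip
//   0x1010  ...                   <- pc (the back edge's return address)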


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT: {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points just past <blx ip>)
      // plus the length of the profiling counter reset sequence.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
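

// A minimal usage sketch (hypothetical caller; in this V8 version the real
// iteration over back edges lives in BackEdgeTable::Patch in the shared
// full-codegen code). To arm one back edge for on-stack replacement:
//
//   BackEdgeTable::PatchAt(unoptimized_code, back_edge_pc,
//                          ON_STACK_REPLACEMENT, osr_builtin_code);
//
// where osr_builtin_code would be the OnStackReplacement builtin's Code
// object, matching the DCHECK in GetBackEdgeState below.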


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}
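

// GetBackEdgeState is effectively the inverse of PatchAt: INTERRUPT is
// recognized by the conditional branch left at branch_address, while the two
// OSR states share a nop there and are told apart by which builtin the
// ip-load currently targets.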


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM