// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
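
// V8's assembler shorthand: `__ cmp(...)` expands via ACCESS_MASM to
// masm_->cmp(...), keeping the generated-code sequences below readable.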

// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and a method, EmitPatchInfo,
// to record a marker back to the patchable code. The marker is a
// cmp rx, #yyy instruction, where x * 0x00000fff + yyy (the raw 12-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }
  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }
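
  // Both jumps above rely on cmp reg, reg always setting the Z flag: until
  // the IC patches this site, the eq branch is unconditionally taken and the
  // ne branch never is. Patching later turns this into a real smi test.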
  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
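
// Typical use, as in VisitSwitchStatement below: construct a JumpPatchSite,
// emit EmitJumpIfNotSmi around an inlined smi fast path, and call
// EmitPatchInfo() immediately after the CompareIC call so the IC can locate
// and patch the smi check later.
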
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
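      // Pushing in batches of kMaxPushes amortizes the loop-counter update
      // and branch below across many stores; --optimize-for-size shrinks the
      // batch to trade a longer loop for smaller code.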
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier. It can be skipped when the new context is
        // known to be in new space (the FastNewContextStub case above); the
        // FLAG_debug_code branch merely asserts that assumption.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
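
  // Subclass constructors are called with an extra implicit argument
  // (new.target) on the stack, so the arguments and rest-parameter stubs
  // below must be told to account for the additional slot.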
  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r2, Operand(Smi::FromInt(num_parameters)));
    __ mov(r1, Operand(Smi::FromInt(rest_index)));
    __ Push(r3, r2, r1);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}

#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
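
// The two lengths differ because of how a 32-bit constant is materialized:
// ARMv7 can use a movw/movt pair, while older cores may need up to five
// instructions. EmitProfilingCounterReset pads with nops to the fixed
// length so the emitted sequence has a predictable size.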

void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nops to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}

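// The back-edge weight below scales the counter decrement with the size of
// the code the back edge jumps over (distance / kCodeSizeMultiplier), so
// large hot loops exhaust the interrupt budget in fewer iterations.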
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      int32_t sp_delta = arg_count * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}

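// The Plug functions below implement the expression-context protocol of the
// full code generator: an EffectContext discards the value, an
// AccumulatorValueContext leaves it in r0 (the result register), a
// StackValueContext pushes it, and a TestContext branches on its boolean
// value.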
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}

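// Split emits only the branches that are actually needed: whichever of
// if_true/if_false equals fall_through is reached by falling off the end,
// and an unconditional jump is emitted only when neither target falls
// through.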
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ModuleDescriptor* descriptor = declaration->module()->descriptor();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(descriptor->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->ScriptScope()));
  __ ldr(r1, ContextOperand(r1, descriptor->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();
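    // The patch info records the marker immediately after the CompareIC
    // call, tying the IC back to the inlined smi comparison emitted at
    // patch_site (see JumpPatchSite above).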

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
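  // (A smi is the integer value shifted left by one, so
  // PointerAddressFromSmiKey only scales the smi key by one more bit to
  // index 4-byte pointers.)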

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  __ cmp(r0, Operand(isolate()->factory()->undefined_value()));
  Label done;
  __ b(ne, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}

void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset) {
  if (NeedsHomeObject(initializer)) {
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ ldr(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
    CallStoreIC();
  }
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}

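// The fast case below avoids a full runtime lookup when an eval in scope
// could not actually have introduced a shadowing binding: the slow path is
// only taken when some intervening context carries an extension object.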
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
      CallGlobalLoadIC(var->name());
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
      break;
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
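  // The clone below is a shallow, field-for-field copy of the materialized
  // regexp: every in-object word (including the lastIndex slot) is
  // duplicated directly.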
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r0);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp));
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(value, 2);
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(it->second->getter, 2);
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(it->second->setter, 3);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on the stack
      result_saved = true;
    }

    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(value, 2);

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ mov(r0, Operand(Smi::FromInt(NONE)));
            __ push(r0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r0, Operand(Smi::FromInt(NONE)));
          __ push(r0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}

1895 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1896 Comment cmnt(masm_, "[ ArrayLiteral");
1898 expr->BuildConstantElements(isolate());
1900 Handle<FixedArray> constant_elements = expr->constant_elements();
1901 bool has_fast_elements =
1902 IsFastObjectElementsKind(expr->constant_elements_kind());
1903 Handle<FixedArrayBase> constant_elements_values(
1904 FixedArrayBase::cast(constant_elements->get(1)));
1906 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1907 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1908 // If element-kind transitions are the only consumers of the allocation
1909 // site, tracking can be turned off once there is nothing left to transition to.
1910 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
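// Illustrative note (assuming the usual elements-kind lattice): a literal
// such as [1, 2, 3] starts with packed smi elements, and a later store like
// a[0] = 1.5 transitions it toward double elements; the allocation site
// records such transitions. A literal that already has the most general
// fast kind has nothing left to transition to, hence the skip.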
1913 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1914 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1915 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1916 __ mov(r1, Operand(constant_elements));
1917 if (MustCreateArrayLiteralWithRuntime(expr)) {
1918 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1919 __ Push(r3, r2, r1, r0);
1920 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1922 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1925 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1927 bool result_saved = false; // Is the result saved to the stack?
1928 ZoneList<Expression*>* subexprs = expr->values();
1929 int length = subexprs->length();
1931 // Emit code to evaluate all the non-constant subexpressions and to store
1932 // them into the newly cloned array.
1933 for (int i = 0; i < length; i++) {
1934 Expression* subexpr = subexprs->at(i);
1935 // If the subexpression is a literal or a simple materialized literal, it
1936 // is already set in the cloned array.
1937 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1939 if (!result_saved) {
1941 __ Push(Smi::FromInt(expr->literal_index()));
1942 result_saved = true;
1944 VisitForAccumulatorValue(subexpr);
1946 if (has_fast_elements) {
1947 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1948 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1949 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1950 __ str(result_register(), FieldMemOperand(r1, offset));
1951 // Update the write barrier for the array store.
1952 __ RecordWriteField(r1, offset, result_register(), r2,
1953 kLRHasBeenSaved, kDontSaveFPRegs,
1954 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1956 __ mov(r3, Operand(Smi::FromInt(i)));
1957 StoreArrayLiteralElementStub stub(isolate());
1961 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1965 __ pop(); // literal index
1966 context()->PlugTOS();
1968 context()->Plug(r0);
1973 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1974 DCHECK(expr->target()->IsValidReferenceExpression());
1976 Comment cmnt(masm_, "[ Assignment");
1978 Property* property = expr->target()->AsProperty();
1979 LhsKind assign_type = GetAssignType(property);
1981 // Evaluate LHS expression.
1982 switch (assign_type) {
1984 // Nothing to do here.
1986 case NAMED_PROPERTY:
1987 if (expr->is_compound()) {
1988 // We need the receiver both on the stack and in the register.
1989 VisitForStackValue(property->obj());
1990 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1992 VisitForStackValue(property->obj());
1995 case NAMED_SUPER_PROPERTY:
1996 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1997 EmitLoadHomeObject(property->obj()->AsSuperReference());
1998 __ Push(result_register());
1999 if (expr->is_compound()) {
2000 const Register scratch = r1;
2001 __ ldr(scratch, MemOperand(sp, kPointerSize));
2002 __ Push(scratch);
2003 __ Push(result_register());
2006 case KEYED_SUPER_PROPERTY:
2007 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
2008 EmitLoadHomeObject(property->obj()->AsSuperReference());
2009 __ Push(result_register());
2010 VisitForAccumulatorValue(property->key());
2011 __ Push(result_register());
2012 if (expr->is_compound()) {
2013 const Register scratch = r1;
2014 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
2015 __ Push(scratch);
2016 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
2017 __ Push(scratch);
2018 __ Push(result_register());
2021 case KEYED_PROPERTY:
2022 if (expr->is_compound()) {
2023 VisitForStackValue(property->obj());
2024 VisitForStackValue(property->key());
2025 __ ldr(LoadDescriptor::ReceiverRegister(),
2026 MemOperand(sp, 1 * kPointerSize));
2027 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2029 VisitForStackValue(property->obj());
2030 VisitForStackValue(property->key());
2035 // For compound assignments we need another deoptimization point after the
2036 // variable/property load.
2037 if (expr->is_compound()) {
2038 { AccumulatorValueContext context(this);
2039 switch (assign_type) {
2041 EmitVariableLoad(expr->target()->AsVariableProxy());
2042 PrepareForBailout(expr->target(), TOS_REG);
2044 case NAMED_PROPERTY:
2045 EmitNamedPropertyLoad(property);
2046 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2048 case NAMED_SUPER_PROPERTY:
2049 EmitNamedSuperPropertyLoad(property);
2050 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2052 case KEYED_SUPER_PROPERTY:
2053 EmitKeyedSuperPropertyLoad(property);
2054 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2056 case KEYED_PROPERTY:
2057 EmitKeyedPropertyLoad(property);
2058 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2063 Token::Value op = expr->binary_op();
2064 __ push(r0); // Left operand goes on the stack.
2065 VisitForAccumulatorValue(expr->value());
2067 SetSourcePosition(expr->position() + 1);
2068 AccumulatorValueContext context(this);
2069 if (ShouldInlineSmiCase(op)) {
2070 EmitInlineSmiBinaryOp(expr->binary_operation(),
2075 EmitBinaryOp(expr->binary_operation(), op);
2078 // Deoptimization point in case the binary operation may have side effects.
2079 PrepareForBailout(expr->binary_operation(), TOS_REG);
2081 VisitForAccumulatorValue(expr->value());
2084 // Record source position before possible IC call.
2085 SetSourcePosition(expr->position());
2088 switch (assign_type) {
2090 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2092 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2093 context()->Plug(r0);
2095 case NAMED_PROPERTY:
2096 EmitNamedPropertyAssignment(expr);
2098 case NAMED_SUPER_PROPERTY:
2099 EmitNamedSuperPropertyStore(property);
2100 context()->Plug(r0);
2102 case KEYED_SUPER_PROPERTY:
2103 EmitKeyedSuperPropertyStore(property);
2104 context()->Plug(r0);
2106 case KEYED_PROPERTY:
2107 EmitKeyedPropertyAssignment(expr);
2113 void FullCodeGenerator::VisitYield(Yield* expr) {
2114 Comment cmnt(masm_, "[ Yield");
2115 // Evaluate yielded value first; the initial iterator definition depends on
2116 // this. It stays on the stack while we update the iterator.
2117 VisitForStackValue(expr->expression());
2119 switch (expr->yield_kind()) {
2120 case Yield::kSuspend:
2121 // Pop value from top-of-stack slot; box result into result register.
2122 EmitCreateIteratorResult(false);
2123 __ push(result_register());
2125 case Yield::kInitial: {
2126 Label suspend, continuation, post_runtime, resume;
2130 __ bind(&continuation);
2134 VisitForAccumulatorValue(expr->generator_object());
2135 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2136 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2137 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2138 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2140 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2141 kLRHasBeenSaved, kDontSaveFPRegs);
2142 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2143 __ cmp(sp, r1);
2144 __ b(eq, &post_runtime);
2145 __ push(r0); // generator object
2146 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2147 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2148 __ bind(&post_runtime);
2149 __ pop(result_register());
2150 EmitReturnSequence();
2153 context()->Plug(result_register());
2157 case Yield::kFinal: {
2158 VisitForAccumulatorValue(expr->generator_object());
2159 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2160 __ str(r1, FieldMemOperand(result_register(),
2161 JSGeneratorObject::kContinuationOffset));
2162 // Pop value from top-of-stack slot, box result into result register.
2163 EmitCreateIteratorResult(true);
2164 EmitUnwindBeforeReturn();
2165 EmitReturnSequence();
2169 case Yield::kDelegating: {
2170 VisitForStackValue(expr->generator_object());
2172 // Initial stack layout is as follows:
2173 // [sp + 1 * kPointerSize] iter
2174 // [sp + 0 * kPointerSize] g
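// As a rough JS-level sketch (an approximation, not the exact desugaring),
// the delegating yield compiled below behaves like:
//   var received = undefined, f = "next";
//   for (;;) {                              // l_loop
//     var result = iter[f](received);       // l_call
//     if (result.done) break;
//     received = yield result.value;        // l_suspend / l_resume
//     f = "next";                           // l_next
//   }
// with exceptions inside the loop setting f = "throw" (l_catch).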
2176 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2177 Label l_next, l_call, l_loop;
2178 Register load_receiver = LoadDescriptor::ReceiverRegister();
2179 Register load_name = LoadDescriptor::NameRegister();
2181 // Initial send value is undefined.
2182 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2185 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2187 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2188 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2189 __ Push(load_name, r3, r0); // "throw", iter, except
2192 // try { received = %yield result }
2193 // Shuffle the received result above a try handler and yield it without re-boxing.
2196 __ pop(r0); // result
2197 EnterTryBlock(expr->index(), &l_catch);
2198 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2199 __ push(r0); // result
2201 __ bind(&l_continuation);
2203 __ bind(&l_suspend);
2204 const int generator_object_depth = kPointerSize + try_block_size;
2205 __ ldr(r0, MemOperand(sp, generator_object_depth));
2207 __ Push(Smi::FromInt(expr->index())); // handler-index
2208 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2209 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2210 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2211 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2213 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2214 kLRHasBeenSaved, kDontSaveFPRegs);
2215 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2216 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2217 __ pop(r0); // result
2218 EmitReturnSequence();
2219 __ bind(&l_resume); // received in r0
2220 ExitTryBlock(expr->index());
2222 // receiver = iter; f = 'next'; arg = received;
2225 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2226 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2227 __ Push(load_name, r3, r0); // "next", iter, received
2229 // result = receiver[f](arg);
2231 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2232 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2233 if (FLAG_vector_ics) {
2234 __ mov(VectorLoadICDescriptor::SlotRegister(),
2235 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2237 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2238 CallIC(ic, TypeFeedbackId::None());
2240 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2241 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2244 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2245 __ Drop(1); // The function is still on the stack; drop it.
2247 // if (!result.done) goto l_try;
2249 __ Move(load_receiver, r0);
2251 __ push(load_receiver); // save result
2252 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2253 if (FLAG_vector_ics) {
2254 __ mov(VectorLoadICDescriptor::SlotRegister(),
2255 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2257 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done
2258 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2260 __ cmp(r0, Operand(0));
2264 __ pop(load_receiver); // result
2265 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2266 if (FLAG_vector_ics) {
2267 __ mov(VectorLoadICDescriptor::SlotRegister(),
2268 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2270 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value
2271 context()->DropAndPlug(2, r0); // drop iter and g
2278 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2280 JSGeneratorObject::ResumeMode resume_mode) {
2281 // The value stays in r0, and is ultimately read by the resumed generator, as
2282 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2283 // is read to throw the value when the resumed generator is already closed.
2284 // r1 will hold the generator object until the activation has been resumed.
2285 VisitForStackValue(generator);
2286 VisitForAccumulatorValue(value);
2289 // Load suspended function and context.
2290 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2291 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2293 // Load receiver and store as the first argument.
2294 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2297 // Push holes for the rest of the arguments to the generator function.
2298 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2300 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2301 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2302 Label push_argument_holes, push_frame;
2303 __ bind(&push_argument_holes);
2304 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2305 __ b(mi, &push_frame);
2307 __ jmp(&push_argument_holes);
2309 // Enter a new JavaScript frame, and initialize its slots as they were when
2310 // the generator was suspended.
2311 Label resume_frame, done;
2312 __ bind(&push_frame);
2313 __ bl(&resume_frame);
2315 __ bind(&resume_frame);
2316 // lr = return address.
2317 // fp = caller's frame pointer.
2318 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2319 // cp = callee's context,
2320 // r4 = callee's JS function.
2321 __ PushFixedFrame(r4);
2322 // Adjust FP to point to saved FP.
2323 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2325 // Load the operand stack size.
2326 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2327 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2330 // If we are sending a value and there is no operand stack, we can jump back in directly.
2332 if (resume_mode == JSGeneratorObject::NEXT) {
2334 __ cmp(r3, Operand(0));
2335 __ b(ne, &slow_resume);
2336 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2338 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2339 if (FLAG_enable_ool_constant_pool) {
2340 // Load the new code object's constant pool pointer.
2342 __ ldr(pp, MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
2345 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2348 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2349 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2352 __ bind(&slow_resume);
2355 // Otherwise, we push holes for the operand stack and call the runtime to fix
2356 // up the stack and the handlers.
2357 Label push_operand_holes, call_resume;
2358 __ bind(&push_operand_holes);
2359 __ sub(r3, r3, Operand(1), SetCC);
2360 __ b(mi, &call_resume);
2362 __ b(&push_operand_holes);
2363 __ bind(&call_resume);
2364 DCHECK(!result_register().is(r1));
2365 __ Push(r1, result_register());
2366 __ Push(Smi::FromInt(resume_mode));
2367 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2368 // Not reached: the runtime call returns elsewhere.
2369 __ stop("not-reached");
2372 context()->Plug(result_register());
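// In JS terms (a sketch): for a suspended generator g, g.next(v) funnels
// through here with v in r0 and g in r1; the frame rebuilt above lets the
// generator body continue where it suspended, observing v as the value of
// its pending yield expression.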
2376 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2380 const int instance_size = 5 * kPointerSize;
2381 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(), instance_size);
2384 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
2387 __ bind(&gc_required);
2388 __ Push(Smi::FromInt(instance_size));
2389 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2390 __ ldr(context_register(),
2391 MemOperand(fp, StandardFrameConstants::kContextOffset));
2393 __ bind(&allocated);
2394 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2395 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2396 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2398 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2399 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2400 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2401 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2402 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2404 __ str(r2, FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2406 __ str(r3, FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2408 // Only the value field needs a write barrier, as the other values are in the root set.
2410 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2411 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
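// The object assembled above has the canonical iterator-result shape,
// e.g. { value: 42, done: false }: map and empty properties/elements
// first, then the two in-object fields, with only 'value' needing the
// write barrier.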
2415 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2416 SetSourcePosition(prop->position());
2417 Literal* key = prop->key()->AsLiteral();
2418 DCHECK(!prop->IsSuperAccess());
2420 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2421 if (FLAG_vector_ics) {
2422 __ mov(VectorLoadICDescriptor::SlotRegister(),
2423 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2424 CallLoadIC(NOT_CONTEXTUAL);
2426 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2431 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2432 // Stack: receiver, home_object.
2433 SetSourcePosition(prop->position());
2434 Literal* key = prop->key()->AsLiteral();
2435 DCHECK(!key->value()->IsSmi());
2436 DCHECK(prop->IsSuperAccess());
2438 __ Push(key->value());
2439 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2443 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2444 SetSourcePosition(prop->position());
2445 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2446 if (FLAG_vector_ics) {
2447 __ mov(VectorLoadICDescriptor::SlotRegister(),
2448 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2451 CallIC(ic, prop->PropertyFeedbackId());
2456 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2457 // Stack: receiver, home_object, key.
2458 SetSourcePosition(prop->position());
2460 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2464 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2466 Expression* left_expr,
2467 Expression* right_expr) {
2468 Label done, smi_case, stub_call;
2470 Register scratch1 = r2;
2471 Register scratch2 = r3;
2473 // Get the arguments.
2474 Register left = r1;
2475 Register right = r0;
2478 // Perform combined smi check on both operands.
2479 __ orr(scratch1, left, Operand(right));
2480 STATIC_ASSERT(kSmiTag == 0);
2481 JumpPatchSite patch_site(masm_);
2482 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
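// Reminder of the encoding this relies on: 32-bit smis store a 31-bit
// integer shifted left by one with a zero tag bit, so e.g. 5 is held as
// 0xA. ORing the operands leaves the tag bit clear only if both are smis,
// and ADD/SUB below can then operate on the tagged values directly.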
2484 __ bind(&stub_call);
2485 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2486 CallIC(code, expr->BinaryOperationFeedbackId());
2487 patch_site.EmitPatchInfo();
2491 // Smi case. This code works the same way as the smi-smi case in the type
2492 // recording binary operation stub.
2495 __ GetLeastBitsFromSmi(scratch1, right, 5);
2496 __ mov(right, Operand(left, ASR, scratch1));
2497 __ bic(right, right, Operand(kSmiTagMask));
2500 __ SmiUntag(scratch1, left);
2501 __ GetLeastBitsFromSmi(scratch2, right, 5);
2502 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2503 __ TrySmiTag(right, scratch1, &stub_call);
2507 __ SmiUntag(scratch1, left);
2508 __ GetLeastBitsFromSmi(scratch2, right, 5);
2509 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2510 __ tst(scratch1, Operand(0xc0000000));
2511 __ b(ne, &stub_call);
2512 __ SmiTag(right, scratch1);
2516 __ add(scratch1, left, Operand(right), SetCC);
2517 __ b(vs, &stub_call);
2518 __ mov(right, scratch1);
2521 __ sub(scratch1, left, Operand(right), SetCC);
2522 __ b(vs, &stub_call);
2523 __ mov(right, scratch1);
2526 __ SmiUntag(ip, right);
2527 __ smull(scratch1, scratch2, left, ip);
2528 __ mov(ip, Operand(scratch1, ASR, 31));
2529 __ cmp(ip, Operand(scratch2));
2530 __ b(ne, &stub_call);
2531 __ cmp(scratch1, Operand::Zero());
2532 __ mov(right, Operand(scratch1), LeaveCC, ne);
2534 __ add(scratch2, right, Operand(left), SetCC);
2535 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2536 __ b(mi, &stub_call);
2540 __ orr(right, left, Operand(right));
2542 case Token::BIT_AND:
2543 __ and_(right, left, Operand(right));
2545 case Token::BIT_XOR:
2546 __ eor(right, left, Operand(right));
2553 context()->Plug(r0);
2557 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2558 // Constructor is in r0.
2559 DCHECK(lit != NULL);
2562 // No access check is needed here since the constructor is created by the class literal.
2564 Register scratch = r1;
2566 __ ldr(scratch, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2569 for (int i = 0; i < lit->properties()->length(); i++) {
2570 ObjectLiteral::Property* property = lit->properties()->at(i);
2571 Expression* value = property->value();
2573 if (property->is_static()) {
2574 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2576 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2579 EmitPropertyKey(property, lit->GetIdForProperty(i));
2581 // The static prototype property is read-only. The parser already handles
2582 // the non-computed property name case, so this is the only place where we
2583 // need to check for an own read-only property; we special-case it here
2584 // rather than performing the check for every property.
2585 if (property->is_static() && property->is_computed_name()) {
2586 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2590 VisitForStackValue(value);
2591 EmitSetHomeObjectIfNeeded(value, 2);
2593 switch (property->kind()) {
2594 case ObjectLiteral::Property::CONSTANT:
2595 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2596 case ObjectLiteral::Property::PROTOTYPE:
2598 case ObjectLiteral::Property::COMPUTED:
2599 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2602 case ObjectLiteral::Property::GETTER:
2603 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2605 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2608 case ObjectLiteral::Property::SETTER:
2609 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2611 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2620 __ CallRuntime(Runtime::kToFastProperties, 1);
2623 __ CallRuntime(Runtime::kToFastProperties, 1);
2627 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2629 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2630 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2631 CallIC(code, expr->BinaryOperationFeedbackId());
2632 patch_site.EmitPatchInfo();
2633 context()->Plug(r0);
2637 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2638 DCHECK(expr->IsValidReferenceExpression());
2640 Property* prop = expr->AsProperty();
2641 LhsKind assign_type = GetAssignType(prop);
2643 switch (assign_type) {
2645 Variable* var = expr->AsVariableProxy()->var();
2646 EffectContext context(this);
2647 EmitVariableAssignment(var, Token::ASSIGN);
2650 case NAMED_PROPERTY: {
2651 __ push(r0); // Preserve value.
2652 VisitForAccumulatorValue(prop->obj());
2653 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2654 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2655 __ mov(StoreDescriptor::NameRegister(),
2656 Operand(prop->key()->AsLiteral()->value()));
2660 case NAMED_SUPER_PROPERTY: {
2662 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2663 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2664 // stack: value, this; r0: home_object
2665 Register scratch = r2;
2666 Register scratch2 = r3;
2667 __ mov(scratch, result_register()); // home_object
2668 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2669 __ ldr(scratch2, MemOperand(sp, 0)); // this
2670 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2671 __ str(scratch, MemOperand(sp, 0)); // home_object
2672 // stack: this, home_object; r0: value
2673 EmitNamedSuperPropertyStore(prop);
2676 case KEYED_SUPER_PROPERTY: {
2678 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2679 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2680 __ Push(result_register());
2681 VisitForAccumulatorValue(prop->key());
2682 Register scratch = r2;
2683 Register scratch2 = r3;
2684 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2685 // stack: value, this, home_object; r0: key, r3: value
2686 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2687 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2688 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2689 __ str(scratch, MemOperand(sp, kPointerSize));
2690 __ str(r0, MemOperand(sp, 0));
2691 __ Move(r0, scratch2);
2692 // stack: this, home_object, key; r0: value.
2693 EmitKeyedSuperPropertyStore(prop);
2696 case KEYED_PROPERTY: {
2697 __ push(r0); // Preserve value.
2698 VisitForStackValue(prop->obj());
2699 VisitForAccumulatorValue(prop->key());
2700 __ Move(StoreDescriptor::NameRegister(), r0);
2701 __ Pop(StoreDescriptor::ValueRegister(),
2702 StoreDescriptor::ReceiverRegister());
2704 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2709 context()->Plug(r0);
2713 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2714 Variable* var, MemOperand location) {
2715 __ str(result_register(), location);
2716 if (var->IsContextSlot()) {
2717 // RecordWrite may destroy all its register arguments.
2718 __ mov(r3, result_register());
2719 int offset = Context::SlotOffset(var->index());
2720 __ RecordWriteContextSlot(
2721 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2726 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2727 if (var->IsUnallocated()) {
2728 // Global var, const, or let.
2729 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2730 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2733 } else if (var->mode() == LET && op != Token::INIT_LET) {
2734 // Non-initializing assignment to let variable needs a write barrier.
2735 DCHECK(!var->IsLookupSlot());
2736 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2738 MemOperand location = VarOperand(var, r1);
2739 __ ldr(r3, location);
2740 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2742 __ mov(r3, Operand(var->name()));
2744 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2745 // Perform the assignment.
2747 EmitStoreToStackLocalOrContextSlot(var, location);
2749 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2750 // Assignment to const variable needs a write barrier.
2751 DCHECK(!var->IsLookupSlot());
2752 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2754 MemOperand location = VarOperand(var, r1);
2755 __ ldr(r3, location);
2756 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2757 __ b(ne, &const_error);
2758 __ mov(r3, Operand(var->name()));
2760 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2761 __ bind(&const_error);
2762 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2764 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2765 if (var->IsLookupSlot()) {
2766 // Assignment to var.
2767 __ push(r0); // Value.
2768 __ mov(r1, Operand(var->name()));
2769 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2770 __ Push(cp, r1, r0); // Context, name, language mode.
2771 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2773 // Assignment to var or initializing assignment to let/const in harmony mode.
2775 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2776 MemOperand location = VarOperand(var, r1);
2777 if (generate_debug_code_ && op == Token::INIT_LET) {
2778 // Check for an uninitialized let binding.
2779 __ ldr(r2, location);
2780 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2781 __ Check(eq, kLetBindingReInitialization);
2783 EmitStoreToStackLocalOrContextSlot(var, location);
2786 } else if (op == Token::INIT_CONST_LEGACY) {
2787 // Const initializers need a write barrier.
2788 DCHECK(var->mode() == CONST_LEGACY);
2789 DCHECK(!var->IsParameter()); // No const parameters.
2790 if (var->IsLookupSlot()) {
2792 __ mov(r0, Operand(var->name()));
2793 __ Push(cp, r0); // Context and name.
2794 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2796 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2798 MemOperand location = VarOperand(var, r1);
2799 __ ldr(r2, location);
2800 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2802 EmitStoreToStackLocalOrContextSlot(var, location);
2807 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2808 if (is_strict(language_mode())) {
2809 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2811 // Silently ignore store in sloppy mode.
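// For example, with legacy const in sloppy mode:
//   const x = 1; x = 2;   // the store to x is silently dropped here,
// whereas the same assignment in strict mode reaches the
// kThrowConstAssignError call above.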
2816 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2817 // Assignment to a property, using a named store IC.
2818 Property* prop = expr->target()->AsProperty();
2819 DCHECK(prop != NULL);
2820 DCHECK(prop->key()->IsLiteral());
2822 // Record source code position before IC call.
2823 SetSourcePosition(expr->position());
2824 __ mov(StoreDescriptor::NameRegister(),
2825 Operand(prop->key()->AsLiteral()->value()));
2826 __ pop(StoreDescriptor::ReceiverRegister());
2827 CallStoreIC(expr->AssignmentFeedbackId());
2829 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2830 context()->Plug(r0);
2834 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2835 // Assignment to named property of super.
2837 // stack : receiver ('this'), home_object
2838 DCHECK(prop != NULL);
2839 Literal* key = prop->key()->AsLiteral();
2840 DCHECK(key != NULL);
2842 __ Push(key->value());
2844 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2845 : Runtime::kStoreToSuper_Sloppy), 4);
2850 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2851 // Assignment to keyed property of super.
2853 // stack : receiver ('this'), home_object, key
2854 DCHECK(prop != NULL);
2858 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2859                                             : Runtime::kStoreKeyedToSuper_Sloppy), 4);
2864 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2865 // Assignment to a property, using a keyed store IC.
2867 // Record source code position before IC call.
2868 SetSourcePosition(expr->position());
2869 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2870 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2873 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2874 CallIC(ic, expr->AssignmentFeedbackId());
2876 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2877 context()->Plug(r0);
2881 void FullCodeGenerator::VisitProperty(Property* expr) {
2882 Comment cmnt(masm_, "[ Property");
2883 Expression* key = expr->key();
2885 if (key->IsPropertyName()) {
2886 if (!expr->IsSuperAccess()) {
2887 VisitForAccumulatorValue(expr->obj());
2888 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2889 EmitNamedPropertyLoad(expr);
2891 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2892 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2893 __ Push(result_register());
2894 EmitNamedSuperPropertyLoad(expr);
2897 if (!expr->IsSuperAccess()) {
2898 VisitForStackValue(expr->obj());
2899 VisitForAccumulatorValue(expr->key());
2900 __ Move(LoadDescriptor::NameRegister(), r0);
2901 __ pop(LoadDescriptor::ReceiverRegister());
2902 EmitKeyedPropertyLoad(expr);
2904 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2905 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2906 __ Push(result_register());
2907 VisitForStackValue(expr->key());
2908 EmitKeyedSuperPropertyLoad(expr);
2911 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2912 context()->Plug(r0);
2916 void FullCodeGenerator::CallIC(Handle<Code> code,
2917 TypeFeedbackId ast_id) {
2919 // All calls must have a predictable size in full-codegen code to ensure that
2920 // the debugger can patch them correctly.
2921 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2922 NEVER_INLINE_TARGET_ADDRESS);
2926 // Code common for calls using the IC.
2927 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2928 Expression* callee = expr->expression();
2930 CallICState::CallType call_type =
2931 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2933 // Get the target function.
2934 if (call_type == CallICState::FUNCTION) {
2935 { StackValueContext context(this);
2936 EmitVariableLoad(callee->AsVariableProxy());
2937 PrepareForBailout(callee, NO_REGISTERS);
2939 // Push undefined as receiver. This is patched in the method prologue if it
2940 // is a sloppy mode method.
2941 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2944 // Load the function from the receiver.
2945 DCHECK(callee->IsProperty());
2946 DCHECK(!callee->AsProperty()->IsSuperAccess());
2947 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2948 EmitNamedPropertyLoad(callee->AsProperty());
2949 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2950 // Push the target function under the receiver.
2951 __ ldr(ip, MemOperand(sp, 0));
2953 __ str(r0, MemOperand(sp, kPointerSize));
2956 EmitCall(expr, call_type);
2960 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2961 Expression* callee = expr->expression();
2962 DCHECK(callee->IsProperty());
2963 Property* prop = callee->AsProperty();
2964 DCHECK(prop->IsSuperAccess());
2966 SetSourcePosition(prop->position());
2967 Literal* key = prop->key()->AsLiteral();
2968 DCHECK(!key->value()->IsSmi());
2969 // Load the function from the receiver.
2970 const Register scratch = r1;
2971 SuperReference* super_ref = prop->obj()->AsSuperReference();
2972 EmitLoadHomeObject(super_ref);
2974 VisitForAccumulatorValue(super_ref->this_var());
2977 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2979 __ Push(key->value());
2983 // - this (receiver)
2984 // - this (receiver) <-- LoadFromSuper will pop here and below.
2987 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2989 // Replace home_object with target function.
2990 __ str(r0, MemOperand(sp, kPointerSize));
2993 // - target function
2994 // - this (receiver)
2995 EmitCall(expr, CallICState::METHOD);
2999 // Code common for calls using the IC.
3000 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3003 VisitForAccumulatorValue(key);
3005 Expression* callee = expr->expression();
3007 // Load the function from the receiver.
3008 DCHECK(callee->IsProperty());
3009 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3010 __ Move(LoadDescriptor::NameRegister(), r0);
3011 EmitKeyedPropertyLoad(callee->AsProperty());
3012 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3014 // Push the target function under the receiver.
3015 __ ldr(ip, MemOperand(sp, 0));
3017 __ str(r0, MemOperand(sp, kPointerSize));
3019 EmitCall(expr, CallICState::METHOD);
3023 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3024 Expression* callee = expr->expression();
3025 DCHECK(callee->IsProperty());
3026 Property* prop = callee->AsProperty();
3027 DCHECK(prop->IsSuperAccess());
3029 SetSourcePosition(prop->position());
3030 // Load the function from the receiver.
3031 const Register scratch = r1;
3032 SuperReference* super_ref = prop->obj()->AsSuperReference();
3033 EmitLoadHomeObject(super_ref);
3035 VisitForAccumulatorValue(super_ref->this_var());
3038 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3040 VisitForStackValue(prop->key());
3044 // - this (receiver)
3045 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3048 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3050 // Replace home_object with target function.
3051 __ str(r0, MemOperand(sp, kPointerSize));
3054 // - target function
3055 // - this (receiver)
3056 EmitCall(expr, CallICState::METHOD);
3060 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3061 // Load the arguments.
3062 ZoneList<Expression*>* args = expr->arguments();
3063 int arg_count = args->length();
3064 { PreservePositionScope scope(masm()->positions_recorder());
3065 for (int i = 0; i < arg_count; i++) {
3066 VisitForStackValue(args->at(i));
3070 // Record source position of the IC call.
3071 SetSourcePosition(expr->position());
3072 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3073 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3074 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3075 // Don't assign a type feedback id to the IC, since type feedback is provided
3076 // by the vector above.
3079 RecordJSReturnSite(expr);
3080 // Restore context register.
3081 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3082 context()->DropAndPlug(1, r0);
3086 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3087 // r5: copy of the first argument or undefined if it doesn't exist.
3088 if (arg_count > 0) {
3089 __ ldr(r5, MemOperand(sp, arg_count * kPointerSize));
3091 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3094 // r4: the enclosing function.
3095 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3097 // r3: the receiver of the enclosing function.
3098 int receiver_offset = 2 + info_->scope()->num_parameters();
3099 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
3101 // r2: language mode.
3102 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3104 // r1: the start position of the scope the call resides in.
3105 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3107 // Do the runtime call.
3109 __ Push(r4, r3, r2, r1);
3110 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
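// Illustrative example: for 'function f(s) { return eval(s); }' the values
// pushed above (first argument, enclosing function, receiver, language
// mode, scope position) let the runtime decide whether this call site is a
// direct eval and, if so, compile s in f's scope.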
3114 void FullCodeGenerator::EmitLoadSuperConstructor() {
3115 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3117 __ CallRuntime(Runtime::kGetPrototype, 1);
3121 void FullCodeGenerator::VisitCall(Call* expr) {
3123 // We want to verify that RecordJSReturnSite gets called on all paths
3124 // through this function. Avoid early returns.
3125 expr->return_is_recorded_ = false;
3128 Comment cmnt(masm_, "[ Call");
3129 Expression* callee = expr->expression();
3130 Call::CallType call_type = expr->GetCallType(isolate());
3132 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3133 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3134 // to resolve the function we need to call and the receiver of the
3135 // call. Then we call the resolved function using the given arguments.
3137 ZoneList<Expression*>* args = expr->arguments();
3138 int arg_count = args->length();
3140 { PreservePositionScope pos_scope(masm()->positions_recorder());
3141 VisitForStackValue(callee);
3142 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3143 __ push(r2); // Reserved receiver slot.
3145 // Push the arguments.
3146 for (int i = 0; i < arg_count; i++) {
3147 VisitForStackValue(args->at(i));
3150 // Push a copy of the function (found below the arguments) and resolve eval.
3152 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3154 EmitResolvePossiblyDirectEval(arg_count);
3156 // The runtime call returns a pair of values in r0 (function) and
3157 // r1 (receiver). Touch up the stack with the right values.
3158 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3159 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
3161 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3164 // Record source position for debugger.
3165 SetSourcePosition(expr->position());
3166 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3167 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3169 RecordJSReturnSite(expr);
3170 // Restore context register.
3171 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3172 context()->DropAndPlug(1, r0);
3173 } else if (call_type == Call::GLOBAL_CALL) {
3174 EmitCallWithLoadIC(expr);
3176 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3177 // Call to a lookup slot (dynamically introduced variable).
3178 VariableProxy* proxy = callee->AsVariableProxy();
3181 { PreservePositionScope scope(masm()->positions_recorder());
3182 // Generate code for loading from variables potentially shadowed
3183 // by eval-introduced variables.
3184 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3188 // Call the runtime to find the function to call (returned in r0)
3189 // and the object holding it (returned in r1).
3190 DCHECK(!context_register().is(r2));
3191 __ mov(r2, Operand(proxy->name()));
3192 __ Push(context_register(), r2);
3193 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3194 __ Push(r0, r1); // Function, receiver.
3195 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3197 // If fast case code has been generated, emit code to push the
3198 // function and receiver and have the slow path jump around this code.
3200 if (done.is_linked()) {
3206 // The receiver is implicitly the global receiver. Indicate this
3207 // by passing undefined to the call function stub.
3208 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3213 // The receiver is either the global receiver or an object found
3214 // by LoadContextSlot.
3216 } else if (call_type == Call::PROPERTY_CALL) {
3217 Property* property = callee->AsProperty();
3218 bool is_named_call = property->key()->IsPropertyName();
3219 if (property->IsSuperAccess()) {
3220 if (is_named_call) {
3221 EmitSuperCallWithLoadIC(expr);
3223 EmitKeyedSuperCallWithLoadIC(expr);
3227 PreservePositionScope scope(masm()->positions_recorder());
3228 VisitForStackValue(property->obj());
3230 if (is_named_call) {
3231 EmitCallWithLoadIC(expr);
3233 EmitKeyedCallWithLoadIC(expr, property->key());
3236 } else if (call_type == Call::SUPER_CALL) {
3237 EmitSuperConstructorCall(expr);
3239 DCHECK(call_type == Call::OTHER_CALL);
3240 // Call to an arbitrary expression not handled specially above.
3241 { PreservePositionScope scope(masm()->positions_recorder());
3242 VisitForStackValue(callee);
3244 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3246 // Emit function call.
3251 // RecordJSReturnSite should have been called.
3252 DCHECK(expr->return_is_recorded_);
3257 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3258 Comment cmnt(masm_, "[ CallNew");
3259 // According to ECMA-262, section 11.2.2, page 44, the function
3260 // expression in new calls must be evaluated before the arguments.
3263 // Push constructor on the stack. If it's not a function it's used as
3264 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
3266 DCHECK(!expr->expression()->IsSuperReference());
3267 VisitForStackValue(expr->expression());
3269 // Push the arguments ("left-to-right") on the stack.
3270 ZoneList<Expression*>* args = expr->arguments();
3271 int arg_count = args->length();
3272 for (int i = 0; i < arg_count; i++) {
3273 VisitForStackValue(args->at(i));
3276 // Call the construct call builtin that handles allocation and
3277 // constructor invocation.
3278 SetSourcePosition(expr->position());
3280 // Load function and argument count into r1 and r0.
3281 __ mov(r0, Operand(arg_count));
3282 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3284 // Record call targets in unoptimized code.
3285 if (FLAG_pretenuring_call_new) {
3286 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3287 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3288 expr->CallNewFeedbackSlot().ToInt() + 1);
3291 __ Move(r2, FeedbackVector());
3292 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3294 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3295 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3296 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3297 context()->Plug(r0);
3301 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3302 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3303 GetVar(result_register(), new_target_var);
3304 __ Push(result_register());
3306 EmitLoadSuperConstructor();
3307 __ push(result_register());
3309 // Push the arguments ("left-to-right") on the stack.
3310 ZoneList<Expression*>* args = expr->arguments();
3311 int arg_count = args->length();
3312 for (int i = 0; i < arg_count; i++) {
3313 VisitForStackValue(args->at(i));
3316 // Call the construct call builtin that handles allocation and
3317 // constructor invocation.
3318 SetSourcePosition(expr->position());
3320 // Load function and argument count into r1 and r0.
3321 __ mov(r0, Operand(arg_count));
3322 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3324 // Record call targets in unoptimized code.
3325 if (FLAG_pretenuring_call_new) {
3327 /* TODO(dslomov): support pretenuring.
3328 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3329 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3330 expr->CallNewFeedbackSlot().ToInt() + 1);
3331 */
3334 __ Move(r2, FeedbackVector());
3335 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3337 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3338 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3342 RecordJSReturnSite(expr);
3344 SuperReference* super_ref = expr->expression()->AsSuperReference();
3345 Variable* this_var = super_ref->this_var()->var();
3346 GetVar(r1, this_var);
3347 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
3348 Label uninitialized_this;
3349 __ b(eq, &uninitialized_this);
3350 __ mov(r0, Operand(this_var->name()));
3352 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3353 __ bind(&uninitialized_this);
3355 EmitVariableAssignment(this_var, Token::INIT_CONST);
3356 context()->Plug(r0);
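// Illustrative consequence of the hole check above: in
//   class B extends A { constructor() { super(); super(); } }
// the second super() finds 'this' already initialized (no longer the hole)
// and takes the kThrowReferenceError path.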
3360 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3361 ZoneList<Expression*>* args = expr->arguments();
3362 DCHECK(args->length() == 1);
3364 VisitForAccumulatorValue(args->at(0));
3366 Label materialize_true, materialize_false;
3367 Label* if_true = NULL;
3368 Label* if_false = NULL;
3369 Label* fall_through = NULL;
3370 context()->PrepareTest(&materialize_true, &materialize_false,
3371 &if_true, &if_false, &fall_through);
3373 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3375 Split(eq, if_true, if_false, fall_through);
3377 context()->Plug(if_true, if_false);
3381 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3382 ZoneList<Expression*>* args = expr->arguments();
3383 DCHECK(args->length() == 1);
3385 VisitForAccumulatorValue(args->at(0));
3387 Label materialize_true, materialize_false;
3388 Label* if_true = NULL;
3389 Label* if_false = NULL;
3390 Label* fall_through = NULL;
3391 context()->PrepareTest(&materialize_true, &materialize_false,
3392 &if_true, &if_false, &fall_through);
3394 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3395 __ NonNegativeSmiTst(r0);
3396 Split(eq, if_true, if_false, fall_through);
3398 context()->Plug(if_true, if_false);
3402 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3403 ZoneList<Expression*>* args = expr->arguments();
3404 DCHECK(args->length() == 1);
3406 VisitForAccumulatorValue(args->at(0));
3408 Label materialize_true, materialize_false;
3409 Label* if_true = NULL;
3410 Label* if_false = NULL;
3411 Label* fall_through = NULL;
3412 context()->PrepareTest(&materialize_true, &materialize_false,
3413 &if_true, &if_false, &fall_through);
3415 __ JumpIfSmi(r0, if_false);
3416 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3419 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3420 // Undetectable objects behave like undefined when tested with typeof.
3421 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3422 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3424 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3425 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3427 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3428 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3429 Split(le, if_true, if_false, fall_through);
3431 context()->Plug(if_true, if_false);
3435 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3436 ZoneList<Expression*>* args = expr->arguments();
3437 DCHECK(args->length() == 1);
3439 VisitForAccumulatorValue(args->at(0));
3441 Label materialize_true, materialize_false;
3442 Label* if_true = NULL;
3443 Label* if_false = NULL;
3444 Label* fall_through = NULL;
3445 context()->PrepareTest(&materialize_true, &materialize_false,
3446 &if_true, &if_false, &fall_through);
3448 __ JumpIfSmi(r0, if_false);
3449 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3450 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3451 Split(ge, if_true, if_false, fall_through);
3453 context()->Plug(if_true, if_false);
3457 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3458 ZoneList<Expression*>* args = expr->arguments();
3459 DCHECK(args->length() == 1);
3461 VisitForAccumulatorValue(args->at(0));
3463 Label materialize_true, materialize_false;
3464 Label* if_true = NULL;
3465 Label* if_false = NULL;
3466 Label* fall_through = NULL;
3467 context()->PrepareTest(&materialize_true, &materialize_false,
3468 &if_true, &if_false, &fall_through);
3470 __ JumpIfSmi(r0, if_false);
3471 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3472 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3473 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3474 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3475 Split(ne, if_true, if_false, fall_through);
3477 context()->Plug(if_true, if_false);
3481 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3482 CallRuntime* expr) {
3483 ZoneList<Expression*>* args = expr->arguments();
3484 DCHECK(args->length() == 1);
3486 VisitForAccumulatorValue(args->at(0));
3488 Label materialize_true, materialize_false, skip_lookup;
3489 Label* if_true = NULL;
3490 Label* if_false = NULL;
3491 Label* fall_through = NULL;
3492 context()->PrepareTest(&materialize_true, &materialize_false,
3493 &if_true, &if_false, &fall_through);
3495 __ AssertNotSmi(r0);
3497 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3498 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3499 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3500 __ b(ne, &skip_lookup);
3502 // Check for fast case object. Generate false result for slow case object.
3503 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3504 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3505 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3509 // Look for the valueOf name in the descriptor array, and indicate false if
3510 // found. Since we omit an enumeration index check, a valueOf added via a
3511 // transition that shares this descriptor array yields a false positive.
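// Illustrative example (a sketch of the intent): an untouched
// new String("x") wrapper has no own "valueOf", so the default valueOf
// from String.prototype is safe to use; an object carrying its own
// valueOf (e.g. s.valueOf = function() { ... }) must yield false here.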
3512 Label entry, loop, done;
3514 // Skip loop if no descriptors are valid.
3515 __ NumberOfOwnDescriptors(r3, r1);
3516 __ cmp(r3, Operand::Zero());
3519 __ LoadInstanceDescriptors(r1, r4);
3520 // r4: descriptor array.
3521 // r3: valid entries in the descriptor array.
3522 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3524 // Calculate location of the first key name.
3525 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3526 // Calculate the end of the descriptor array.
3528 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3530 // Loop through all the keys in the descriptor array. If one of these is the
3531 // string "valueOf", the result is false.
3532 // The use of ip to store the valueOf string assumes that it is not otherwise
3533 // used in the loop below.
3534 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3537 __ ldr(r3, MemOperand(r4, 0));
3540 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3542 __ cmp(r4, Operand(r2));
3547 // Set the bit in the map to indicate that there is no local valueOf field.
3548 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3549 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3550 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3552 __ bind(&skip_lookup);
3554 // If a valueOf property is not found on the object, check that its
3555 // prototype is the unmodified String prototype. If not, the result is false.
3556 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3557 __ JumpIfSmi(r2, if_false);
3558 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3559 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3560 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3561 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3563 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3564 Split(eq, if_true, if_false, fall_through);
3566 context()->Plug(if_true, if_false);
3570 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3571 ZoneList<Expression*>* args = expr->arguments();
3572 DCHECK(args->length() == 1);
3574 VisitForAccumulatorValue(args->at(0));
3576 Label materialize_true, materialize_false;
3577 Label* if_true = NULL;
3578 Label* if_false = NULL;
3579 Label* fall_through = NULL;
3580 context()->PrepareTest(&materialize_true, &materialize_false,
3581 &if_true, &if_false, &fall_through);
3583 __ JumpIfSmi(r0, if_false);
3584 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3585 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3586 Split(eq, if_true, if_false, fall_through);
3588 context()->Plug(if_true, if_false);
3592 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3593 ZoneList<Expression*>* args = expr->arguments();
3594 DCHECK(args->length() == 1);
3596 VisitForAccumulatorValue(args->at(0));
3598 Label materialize_true, materialize_false;
3599 Label* if_true = NULL;
3600 Label* if_false = NULL;
3601 Label* fall_through = NULL;
3602 context()->PrepareTest(&materialize_true, &materialize_false,
3603 &if_true, &if_false, &fall_through);
3605 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3606 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3607 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
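// IEEE-754 detail the two compares below rely on: -0.0 is the only double
// whose exponent word is 0x80000000 (sign bit alone) and whose mantissa
// word is 0x00000000, so both comparisons must set the eq condition.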
3608 __ cmp(r2, Operand(0x80000000));
3609 __ cmp(r1, Operand(0x00000000), eq);
3611 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3612 Split(eq, if_true, if_false, fall_through);
3614 context()->Plug(if_true, if_false);
3618 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3619 ZoneList<Expression*>* args = expr->arguments();
3620 DCHECK(args->length() == 1);
3622 VisitForAccumulatorValue(args->at(0));
3624 Label materialize_true, materialize_false;
3625 Label* if_true = NULL;
3626 Label* if_false = NULL;
3627 Label* fall_through = NULL;
3628 context()->PrepareTest(&materialize_true, &materialize_false,
3629 &if_true, &if_false, &fall_through);
3631 __ JumpIfSmi(r0, if_false);
3632 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3634 Split(eq, if_true, if_false, fall_through);
3636 context()->Plug(if_true, if_false);
3640 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3641 ZoneList<Expression*>* args = expr->arguments();
3642 DCHECK(args->length() == 1);
3644 VisitForAccumulatorValue(args->at(0));
3646 Label materialize_true, materialize_false;
3647 Label* if_true = NULL;
3648 Label* if_false = NULL;
3649 Label* fall_through = NULL;
3650 context()->PrepareTest(&materialize_true, &materialize_false,
3651 &if_true, &if_false, &fall_through);
3653 __ JumpIfSmi(r0, if_false);
3654 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3655 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3656 Split(eq, if_true, if_false, fall_through);
3658 context()->Plug(if_true, if_false);
3662 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3663 ZoneList<Expression*>* args = expr->arguments();
3664 DCHECK(args->length() == 1);
3666 VisitForAccumulatorValue(args->at(0));
3668 Label materialize_true, materialize_false;
3669 Label* if_true = NULL;
3670 Label* if_false = NULL;
3671 Label* fall_through = NULL;
3672 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3673 &if_false, &fall_through);
3675 __ JumpIfSmi(r0, if_false);
3676 Register map = r1;
3677 Register type_reg = r2;
3678 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3679 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3680 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3681 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3682 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3683 Split(ls, if_true, if_false, fall_through);
3685 context()->Plug(if_true, if_false);
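// The sub/cmp pair above is the usual ARM idiom for a two-sided range check:
// subtracting FIRST_JS_PROXY_TYPE biases the instance type so the valid range
// starts at zero, and a single unsigned comparison (ls = lower or same) then
// covers both bounds. Conceptually:
//
//   (unsigned)(type - FIRST_JS_PROXY_TYPE)
//       <= (LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE)
//
// Types below FIRST_JS_PROXY_TYPE wrap around to large unsigned values and
// fail the same comparison, so one branch handles both ends.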
3689 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3690 DCHECK(expr->arguments()->length() == 0);
3692 Label materialize_true, materialize_false;
3693 Label* if_true = NULL;
3694 Label* if_false = NULL;
3695 Label* fall_through = NULL;
3696 context()->PrepareTest(&materialize_true, &materialize_false,
3697 &if_true, &if_false, &fall_through);
3699 // Get the frame pointer for the calling frame.
3700 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3702 // Skip the arguments adaptor frame if it exists.
3703 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3704 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3705 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3707 // Check the marker in the calling frame.
3708 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3709 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3710 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3711 Split(eq, if_true, if_false, fall_through);
3713 context()->Plug(if_true, if_false);
3717 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3718 ZoneList<Expression*>* args = expr->arguments();
3719 DCHECK(args->length() == 2);
3721 // Load the two objects into registers and perform the comparison.
3722 VisitForStackValue(args->at(0));
3723 VisitForAccumulatorValue(args->at(1));
3725 Label materialize_true, materialize_false;
3726 Label* if_true = NULL;
3727 Label* if_false = NULL;
3728 Label* fall_through = NULL;
3729 context()->PrepareTest(&materialize_true, &materialize_false,
3730 &if_true, &if_false, &fall_through);
3732 __ pop(r1);
3733 __ cmp(r0, r1);
3734 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3735 Split(eq, if_true, if_false, fall_through);
3737 context()->Plug(if_true, if_false);
3741 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3742 ZoneList<Expression*>* args = expr->arguments();
3743 DCHECK(args->length() == 1);
3745 // ArgumentsAccessStub expects the key in r1 and the formal
3746 // parameter count in r0.
3747 VisitForAccumulatorValue(args->at(0));
3748 __ mov(r1, r0);
3749 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3750 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3751 __ CallStub(&stub);
3752 context()->Plug(r0);
3756 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3757 DCHECK(expr->arguments()->length() == 0);
3759 // Get the number of formal parameters.
3760 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3762 // Check if the calling frame is an arguments adaptor frame.
3763 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3764 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3765 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3767 // Arguments adaptor case: Read the arguments length from the
3768 // adaptor frame.
3769 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3771 context()->Plug(r0);
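// Note the conditional ldr above: the preceding cmp sets the flags, so the
// load of the adaptor frame's length executes only when the caller frame
// really is an arguments adaptor frame, replacing a branch-over sequence such
// as:
//
//   bne skip
//   ldr r0, [r2, #ArgumentsAdaptorFrameConstants::kLengthOffset]
// skip:
//
// with a single predicated instruction. If the condition fails, r0 keeps the
// formal parameter count loaded above.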
3775 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3776 ZoneList<Expression*>* args = expr->arguments();
3777 DCHECK(args->length() == 1);
3778 Label done, null, function, non_function_constructor;
3780 VisitForAccumulatorValue(args->at(0));
3782 // If the object is a smi, we return null.
3783 __ JumpIfSmi(r0, &null);
3785 // Check that the object is a JS object but take special care of JS
3786 // functions to make sure they have 'Function' as their class.
3787 // Assume that there are only two callable types, and one of them is at
3788 // either end of the type range for JS object types. Saves extra comparisons.
3789 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3790 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3791 // Map is now in r0.
3792 __ b(lt, &null);
3793 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3794 FIRST_SPEC_OBJECT_TYPE + 1);
3795 __ b(eq, &function);
3797 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3798 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3799 LAST_SPEC_OBJECT_TYPE - 1);
3800 __ b(eq, &function);
3801 // Assume that there is no larger type.
3802 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3804 // Check if the constructor in the map is a JS function.
3805 Register instance_type = r2;
3806 __ GetMapConstructor(r0, r0, r1, instance_type);
3807 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3808 __ b(ne, &non_function_constructor);
3810 // r0 now contains the constructor function. Grab the
3811 // instance class name from there.
3812 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3813 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3814 __ b(&done);
3816 // Functions have class 'Function'.
3817 __ bind(&function);
3818 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3819 __ jmp(&done);
3821 // Objects with a non-function constructor have class 'Object'.
3822 __ bind(&non_function_constructor);
3823 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3824 __ jmp(&done);
3826 // Non-JS objects have class null.
3827 __ bind(&null);
3828 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3830 // All done.
3831 __ bind(&done);
3833 context()->Plug(r0);
3837 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3838 // Load the arguments on the stack and call the stub.
3839 SubStringStub stub(isolate());
3840 ZoneList<Expression*>* args = expr->arguments();
3841 DCHECK(args->length() == 3);
3842 VisitForStackValue(args->at(0));
3843 VisitForStackValue(args->at(1));
3844 VisitForStackValue(args->at(2));
3845 __ CallStub(&stub);
3846 context()->Plug(r0);
3850 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3851 // Load the arguments on the stack and call the stub.
3852 RegExpExecStub stub(isolate());
3853 ZoneList<Expression*>* args = expr->arguments();
3854 DCHECK(args->length() == 4);
3855 VisitForStackValue(args->at(0));
3856 VisitForStackValue(args->at(1));
3857 VisitForStackValue(args->at(2));
3858 VisitForStackValue(args->at(3));
3859 __ CallStub(&stub);
3860 context()->Plug(r0);
3864 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3865 ZoneList<Expression*>* args = expr->arguments();
3866 DCHECK(args->length() == 1);
3867 VisitForAccumulatorValue(args->at(0)); // Load the object.
3869 Label done;
3870 // If the object is a smi return the object.
3871 __ JumpIfSmi(r0, &done);
3872 // If the object is not a value type, return the object.
3873 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3874 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3876 __ bind(&done);
3877 context()->Plug(r0);
3881 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3882 ZoneList<Expression*>* args = expr->arguments();
3883 DCHECK(args->length() == 2);
3884 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3885 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3887 VisitForAccumulatorValue(args->at(0)); // Load the object.
3889 Label runtime, done, not_date_object;
3890 Register object = r0;
3891 Register result = r0;
3892 Register scratch0 = r9;
3893 Register scratch1 = r1;
3895 __ JumpIfSmi(object, &not_date_object);
3896 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3897 __ b(ne, &not_date_object);
3899 if (index->value() == 0) {
3900 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3901 __ jmp(&done);
3902 } else {
3903 if (index->value() < JSDate::kFirstUncachedField) {
3904 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3905 __ mov(scratch1, Operand(stamp));
3906 __ ldr(scratch1, MemOperand(scratch1));
3907 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3908 __ cmp(scratch1, scratch0);
3909 __ b(ne, &runtime);
3910 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3911 kPointerSize * index->value()));
3912 __ jmp(&done);
3913 }
3914 __ bind(&runtime);
3915 __ PrepareCallCFunction(2, scratch1);
3916 __ mov(r1, Operand(index));
3917 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3918 __ jmp(&done);
3919 }
3921 __ bind(&not_date_object);
3922 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3923 __ bind(&done);
3924 context()->Plug(r0);
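// The fast path above hinges on the date cache stamp: the isolate bumps a
// global stamp whenever cached date fields are invalidated, and every JSDate
// snapshots the stamp when its fields are computed. Roughly (field names here
// are shorthand for the offsets used above, not real accessors):
//
//   if (object->cache_stamp == isolate->date_cache_stamp) {
//     // cached field is still valid; read it straight off the object
//   } else {
//     // fall back to the C function, which recomputes and restamps
//   }
//
// Only fields below JSDate::kFirstUncachedField are cached at all; the rest
// always take the runtime call.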
3928 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3929 ZoneList<Expression*>* args = expr->arguments();
3930 DCHECK_EQ(3, args->length());
3932 Register string = r0;
3933 Register index = r1;
3934 Register value = r2;
3936 VisitForStackValue(args->at(0)); // index
3937 VisitForStackValue(args->at(1)); // value
3938 VisitForAccumulatorValue(args->at(2)); // string
3939 __ Pop(index, value);
3941 if (FLAG_debug_code) {
3942 __ SmiTst(value);
3943 __ Check(eq, kNonSmiValue);
3944 __ SmiTst(index);
3945 __ Check(eq, kNonSmiIndex);
3946 __ SmiUntag(index, index);
3947 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3948 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3949 __ SmiTag(index, index);
3950 }
3952 __ SmiUntag(value, value);
3953 __ add(ip,
3954 string,
3955 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3956 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3957 context()->Plug(string);
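// The strb above folds smi-untagging into the addressing mode: index is still
// a smi (value << kSmiTagSize), so addressing with
// MemOperand(ip, index, LSR, kSmiTagSize) shifts the tag away while forming
// the address, i.e. the stored byte lands at
//
//   ip + (index >> kSmiTagSize)  ==  chars + untagged_index
//
// without needing a separate SmiUntag instruction.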
3961 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3962 ZoneList<Expression*>* args = expr->arguments();
3963 DCHECK_EQ(3, args->length());
3965 Register string = r0;
3966 Register index = r1;
3967 Register value = r2;
3969 VisitForStackValue(args->at(0)); // index
3970 VisitForStackValue(args->at(1)); // value
3971 VisitForAccumulatorValue(args->at(2)); // string
3972 __ Pop(index, value);
3974 if (FLAG_debug_code) {
3975 __ SmiTst(value);
3976 __ Check(eq, kNonSmiValue);
3977 __ SmiTst(index);
3978 __ Check(eq, kNonSmiIndex);
3979 __ SmiUntag(index, index);
3980 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3981 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3982 __ SmiTag(index, index);
3983 }
3985 __ SmiUntag(value, value);
3986 __ add(ip,
3987 string,
3988 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3989 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3990 __ strh(value, MemOperand(ip, index));
3991 context()->Plug(string);
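// Here no shift is needed at all: the STATIC_ASSERT pins kSmiTagSize == 1 and
// kSmiTag == 0, so a smi index is the untagged index times two -- exactly the
// byte offset of a two-byte character. The smi tag does the scaling for free:
//
//   smi(index) == index << 1 == index * sizeof(uc16)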
3996 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3997 // Load the arguments on the stack and call the runtime function.
3998 ZoneList<Expression*>* args = expr->arguments();
3999 DCHECK(args->length() == 2);
4000 VisitForStackValue(args->at(0));
4001 VisitForStackValue(args->at(1));
4002 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4003 __ CallStub(&stub);
4004 context()->Plug(r0);
4008 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4009 ZoneList<Expression*>* args = expr->arguments();
4010 DCHECK(args->length() == 2);
4011 VisitForStackValue(args->at(0)); // Load the object.
4012 VisitForAccumulatorValue(args->at(1)); // Load the value.
4013 __ pop(r1);  // r0 = value. r1 = object.
4015 Label done;
4016 // If the object is a smi, return the value.
4017 __ JumpIfSmi(r1, &done);
4019 // If the object is not a value type, return the value.
4020 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
4021 __ b(ne, &done);
4023 // Store the value.
4024 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
4025 // Update the write barrier. Save the value as it will be
4026 // overwritten by the write barrier code and is needed afterward.
4027 __ mov(r2, r0);
4028 __ RecordWriteField(
4029 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
4031 __ bind(&done);
4032 context()->Plug(r0);
4036 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4037 ZoneList<Expression*>* args = expr->arguments();
4038 DCHECK_EQ(args->length(), 1);
4039 // Load the argument into r0 and call the stub.
4040 VisitForAccumulatorValue(args->at(0));
4042 NumberToStringStub stub(isolate());
4043 __ CallStub(&stub);
4044 context()->Plug(r0);
4048 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4049 ZoneList<Expression*>* args = expr->arguments();
4050 DCHECK(args->length() == 1);
4051 VisitForAccumulatorValue(args->at(0));
4053 Label done;
4054 StringCharFromCodeGenerator generator(r0, r1);
4055 generator.GenerateFast(masm_);
4056 __ jmp(&done);
4058 NopRuntimeCallHelper call_helper;
4059 generator.GenerateSlow(masm_, call_helper);
4061 __ bind(&done);
4062 context()->Plug(r1);
4066 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4067 ZoneList<Expression*>* args = expr->arguments();
4068 DCHECK(args->length() == 2);
4069 VisitForStackValue(args->at(0));
4070 VisitForAccumulatorValue(args->at(1));
4072 Register object = r1;
4073 Register index = r0;
4074 Register result = r3;
4076 __ pop(object);
4078 Label need_conversion;
4079 Label index_out_of_range;
4080 Label done;
4081 StringCharCodeAtGenerator generator(object,
4082 index,
4083 result,
4084 &need_conversion,
4085 &need_conversion,
4086 &index_out_of_range,
4087 STRING_INDEX_IS_NUMBER);
4088 generator.GenerateFast(masm_);
4089 __ jmp(&done);
4091 __ bind(&index_out_of_range);
4092 // When the index is out of range, the spec requires us to return
4093 // NaN.
4094 __ LoadRoot(result, Heap::kNanValueRootIndex);
4095 __ jmp(&done);
4097 __ bind(&need_conversion);
4098 // Load the undefined value into the result register, which will
4099 // trigger conversion.
4100 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4101 __ jmp(&done);
4103 NopRuntimeCallHelper call_helper;
4104 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4106 __ bind(&done);
4107 context()->Plug(result);
4111 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4112 ZoneList<Expression*>* args = expr->arguments();
4113 DCHECK(args->length() == 2);
4114 VisitForStackValue(args->at(0));
4115 VisitForAccumulatorValue(args->at(1));
4117 Register object = r1;
4118 Register index = r0;
4119 Register scratch = r3;
4120 Register result = r0;
4122 __ pop(object);
4124 Label need_conversion;
4125 Label index_out_of_range;
4126 Label done;
4127 StringCharAtGenerator generator(object,
4128 index,
4129 scratch,
4130 result,
4131 &need_conversion,
4132 &need_conversion,
4133 &index_out_of_range,
4134 STRING_INDEX_IS_NUMBER);
4135 generator.GenerateFast(masm_);
4136 __ jmp(&done);
4138 __ bind(&index_out_of_range);
4139 // When the index is out of range, the spec requires us to return
4140 // the empty string.
4141 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4142 __ jmp(&done);
4144 __ bind(&need_conversion);
4145 // Move smi zero into the result register, which will trigger
4146 // conversion.
4147 __ mov(result, Operand(Smi::FromInt(0)));
4148 __ jmp(&done);
4150 NopRuntimeCallHelper call_helper;
4151 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4153 __ bind(&done);
4154 context()->Plug(result);
4158 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4159 ZoneList<Expression*>* args = expr->arguments();
4160 DCHECK_EQ(2, args->length());
4161 VisitForStackValue(args->at(0));
4162 VisitForAccumulatorValue(args->at(1));
4164 __ pop(r1);
4165 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4166 __ CallStub(&stub);
4167 context()->Plug(r0);
4171 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4172 ZoneList<Expression*>* args = expr->arguments();
4173 DCHECK_EQ(2, args->length());
4174 VisitForStackValue(args->at(0));
4175 VisitForStackValue(args->at(1));
4177 StringCompareStub stub(isolate());
4178 __ CallStub(&stub);
4179 context()->Plug(r0);
4183 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4184 ZoneList<Expression*>* args = expr->arguments();
4185 DCHECK(args->length() >= 2);
4187 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4188 for (int i = 0; i < arg_count + 1; i++) {
4189 VisitForStackValue(args->at(i));
4190 }
4191 VisitForAccumulatorValue(args->last());  // Function.
4193 Label runtime, done;
4194 // Check for non-function argument (including proxy).
4195 __ JumpIfSmi(r0, &runtime);
4196 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4197 __ b(ne, &runtime);
4199 // InvokeFunction requires the function in r1. Move it in there.
4200 __ mov(r1, result_register());
4201 ParameterCount count(arg_count);
4202 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
4203 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4204 __ jmp(&done);
4206 __ bind(&runtime);
4207 __ push(r0);
4208 __ CallRuntime(Runtime::kCall, args->length());
4209 __ bind(&done);
4211 context()->Plug(r0);
4215 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4216 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4217 GetVar(result_register(), new_target_var);
4218 __ Push(result_register());
4220 EmitLoadSuperConstructor();
4221 __ Push(result_register());
4223 // Check if the calling frame is an arguments adaptor frame.
4224 Label adaptor_frame, args_set_up, runtime;
4225 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4226 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4227 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4228 __ b(eq, &adaptor_frame);
4229 // default constructor has no arguments, so no adaptor frame means no args.
4230 __ mov(r0, Operand::Zero());
4231 __ b(&args_set_up);
4233 // Copy arguments from adaptor frame.
4234 {
4235 __ bind(&adaptor_frame);
4236 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4237 __ SmiUntag(r1, r1);
4239 // Subtract 1 from arguments count, for new.target.
4240 __ sub(r1, r1, Operand(1));
4241 __ mov(r0, r1);
4243 // Get arguments pointer in r2.
4244 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
4245 __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
4246 Label loop;
4247 __ bind(&loop);
4248 // Pre-decrement r2 with kPointerSize on each iteration.
4249 // Pre-decrement in order to skip receiver.
4250 __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
4251 __ Push(r3);
4252 __ sub(r1, r1, Operand(1));
4253 __ cmp(r1, Operand::Zero());
4254 __ b(ne, &loop);
4255 }
4257 __ bind(&args_set_up);
4258 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4259 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
4260 __ Push(r1, r2);
4261 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4262 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4264 __ Drop(1);
4266 context()->Plug(result_register());
4270 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4271 RegExpConstructResultStub stub(isolate());
4272 ZoneList<Expression*>* args = expr->arguments();
4273 DCHECK(args->length() == 3);
4274 VisitForStackValue(args->at(0));
4275 VisitForStackValue(args->at(1));
4276 VisitForAccumulatorValue(args->at(2));
4277 __ pop(r1);
4278 __ pop(r2);
4279 __ CallStub(&stub);
4280 context()->Plug(r0);
4284 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4285 ZoneList<Expression*>* args = expr->arguments();
4286 DCHECK_EQ(2, args->length());
4287 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4288 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4290 Handle<FixedArray> jsfunction_result_caches(
4291 isolate()->native_context()->jsfunction_result_caches());
4292 if (jsfunction_result_caches->length() <= cache_id) {
4293 __ Abort(kAttemptToUseUndefinedCache);
4294 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4295 context()->Plug(r0);
4296 return;
4297 }
4299 VisitForAccumulatorValue(args->at(1));
4301 Register key = r0;
4302 Register cache = r1;
4303 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4304 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4305 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4306 __ ldr(cache,
4307 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4310 Label done, not_found;
4311 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4312 // r2 now holds finger offset as a smi.
4313 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4314 // r3 now points to the start of fixed array elements.
4315 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
4316 // Note side effect of PreIndex: r3 now points to the key of the pair.
4317 __ cmp(key, r2);
4318 __ b(ne, &not_found);
4320 __ ldr(r0, MemOperand(r3, kPointerSize));
4321 __ b(&done);
4323 __ bind(&not_found);
4324 // Call runtime to perform the lookup.
4325 __ Push(cache, key);
4326 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4328 __ bind(&done);
4329 context()->Plug(r0);
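// The cache probed above is a FixedArray of (key, value) pairs with a
// "finger" remembering the last hit, so a repeated lookup costs one load and
// one compare. In outline (a sketch of the probe only, not the full
// protocol):
//
//   finger = cache[kFingerOffset]            // smi offset into the array
//   if (cache[finger] == key) {
//     result = cache[finger + 1]             // value sits next to its key
//   } else {
//     result = %GetFromCacheRT(cache, key)   // slow path updates the finger
//   }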
4333 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4334 ZoneList<Expression*>* args = expr->arguments();
4335 VisitForAccumulatorValue(args->at(0));
4337 Label materialize_true, materialize_false;
4338 Label* if_true = NULL;
4339 Label* if_false = NULL;
4340 Label* fall_through = NULL;
4341 context()->PrepareTest(&materialize_true, &materialize_false,
4342 &if_true, &if_false, &fall_through);
4344 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4345 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
4346 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4347 Split(eq, if_true, if_false, fall_through);
4349 context()->Plug(if_true, if_false);
4353 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4354 ZoneList<Expression*>* args = expr->arguments();
4355 DCHECK(args->length() == 1);
4356 VisitForAccumulatorValue(args->at(0));
4358 __ AssertString(r0);
4360 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4361 __ IndexFromHash(r0, r0);
4363 context()->Plug(r0);
4367 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4368 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4369 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4370 one_char_separator_loop_entry, long_separator_loop;
4371 ZoneList<Expression*>* args = expr->arguments();
4372 DCHECK(args->length() == 2);
4373 VisitForStackValue(args->at(1));
4374 VisitForAccumulatorValue(args->at(0));
4376 // All aliases of the same register have disjoint lifetimes.
4377 Register array = r0;
4378 Register elements = no_reg; // Will be r0.
4379 Register result = no_reg; // Will be r0.
4380 Register separator = r1;
4381 Register array_length = r2;
4382 Register result_pos = no_reg; // Will be r2
4383 Register string_length = r3;
4384 Register string = r4;
4385 Register element = r5;
4386 Register elements_end = r6;
4387 Register scratch = r9;
4389 // Separator operand is on the stack.
4390 __ pop(separator);
4392 // Check that the array is a JSArray.
4393 __ JumpIfSmi(array, &bailout);
4394 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
4395 __ b(ne, &bailout);
4397 // Check that the array has fast elements.
4398 __ CheckFastElements(scratch, array_length, &bailout);
4400 // If the array has length zero, return the empty string.
4401 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4402 __ SmiUntag(array_length, SetCC);
4403 __ b(ne, &non_trivial_array);
4404 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
4405 __ b(&done);
4407 __ bind(&non_trivial_array);
4409 // Get the FixedArray containing array's elements.
4411 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4412 array = no_reg; // End of array's live range.
4414 // Check that all array elements are sequential one-byte strings, and
4415 // accumulate the sum of their lengths, as a smi-encoded value.
4416 __ mov(string_length, Operand::Zero());
4417 __ add(element,
4418 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4419 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4420 // Loop condition: while (element < elements_end).
4421 // Live values in registers:
4422 // elements: Fixed array of strings.
4423 // array_length: Length of the fixed array of strings (not smi)
4424 // separator: Separator string
4425 // string_length: Accumulated sum of string lengths (smi).
4426 // element: Current array element.
4427 // elements_end: Array end.
4428 if (generate_debug_code_) {
4429 __ cmp(array_length, Operand::Zero());
4430 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4431 }
4432 __ bind(&loop);
4433 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4434 __ JumpIfSmi(string, &bailout);
4435 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
4436 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4437 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4438 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4439 __ add(string_length, string_length, Operand(scratch), SetCC);
4440 __ b(vs, &bailout);
4441 __ cmp(element, elements_end);
4442 __ b(lt, &loop);
4444 // If array_length is 1, return elements[0], a string.
4445 __ cmp(array_length, Operand(1));
4446 __ b(ne, &not_size_one_array);
4447 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4448 __ b(&done);
4450 __ bind(&not_size_one_array);
4452 // Live values in registers:
4453 // separator: Separator string
4454 // array_length: Length of the array.
4455 // string_length: Sum of string lengths (smi).
4456 // elements: FixedArray of strings.
4458 // Check that the separator is a flat one-byte string.
4459 __ JumpIfSmi(separator, &bailout);
4460 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4461 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4462 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4464 // Add (separator length times array_length) - separator length to the
4465 // string_length to get the length of the result string. array_length is not
4466 // smi but the other values are, so the result is a smi
4467 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4468 __ sub(string_length, string_length, Operand(scratch));
4469 __ smull(scratch, ip, array_length, scratch);
4470 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4471 // zero.
4472 __ cmp(ip, Operand::Zero());
4473 __ b(ne, &bailout);
4474 __ tst(scratch, Operand(0x80000000));
4475 __ b(ne, &bailout);
4476 __ add(string_length, string_length, Operand(scratch), SetCC);
4477 __ b(vs, &bailout);
4478 __ SmiUntag(string_length);
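// Why this proves the product fits in a smi: smull produces a full 64-bit
// result (low word in scratch, high word in ip). A non-negative value fits
// the 31-bit smi payload exactly when the top 33 bits of the 64-bit result
// are zero, i.e.
//
//   ip == 0                          // high 32 bits all zero
//   && (scratch & 0x80000000) == 0   // bit 31 clear, so the value < 2^31
//
// which is what the cmp/tst pair checks before the smi lengths are added.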
4480 // Get first element in the array to free up the elements register to be used
4481 // for the result.
4482 __ add(element,
4483 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4484 result = elements; // End of live range for elements.
4486 // Live values in registers:
4487 // element: First array element
4488 // separator: Separator string
4489 // string_length: Length of result string (not smi)
4490 // array_length: Length of the array.
4491 __ AllocateOneByteString(result, string_length, scratch,
4492 string, // used as scratch
4493 elements_end,  // used as scratch
4494 &bailout);
4495 // Prepare for looping. Set up elements_end to end of the array. Set
4496 // result_pos to the position of the result where to write the first
4497 // element.
4498 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4499 result_pos = array_length; // End of live range for array_length.
4500 array_length = no_reg;
4502 __ add(result_pos, result,
4503 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4505 // Check the length of the separator.
4506 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4507 __ cmp(scratch, Operand(Smi::FromInt(1)));
4508 __ b(eq, &one_char_separator);
4509 __ b(gt, &long_separator);
4511 // Empty separator case
4512 __ bind(&empty_separator_loop);
4513 // Live values in registers:
4514 // result_pos: the position to which we are currently copying characters.
4515 // element: Current array element.
4516 // elements_end: Array end.
4518 // Copy next array element to the result.
4519 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4520 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4521 __ SmiUntag(string_length);
4522 __ add(string,
4523 string,
4524 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4525 __ CopyBytes(string, result_pos, string_length, scratch);
4526 __ cmp(element, elements_end);
4527 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4528 DCHECK(result.is(r0));
4529 __ b(&done);
4531 // One-character separator case
4532 __ bind(&one_char_separator);
4533 // Replace separator with its one-byte character value.
4534 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4535 // Jump into the loop after the code that copies the separator, so the first
4536 // element is not preceded by a separator
4537 __ jmp(&one_char_separator_loop_entry);
4539 __ bind(&one_char_separator_loop);
4540 // Live values in registers:
4541 // result_pos: the position to which we are currently copying characters.
4542 // element: Current array element.
4543 // elements_end: Array end.
4544 // separator: Single separator one-byte char (in lower byte).
4546 // Copy the separator character to the result.
4547 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4549 // Copy next array element to the result.
4550 __ bind(&one_char_separator_loop_entry);
4551 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4552 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4553 __ SmiUntag(string_length);
4554 __ add(string,
4555 string,
4556 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4557 __ CopyBytes(string, result_pos, string_length, scratch);
4558 __ cmp(element, elements_end);
4559 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4560 DCHECK(result.is(r0));
4561 __ b(&done);
4563 // Long separator case (separator is more than one character). Entry is at the
4564 // label long_separator below.
4565 __ bind(&long_separator_loop);
4566 // Live values in registers:
4567 // result_pos: the position to which we are currently copying characters.
4568 // element: Current array element.
4569 // elements_end: Array end.
4570 // separator: Separator string.
4572 // Copy the separator to the result.
4573 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4574 __ SmiUntag(string_length);
4575 __ add(string,
4576 separator,
4577 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4578 __ CopyBytes(string, result_pos, string_length, scratch);
4580 __ bind(&long_separator);
4581 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4582 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4583 __ SmiUntag(string_length);
4584 __ add(string,
4585 string,
4586 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4587 __ CopyBytes(string, result_pos, string_length, scratch);
4588 __ cmp(element, elements_end);
4589 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4590 DCHECK(result.is(r0));
4591 __ b(&done);
4593 __ bind(&bailout);
4594 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4595 __ bind(&done);
4596 context()->Plug(r0);
4600 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4601 DCHECK(expr->arguments()->length() == 0);
4602 ExternalReference debug_is_active =
4603 ExternalReference::debug_is_active_address(isolate());
4604 __ mov(ip, Operand(debug_is_active));
4605 __ ldrb(r0, MemOperand(ip));
4606 __ SmiTag(r0);
4607 context()->Plug(r0);
4611 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4612 ZoneList<Expression*>* args = expr->arguments();
4613 int arg_count = args->length();
4615 if (expr->is_jsruntime()) {
4616 Comment cmnt(masm_, "[ CallRuntime");
4617 // Push the builtins object as the receiver.
4618 Register receiver = LoadDescriptor::ReceiverRegister();
4619 __ ldr(receiver, GlobalObjectOperand());
4620 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4621 __ push(receiver);
4623 // Load the function from the receiver.
4624 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4625 if (FLAG_vector_ics) {
4626 __ mov(VectorLoadICDescriptor::SlotRegister(),
4627 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4628 CallLoadIC(NOT_CONTEXTUAL);
4629 } else {
4630 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4631 }
4633 // Push the target function under the receiver.
4634 __ ldr(ip, MemOperand(sp, 0));
4635 __ push(ip);
4636 __ str(r0, MemOperand(sp, kPointerSize));
4638 // Push the arguments ("left-to-right").
4639 for (int i = 0; i < arg_count; i++) {
4640 VisitForStackValue(args->at(i));
4641 }
4643 // Record source position of the IC call.
4644 SetSourcePosition(expr->position());
4645 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4646 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4647 __ CallStub(&stub);
4649 // Restore context register.
4650 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4652 context()->DropAndPlug(1, r0);
4653 } else {
4655 const Runtime::Function* function = expr->function();
4656 switch (function->function_id) {
4657 #define CALL_INTRINSIC_GENERATOR(Name) \
4658 case Runtime::kInline##Name: { \
4659 Comment cmnt(masm_, "[ Inline" #Name); \
4660 return Emit##Name(expr); \
4661 }
4662 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4663 #undef CALL_INTRINSIC_GENERATOR
4664 default: {
4665 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4666 // Push the arguments ("left-to-right").
4667 for (int i = 0; i < arg_count; i++) {
4668 VisitForStackValue(args->at(i));
4669 }
4671 // Call the C runtime function.
4672 __ CallRuntime(expr->function(), arg_count);
4673 context()->Plug(r0);
4674 }
4675 }
4676 }
4677 }
4680 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4681 switch (expr->op()) {
4682 case Token::DELETE: {
4683 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4684 Property* property = expr->expression()->AsProperty();
4685 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4687 if (property != NULL) {
4688 VisitForStackValue(property->obj());
4689 VisitForStackValue(property->key());
4690 __ mov(r1, Operand(Smi::FromInt(language_mode())));
4691 __ push(r1);
4692 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4693 context()->Plug(r0);
4694 } else if (proxy != NULL) {
4695 Variable* var = proxy->var();
4696 // Delete of an unqualified identifier is disallowed in strict mode
4697 // but "delete this" is allowed.
4698 DCHECK(is_sloppy(language_mode()) || var->is_this());
4699 if (var->IsUnallocated()) {
4700 __ ldr(r2, GlobalObjectOperand());
4701 __ mov(r1, Operand(var->name()));
4702 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4703 __ Push(r2, r1, r0);
4704 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4705 context()->Plug(r0);
4706 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4707 // Result of deleting non-global, non-dynamic variables is false.
4708 // The subexpression does not have side effects.
4709 context()->Plug(var->is_this());
4710 } else {
4711 // Non-global variable. Call the runtime to try to delete from the
4712 // context where the variable was introduced.
4713 DCHECK(!context_register().is(r2));
4714 __ mov(r2, Operand(var->name()));
4715 __ Push(context_register(), r2);
4716 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4717 context()->Plug(r0);
4718 }
4719 } else {
4720 // Result of deleting non-property, non-variable reference is true.
4721 // The subexpression may have side effects.
4722 VisitForEffect(expr->expression());
4723 context()->Plug(true);
4724 }
4725 break;
4726 }
4728 case Token::VOID: {
4729 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4730 VisitForEffect(expr->expression());
4731 context()->Plug(Heap::kUndefinedValueRootIndex);
4732 break;
4733 }
4735 case Token::NOT: {
4736 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4737 if (context()->IsEffect()) {
4738 // Unary NOT has no side effects so it's only necessary to visit the
4739 // subexpression. Match the optimizing compiler by not branching.
4740 VisitForEffect(expr->expression());
4741 } else if (context()->IsTest()) {
4742 const TestContext* test = TestContext::cast(context());
4743 // The labels are swapped for the recursive call.
4744 VisitForControl(expr->expression(),
4745 test->false_label(),
4746 test->true_label(),
4747 test->fall_through());
4748 context()->Plug(test->true_label(), test->false_label());
4750 // We handle value contexts explicitly rather than simply visiting
4751 // for control and plugging the control flow into the context,
4752 // because we need to prepare a pair of extra administrative AST ids
4753 // for the optimizing compiler.
4754 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4755 Label materialize_true, materialize_false, done;
4756 VisitForControl(expr->expression(),
4757 &materialize_false,
4758 &materialize_true,
4759 &materialize_false);
4760 __ bind(&materialize_true);
4761 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4762 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4763 if (context()->IsStackValue()) __ push(r0);
4764 __ jmp(&done);
4765 __ bind(&materialize_false);
4766 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4767 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4768 if (context()->IsStackValue()) __ push(r0);
4769 __ bind(&done);
4770 }
4771 break;
4772 }
4774 case Token::TYPEOF: {
4775 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4776 { StackValueContext context(this);
4777 VisitForTypeofValue(expr->expression());
4778 }
4779 __ CallRuntime(Runtime::kTypeof, 1);
4780 context()->Plug(r0);
4781 break;
4782 }
4784 default:
4785 UNREACHABLE();
4786 }
4787 }
4790 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4791 DCHECK(expr->expression()->IsValidReferenceExpression());
4793 Comment cmnt(masm_, "[ CountOperation");
4794 SetSourcePosition(expr->position());
4796 Property* prop = expr->expression()->AsProperty();
4797 LhsKind assign_type = GetAssignType(prop);
4799 // Evaluate expression and get value.
4800 if (assign_type == VARIABLE) {
4801 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4802 AccumulatorValueContext context(this);
4803 EmitVariableLoad(expr->expression()->AsVariableProxy());
4804 } else {
4805 // Reserve space for result of postfix operation.
4806 if (expr->is_postfix() && !context()->IsEffect()) {
4807 __ mov(ip, Operand(Smi::FromInt(0)));
4808 __ push(ip);
4809 }
4810 switch (assign_type) {
4811 case NAMED_PROPERTY: {
4812 // Put the object both on the stack and in the register.
4813 VisitForStackValue(prop->obj());
4814 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4815 EmitNamedPropertyLoad(prop);
4816 break;
4817 }
4819 case NAMED_SUPER_PROPERTY: {
4820 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4821 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4822 __ Push(result_register());
4823 const Register scratch = r1;
4824 __ ldr(scratch, MemOperand(sp, kPointerSize));
4825 __ Push(scratch);
4826 __ Push(result_register());
4827 EmitNamedSuperPropertyLoad(prop);
4828 break;
4829 }
4831 case KEYED_SUPER_PROPERTY: {
4832 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4833 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4834 __ Push(result_register());
4835 VisitForAccumulatorValue(prop->key());
4836 __ Push(result_register());
4837 const Register scratch = r1;
4838 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4839 __ Push(scratch);
4840 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4841 __ Push(scratch);
4842 __ Push(result_register());
4843 EmitKeyedSuperPropertyLoad(prop);
4844 break;
4845 }
4847 case KEYED_PROPERTY: {
4848 VisitForStackValue(prop->obj());
4849 VisitForStackValue(prop->key());
4850 __ ldr(LoadDescriptor::ReceiverRegister(),
4851 MemOperand(sp, 1 * kPointerSize));
4852 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4853 EmitKeyedPropertyLoad(prop);
4854 break;
4855 }
4857 case VARIABLE:
4858 UNREACHABLE();
4859 }
4860 }
4862 // We need a second deoptimization point after loading the value
4863 // in case evaluating the property load may have a side effect.
4864 if (assign_type == VARIABLE) {
4865 PrepareForBailout(expr->expression(), TOS_REG);
4866 } else {
4867 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4868 }
4870 // Inline smi case if we are in a loop.
4871 Label stub_call, done;
4872 JumpPatchSite patch_site(masm_);
4874 int count_value = expr->op() == Token::INC ? 1 : -1;
4875 if (ShouldInlineSmiCase(expr->op())) {
4876 Label slow;
4877 patch_site.EmitJumpIfNotSmi(r0, &slow);
4879 // Save result for postfix expressions.
4880 if (expr->is_postfix()) {
4881 if (!context()->IsEffect()) {
4882 // Save the result on the stack. If we have a named or keyed property
4883 // we store the result under the receiver that is currently on top
4884 // of the stack.
4885 switch (assign_type) {
4886 case VARIABLE:
4887 __ push(r0);
4888 break;
4889 case NAMED_PROPERTY:
4890 __ str(r0, MemOperand(sp, kPointerSize));
4891 break;
4892 case NAMED_SUPER_PROPERTY:
4893 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4894 break;
4895 case KEYED_PROPERTY:
4896 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4897 break;
4898 case KEYED_SUPER_PROPERTY:
4899 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4900 break;
4901 }
4902 }
4903 }
4905 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4906 __ b(vc, &done);
4907 // Call stub. Undo operation first.
4908 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4909 __ jmp(&stub_call);
4910 __ bind(&slow);
4911 }
4912 ToNumberStub convert_stub(isolate());
4913 __ CallStub(&convert_stub);
4914 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4916 // Save result for postfix expressions.
4917 if (expr->is_postfix()) {
4918 if (!context()->IsEffect()) {
4919 // Save the result on the stack. If we have a named or keyed property
4920 // we store the result under the receiver that is currently on top
4921 // of the stack.
4922 switch (assign_type) {
4923 case VARIABLE:
4924 __ push(r0);
4925 break;
4926 case NAMED_PROPERTY:
4927 __ str(r0, MemOperand(sp, kPointerSize));
4928 break;
4929 case NAMED_SUPER_PROPERTY:
4930 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4931 break;
4932 case KEYED_PROPERTY:
4933 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4934 break;
4935 case KEYED_SUPER_PROPERTY:
4936 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4937 break;
4938 }
4939 }
4940 }
4943 __ bind(&stub_call);
4944 __ mov(r1, r0);
4945 __ mov(r0, Operand(Smi::FromInt(count_value)));
4947 // Record position before stub call.
4948 SetSourcePosition(expr->position());
4950 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4951 CallIC(code, expr->CountBinOpFeedbackId());
4952 patch_site.EmitPatchInfo();
4953 __ bind(&done);
4955 // Store the value returned in r0.
4956 switch (assign_type) {
4957 case VARIABLE:
4958 if (expr->is_postfix()) {
4959 { EffectContext context(this);
4960 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4961 Token::ASSIGN);
4962 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4963 context.Plug(r0);
4964 }
4965 // For all contexts except EffectContext we have the result on
4966 // top of the stack.
4967 if (!context()->IsEffect()) {
4968 context()->PlugTOS();
4969 }
4970 } else {
4971 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4972 Token::ASSIGN);
4973 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4974 context()->Plug(r0);
4975 }
4976 break;
4977 case NAMED_PROPERTY: {
4978 __ mov(StoreDescriptor::NameRegister(),
4979 Operand(prop->key()->AsLiteral()->value()));
4980 __ pop(StoreDescriptor::ReceiverRegister());
4981 CallStoreIC(expr->CountStoreFeedbackId());
4982 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4983 if (expr->is_postfix()) {
4984 if (!context()->IsEffect()) {
4985 context()->PlugTOS();
4986 }
4987 } else {
4988 context()->Plug(r0);
4989 }
4990 break;
4991 }
4992 case NAMED_SUPER_PROPERTY: {
4993 EmitNamedSuperPropertyStore(prop);
4994 if (expr->is_postfix()) {
4995 if (!context()->IsEffect()) {
4996 context()->PlugTOS();
4997 }
4998 } else {
4999 context()->Plug(r0);
5000 }
5001 break;
5002 }
5003 case KEYED_SUPER_PROPERTY: {
5004 EmitKeyedSuperPropertyStore(prop);
5005 if (expr->is_postfix()) {
5006 if (!context()->IsEffect()) {
5007 context()->PlugTOS();
5008 }
5009 } else {
5010 context()->Plug(r0);
5011 }
5012 break;
5013 }
5014 case KEYED_PROPERTY: {
5015 __ Pop(StoreDescriptor::ReceiverRegister(),
5016 StoreDescriptor::NameRegister());
5017 Handle<Code> ic =
5018 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5019 CallIC(ic, expr->CountStoreFeedbackId());
5020 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5021 if (expr->is_postfix()) {
5022 if (!context()->IsEffect()) {
5023 context()->PlugTOS();
5024 }
5025 } else {
5026 context()->Plug(r0);
5027 }
5028 break;
5029 }
5030 }
5031 }
5034 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5035 DCHECK(!context()->IsEffect());
5036 DCHECK(!context()->IsTest());
5037 VariableProxy* proxy = expr->AsVariableProxy();
5038 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5039 Comment cmnt(masm_, "[ Global variable");
5040 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5041 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5042 if (FLAG_vector_ics) {
5043 __ mov(VectorLoadICDescriptor::SlotRegister(),
5044 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5045 }
5046 // Use a regular load, not a contextual load, to avoid a reference
5047 // error.
5048 CallLoadIC(NOT_CONTEXTUAL);
5049 PrepareForBailout(expr, TOS_REG);
5050 context()->Plug(r0);
5051 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5052 Comment cmnt(masm_, "[ Lookup slot");
5053 Label done, slow;
5055 // Generate code for loading from variables potentially shadowed
5056 // by eval-introduced variables.
5057 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5059 __ bind(&slow);
5060 __ mov(r0, Operand(proxy->name()));
5061 __ Push(cp, r0);
5062 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5063 PrepareForBailout(expr, TOS_REG);
5065 __ bind(&done);
5066 context()->Plug(r0);
5067 } else {
5068 // This expression cannot throw a reference error at the top level.
5069 VisitInDuplicateContext(expr);
5070 }
5071 }
5074 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5075 Expression* sub_expr,
5076 Handle<String> check) {
5077 Label materialize_true, materialize_false;
5078 Label* if_true = NULL;
5079 Label* if_false = NULL;
5080 Label* fall_through = NULL;
5081 context()->PrepareTest(&materialize_true, &materialize_false,
5082 &if_true, &if_false, &fall_through);
5084 { AccumulatorValueContext context(this);
5085 VisitForTypeofValue(sub_expr);
5086 }
5087 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5089 Factory* factory = isolate()->factory();
5090 if (String::Equals(check, factory->number_string())) {
5091 __ JumpIfSmi(r0, if_true);
5092 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5093 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5094 __ cmp(r0, ip);
5095 Split(eq, if_true, if_false, fall_through);
5096 } else if (String::Equals(check, factory->string_string())) {
5097 __ JumpIfSmi(r0, if_false);
5098 // Check for undetectable objects => false.
5099 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
5100 __ b(ge, if_false);
5101 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5102 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5103 Split(eq, if_true, if_false, fall_through);
5104 } else if (String::Equals(check, factory->symbol_string())) {
5105 __ JumpIfSmi(r0, if_false);
5106 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
5107 Split(eq, if_true, if_false, fall_through);
5108 } else if (String::Equals(check, factory->boolean_string())) {
5109 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
5110 __ b(eq, if_true);
5111 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
5112 Split(eq, if_true, if_false, fall_through);
5113 } else if (String::Equals(check, factory->undefined_string())) {
5114 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
5115 __ b(eq, if_true);
5116 __ JumpIfSmi(r0, if_false);
5117 // Check for undetectable objects => true.
5118 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5119 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5120 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5121 Split(ne, if_true, if_false, fall_through);
5123 } else if (String::Equals(check, factory->function_string())) {
5124 __ JumpIfSmi(r0, if_false);
5125 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5126 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
5127 __ b(eq, if_true);
5128 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
5129 Split(eq, if_true, if_false, fall_through);
5130 } else if (String::Equals(check, factory->object_string())) {
5131 __ JumpIfSmi(r0, if_false);
5132 __ CompareRoot(r0, Heap::kNullValueRootIndex);
5133 __ b(eq, if_true);
5134 // Check for JS objects => true.
5135 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5136 __ b(lt, if_false);
5137 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5138 __ b(gt, if_false);
5139 // Check for undetectable objects => false.
5140 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5141 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5142 Split(eq, if_true, if_false, fall_through);
5143 } else {
5144 if (if_false != fall_through) __ jmp(if_false);
5145 }
5146 context()->Plug(if_true, if_false);
5150 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5151 Comment cmnt(masm_, "[ CompareOperation");
5152 SetSourcePosition(expr->position());
5154 // First we try a fast inlined version of the compare when one of
5155 // the operands is a literal.
5156 if (TryLiteralCompare(expr)) return;
5158 // Always perform the comparison for its control flow. Pack the result
5159 // into the expression's context after the comparison is performed.
5160 Label materialize_true, materialize_false;
5161 Label* if_true = NULL;
5162 Label* if_false = NULL;
5163 Label* fall_through = NULL;
5164 context()->PrepareTest(&materialize_true, &materialize_false,
5165 &if_true, &if_false, &fall_through);
5167 Token::Value op = expr->op();
5168 VisitForStackValue(expr->left());
5169 switch (op) {
5170 case Token::IN:
5171 VisitForStackValue(expr->right());
5172 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5173 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5174 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5175 __ cmp(r0, ip);
5176 Split(eq, if_true, if_false, fall_through);
5177 break;
5179 case Token::INSTANCEOF: {
5180 VisitForStackValue(expr->right());
5181 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5182 __ CallStub(&stub);
5183 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5184 // The stub returns 0 for true.
5185 __ cmp(r0, Operand::Zero());
5186 Split(eq, if_true, if_false, fall_through);
5187 break;
5188 }
5190 default: {
5191 VisitForAccumulatorValue(expr->right());
5192 Condition cond = CompareIC::ComputeCondition(op);
5193 __ pop(r1);
5195 bool inline_smi_code = ShouldInlineSmiCase(op);
5196 JumpPatchSite patch_site(masm_);
5197 if (inline_smi_code) {
5198 Label slow_case;
5199 __ orr(r2, r0, Operand(r1));
5200 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
5201 __ cmp(r1, r0);
5202 Split(cond, if_true, if_false, NULL);
5203 __ bind(&slow_case);
5204 }
5206 // Record position and call the compare IC.
5207 SetSourcePosition(expr->position());
5208 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5209 CallIC(ic, expr->CompareOperationFeedbackId());
5210 patch_site.EmitPatchInfo();
5211 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5212 __ cmp(r0, Operand::Zero());
5213 Split(cond, if_true, if_false, fall_through);
5214 }
5215 }
5217 // Convert the result of the comparison into one expected for this
5218 // expression's context.
5219 context()->Plug(if_true, if_false);
5223 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5224 Expression* sub_expr,
5225 NilValue nil) {
5226 Label materialize_true, materialize_false;
5227 Label* if_true = NULL;
5228 Label* if_false = NULL;
5229 Label* fall_through = NULL;
5230 context()->PrepareTest(&materialize_true, &materialize_false,
5231 &if_true, &if_false, &fall_through);
5233 VisitForAccumulatorValue(sub_expr);
5234 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5235 if (expr->op() == Token::EQ_STRICT) {
5236 Heap::RootListIndex nil_value = nil == kNullValue ?
5237 Heap::kNullValueRootIndex :
5238 Heap::kUndefinedValueRootIndex;
5239 __ LoadRoot(r1, nil_value);
5240 __ cmp(r0, r1);
5241 Split(eq, if_true, if_false, fall_through);
5242 } else {
5243 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5244 CallIC(ic, expr->CompareOperationFeedbackId());
5245 __ cmp(r0, Operand(0));
5246 Split(ne, if_true, if_false, fall_through);
5247 }
5248 context()->Plug(if_true, if_false);
5252 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5253 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5254 context()->Plug(r0);
5258 Register FullCodeGenerator::result_register() {
5259 return r0;
5260 }
5263 Register FullCodeGenerator::context_register() {
5264 return cp;
5265 }
5268 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5269 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5270 __ str(value, MemOperand(fp, frame_offset));
5271 }
5274 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5275 __ ldr(dst, ContextOperand(cp, context_index));
5276 }
5279 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5280 Scope* declaration_scope = scope()->DeclarationScope();
5281 if (declaration_scope->is_script_scope() ||
5282 declaration_scope->is_module_scope()) {
5283 // Contexts nested in the native context have a canonical empty function
5284 // as their closure, not the anonymous closure containing the global
5285 // code. Pass a smi sentinel and let the runtime look up the empty
5286 // function.
5287 __ mov(ip, Operand(Smi::FromInt(0)));
5288 } else if (declaration_scope->is_eval_scope()) {
5289 // Contexts created by a call to eval have the same closure as the
5290 // context calling eval, not the anonymous closure containing the eval
5291 // code. Fetch it from the context.
5292 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5293 } else {
5294 DCHECK(declaration_scope->is_function_scope());
5295 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5296 }
5297 __ push(ip);
5298 }
5301 // ----------------------------------------------------------------------------
5302 // Non-local control flow support.
5304 void FullCodeGenerator::EnterFinallyBlock() {
5305 DCHECK(!result_register().is(r1));
5306 // Store result register while executing finally block.
5307 __ push(result_register());
5308 // Cook return address in link register to stack (smi encoded Code* delta)
5309 __ sub(r1, lr, Operand(masm_->CodeObject()));
5310 __ SmiTag(r1);
5312 // Store cooked return address while executing finally block.
5313 __ push(r1);
5315 // Store pending message while executing finally block.
5316 ExternalReference pending_message_obj =
5317 ExternalReference::address_of_pending_message_obj(isolate());
5318 __ mov(ip, Operand(pending_message_obj));
5319 __ ldr(r1, MemOperand(ip));
5320 __ push(r1);
5321 }
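// "Cooking" keeps the saved return address GC-safe: instead of pushing the
// raw lr (which points into generated code that the collector may move), we
// push a smi-tagged delta that stays valid across code relocation. In
// outline:
//
//   cooked = (lr - code_object_start) << kSmiTagSize       // pushed here
//   lr'    = (cooked >> kSmiTagSize) + code_object_start'  // ExitFinallyBlock
//
// Because the delta is a small integer smi, the GC ignores it when scanning
// the stack, and uncooking against the (possibly moved) code object yields
// the correct return address again.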
5324 void FullCodeGenerator::ExitFinallyBlock() {
5325 DCHECK(!result_register().is(r1));
5326 // Restore pending message from stack.
5327 __ pop(r1);
5328 ExternalReference pending_message_obj =
5329 ExternalReference::address_of_pending_message_obj(isolate());
5330 __ mov(ip, Operand(pending_message_obj));
5331 __ str(r1, MemOperand(ip));
5333 // Restore cooked return address from stack.
5334 __ pop(r1);
5336 // Restore result register, then uncook the return address and return.
5337 __ pop(result_register());
5338 __ SmiUntag(r1);
5339 __ add(pc, r1, Operand(masm_->CodeObject()));
5340 }
5346 static Address GetInterruptImmediateLoadAddress(Address pc) {
5347 Address load_address = pc - 2 * Assembler::kInstrSize;
5348 if (!FLAG_enable_ool_constant_pool) {
5349 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
5350 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
5351 // This is an extended constant pool lookup.
5352 if (CpuFeatures::IsSupported(ARMv7)) {
5353 load_address -= 2 * Assembler::kInstrSize;
5354 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5355 DCHECK(Assembler::IsMovT(
5356 Memory::int32_at(load_address + Assembler::kInstrSize)));
5357 } else {
5358 load_address -= 4 * Assembler::kInstrSize;
5359 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5360 DCHECK(Assembler::IsOrrImmed(
5361 Memory::int32_at(load_address + Assembler::kInstrSize)));
5362 DCHECK(Assembler::IsOrrImmed(
5363 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5364 DCHECK(Assembler::IsOrrImmed(
5365 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
5366 }
5367 } else if (CpuFeatures::IsSupported(ARMv7) &&
5368 Assembler::IsMovT(Memory::int32_at(load_address))) {
5369 // This is a movw / movt immediate load.
5370 load_address -= Assembler::kInstrSize;
5371 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5372 } else if (!CpuFeatures::IsSupported(ARMv7) &&
5373 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
5374 // This is a mov / orr immediate load.
5375 load_address -= 3 * Assembler::kInstrSize;
5376 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5377 DCHECK(Assembler::IsOrrImmed(
5378 Memory::int32_at(load_address + Assembler::kInstrSize)));
5379 DCHECK(Assembler::IsOrrImmed(
5380 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5381 } else {
5382 // This is a small constant pool lookup.
5383 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
5384 }
5385 return load_address;
5386 }
5389 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5390 Address pc,
5391 BackEdgeState target_state,
5392 Code* replacement_code) {
5393 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5394 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5395 CodePatcher patcher(branch_address, 1);
5396 switch (target_state) {
5397 case INTERRUPT: {
5399 // <decrement profiling counter>
5400 //  bpl ok
5401 // ; load interrupt stub address into ip - either of (for ARMv7):
5402 // ; <small cp load> | <extended cp load> | <immediate load>
5403 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5404 // | movt ip, #imm | movw ip, #imm
5405 // | ldr ip, [pp, ip]
5406 // ; or (for ARMv6):
5407 // ; <small cp load> | <extended cp load> | <immediate load>
5408 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5409 // | orr ip, ip, #imm> | orr ip, ip, #imm
5410 // | orr ip, ip, #imm> | orr ip, ip, #imm
5411 // | orr ip, ip, #imm> | orr ip, ip, #imm
5412 //  blx ip
5413 // <reset profiling counter>
5414 //  ok-label
5416 // Calculate branch offset to the ok-label - this is the difference
5417 // between the branch address and |pc| (which points at <blx ip>) plus
5418 // kProfileCounterResetSequence instructions
5419 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
5420 kProfileCounterResetSequenceLength;
5421 patcher.masm()->b(branch_offset, pl);
5422 break;
5423 }
5424 case ON_STACK_REPLACEMENT:
5425 case OSR_AFTER_STACK_CHECK:
5426 // <decrement profiling counter>
5427 //  mov r0, r0 (NOP)
5428 // ; load on-stack replacement address into ip - either of (for ARMv7):
5429 // ; <small cp load> | <extended cp load> | <immediate load>
5430 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5431 // | movt ip, #imm> | movw ip, #imm
5432 // | ldr ip, [pp, ip]
5433 // ; or (for ARMv6):
5434 // ; <small cp load> | <extended cp load> | <immediate load>
5435 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5436 // | orr ip, ip, #imm> | orr ip, ip, #imm
5437 // | orr ip, ip, #imm> | orr ip, ip, #imm
5438 // | orr ip, ip, #imm> | orr ip, ip, #imm
5439 //  blx ip
5440 // <reset profiling counter>
5441 //  ok-label
5442 patcher.masm()->nop();
5443 break;
5444 }
5446 // Replace the call address.
5447 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
5448 replacement_code->entry());
5450 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5451 unoptimized_code, pc_immediate_load_address, replacement_code);
5452 }
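// The inverse mapping is what GetBackEdgeState below relies on: the
// instruction at branch_address distinguishes the states -- a conditional
// branch (bpl ok) means INTERRUPT, a nop means one of the OSR states -- and
// the address stored at pc_immediate_load_address then separates the two OSR
// states by which builtin it targets. A sketch of the decision:
//
//   if (is_branch(instr_at(branch_address)))     return INTERRUPT;
//   if (target == OnStackReplacement entry)      return ON_STACK_REPLACEMENT;
//   /* otherwise */                              return OSR_AFTER_STACK_CHECK;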
5455 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5456 Isolate* isolate,
5457 Code* unoptimized_code,
5458 Address pc) {
5459 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
5461 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5462 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5463 Address interrupt_address = Assembler::target_address_at(
5464 pc_immediate_load_address, unoptimized_code);
5466 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5467 DCHECK(interrupt_address ==
5468 isolate->builtins()->InterruptCheck()->entry());
5469 return INTERRUPT;
5470 }
5472 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5474 if (interrupt_address ==
5475 isolate->builtins()->OnStackReplacement()->entry()) {
5476 return ON_STACK_REPLACEMENT;
5477 }
5479 DCHECK(interrupt_address ==
5480 isolate->builtins()->OsrAfterStackCheck()->entry());
5481 return OSR_AFTER_STACK_CHECK;
5482 }
5485 } } // namespace v8::internal
5487 #endif // V8_TARGET_ARCH_ARM