1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/codegen.h"
10 #include "src/compiler.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parser.h"
15 #include "src/scopes.h"
17 #include "src/arm/code-stubs-arm.h"
18 #include "src/arm/macro-assembler-arm.h"
23 #define __ ACCESS_MASM(masm_)
26 // A patch site is a location in the code which it is possible to patch. This
27 // class has a number of methods to emit the code which is patchable and the
28 // method EmitPatchInfo to record a marker back to the patchable code. This
29 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
30 // immediate value is used) is the delta from the pc to the first instruction of
31 // the patchable code.
32 class JumpPatchSite BASE_EMBEDDED {
34 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
36 info_emitted_ = false;
41 DCHECK(patch_site_.is_bound() == info_emitted_);
44 // When initially emitting this ensure that a jump is always generated to skip
45 // the inlined smi code.
46 void EmitJumpIfNotSmi(Register reg, Label* target) {
47 DCHECK(!patch_site_.is_bound() && !info_emitted_);
48 Assembler::BlockConstPoolScope block_const_pool(masm_);
49 __ bind(&patch_site_);
50 __ cmp(reg, Operand(reg));
51 __ b(eq, target); // Always taken before patched.
54 // When initially emitting this ensure that a jump is never generated to skip
55 // the inlined smi code.
56 void EmitJumpIfSmi(Register reg, Label* target) {
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 Assembler::BlockConstPoolScope block_const_pool(masm_);
59 __ bind(&patch_site_);
60 __ cmp(reg, Operand(reg));
61 __ b(ne, target); // Never taken before patched.
64 void EmitPatchInfo() {
65 // Block literal pool emission whilst recording patch site information.
66 Assembler::BlockConstPoolScope block_const_pool(masm_);
67 if (patch_site_.is_bound()) {
68 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
70 reg.set_code(delta_to_patch_site / kOff12Mask);
71 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
76 __ nop(); // Signals no inlined code.
81 MacroAssembler* masm_;
89 // Generate code for a JS function. On entry to the function the receiver
90 // and arguments have been pushed on the stack left to right. The actual
91 // argument count matches the formal parameter count expected by the
94 // The live registers are:
95 // o r1: the JS function object being called (i.e., ourselves)
97 // o pp: our caller's constant pool pointer (if enabled)
98 // o fp: our caller's frame pointer
99 // o sp: stack pointer
100 // o lr: return address
102 // The function builds a JS frame. Please see JavaScriptFrameConstants in
103 // frames-arm.h for its layout.
104 void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(function());
109 Comment cmnt(masm_, "[ function compiled by full code generator");
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
114 if (strlen(FLAG_stop_at) > 0 &&
115 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
120 // Sloppy mode functions and builtins need to replace the receiver with the
121 // global proxy when called as functions (without an explicit receiver
123 if (is_sloppy(info->language_mode()) && !info->is_native() &&
124 info->MayUseThis() && info->scope()->has_this_declaration()) {
126 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
127 __ ldr(r2, MemOperand(sp, receiver_offset));
128 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
131 __ ldr(r2, GlobalObjectOperand());
132 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
134 __ str(r2, MemOperand(sp, receiver_offset));
139 // Open a frame scope to indicate that there is a frame on the stack. The
140 // MANUAL indicates that the scope shouldn't actually generate code to set up
141 // the frame (that is done below).
142 FrameScope frame_scope(masm_, StackFrame::MANUAL);
144 info->set_prologue_offset(masm_->pc_offset());
145 __ Prologue(info->IsCodePreAgingActive());
146 info->AddNoFrameRange(0, masm_->pc_offset());
148 { Comment cmnt(masm_, "[ Allocate locals");
149 int locals_count = info->scope()->num_stack_slots();
150 // Generators allocate locals, if any, in context slots.
151 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
152 if (locals_count > 0) {
153 if (locals_count >= 128) {
155 __ sub(r9, sp, Operand(locals_count * kPointerSize));
156 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
157 __ cmp(r9, Operand(r2));
159 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
162 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
163 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
164 if (locals_count >= kMaxPushes) {
165 int loop_iterations = locals_count / kMaxPushes;
166 __ mov(r2, Operand(loop_iterations));
168 __ bind(&loop_header);
170 for (int i = 0; i < kMaxPushes; i++) {
173 // Continue loop if not done.
174 __ sub(r2, r2, Operand(1), SetCC);
175 __ b(&loop_header, ne);
177 int remaining = locals_count % kMaxPushes;
178 // Emit the remaining pushes.
179 for (int i = 0; i < remaining; i++) {
185 bool function_in_register = true;
187 // Possibly allocate a local context.
188 if (info->scope()->num_heap_slots() > 0) {
189 // Argument to NewContext is the function, which is still in r1.
190 Comment cmnt(masm_, "[ Allocate context");
191 bool need_write_barrier = true;
192 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
193 if (info->scope()->is_script_scope()) {
195 __ Push(info->scope()->GetScopeInfo(info->isolate()));
196 __ CallRuntime(Runtime::kNewScriptContext, 2);
197 } else if (slots <= FastNewContextStub::kMaximumSlots) {
198 FastNewContextStub stub(isolate(), slots);
200 // Result of FastNewContextStub is always in new space.
201 need_write_barrier = false;
204 __ CallRuntime(Runtime::kNewFunctionContext, 1);
206 function_in_register = false;
207 // Context is returned in r0. It replaces the context passed to us.
208 // It's saved in the stack and kept live in cp.
210 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
211 // Copy any necessary parameters into the context.
212 int num_parameters = info->scope()->num_parameters();
213 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
214 for (int i = first_parameter; i < num_parameters; i++) {
215 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
216 if (var->IsContextSlot()) {
217 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
218 (num_parameters - 1 - i) * kPointerSize;
219 // Load parameter from stack.
220 __ ldr(r0, MemOperand(fp, parameter_offset));
221 // Store it in the context.
222 MemOperand target = ContextOperand(cp, var->index());
225 // Update the write barrier.
226 if (need_write_barrier) {
227 __ RecordWriteContextSlot(
228 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
229 } else if (FLAG_debug_code) {
231 __ JumpIfInNewSpace(cp, r0, &done);
232 __ Abort(kExpectedNewSpaceObject);
239 // Possibly set up a local binding to the this function which is used in
240 // derived constructors with super calls.
241 Variable* this_function_var = scope()->this_function_var();
242 if (this_function_var != nullptr) {
243 Comment cmnt(masm_, "[ This function");
244 if (!function_in_register) {
245 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
246 // The write barrier clobbers register again, keep is marked as such.
248 SetVar(this_function_var, r1, r0, r2);
251 Variable* new_target_var = scope()->new_target_var();
252 if (new_target_var != nullptr) {
253 Comment cmnt(masm_, "[ new.target");
255 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
256 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
257 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
258 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
259 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
260 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
261 Label non_construct_frame, done;
263 __ b(ne, &non_construct_frame);
265 MemOperand(r2, ConstructFrameConstants::kOriginalConstructorOffset));
268 __ bind(&non_construct_frame);
269 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
272 SetVar(new_target_var, r0, r2, r3);
275 // Possibly allocate RestParameters
277 Variable* rest_param = scope()->rest_parameter(&rest_index);
279 Comment cmnt(masm_, "[ Allocate rest parameter array");
281 int num_parameters = info->scope()->num_parameters();
282 int offset = num_parameters * kPointerSize;
284 __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
285 __ mov(r2, Operand(Smi::FromInt(num_parameters)));
286 __ mov(r1, Operand(Smi::FromInt(rest_index)));
287 __ mov(r0, Operand(Smi::FromInt(language_mode())));
288 __ Push(r3, r2, r1, r0);
290 RestParamAccessStub stub(isolate());
293 SetVar(rest_param, r0, r1, r2);
296 Variable* arguments = scope()->arguments();
297 if (arguments != NULL) {
298 // Function uses arguments object.
299 Comment cmnt(masm_, "[ Allocate arguments object");
300 if (!function_in_register) {
301 // Load this again, if it's used by the local context below.
302 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
306 // Receiver is just before the parameters on the caller's stack.
307 int num_parameters = info->scope()->num_parameters();
308 int offset = num_parameters * kPointerSize;
310 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
311 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
314 // Arguments to ArgumentsAccessStub:
315 // function, receiver address, parameter count.
316 // The stub will rewrite receiver and parameter count if the previous
317 // stack frame was an arguments adapter frame.
318 ArgumentsAccessStub::Type type;
319 if (is_strict(language_mode()) || !has_simple_parameters()) {
320 type = ArgumentsAccessStub::NEW_STRICT;
321 } else if (function()->has_duplicate_parameters()) {
322 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
324 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
326 ArgumentsAccessStub stub(isolate(), type);
329 SetVar(arguments, r0, r1, r2);
334 __ CallRuntime(Runtime::kTraceEnter, 0);
337 // Visit the declarations and body unless there is an illegal
339 if (scope()->HasIllegalRedeclaration()) {
340 Comment cmnt(masm_, "[ Declarations");
341 scope()->VisitIllegalRedeclaration(this);
344 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
345 { Comment cmnt(masm_, "[ Declarations");
346 VisitDeclarations(scope()->declarations());
349 // Assert that the declarations do not use ICs. Otherwise the debugger
350 // won't be able to redirect a PC at an IC to the correct IC in newly
352 DCHECK_EQ(0, ic_total_count_);
354 { Comment cmnt(masm_, "[ Stack check");
355 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
357 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
358 __ cmp(sp, Operand(ip));
360 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
361 PredictableCodeSizeScope predictable(masm_);
362 predictable.ExpectSize(
363 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
364 __ Call(stack_check, RelocInfo::CODE_TARGET);
368 { Comment cmnt(masm_, "[ Body");
369 DCHECK(loop_depth() == 0);
370 VisitStatements(function()->body());
371 DCHECK(loop_depth() == 0);
375 // Always emit a 'return undefined' in case control fell off the end of
377 { Comment cmnt(masm_, "[ return <undefined>;");
378 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
380 EmitReturnSequence();
382 // Force emit the constant pool, so it doesn't get emitted in the middle
383 // of the back edge table.
384 masm()->CheckConstPool(true, false);
388 void FullCodeGenerator::ClearAccumulator() {
389 __ mov(r0, Operand(Smi::FromInt(0)));
393 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
394 __ mov(r2, Operand(profiling_counter_));
395 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
396 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
397 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
401 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
402 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
404 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
408 void FullCodeGenerator::EmitProfilingCounterReset() {
409 Assembler::BlockConstPoolScope block_const_pool(masm_);
410 PredictableCodeSizeScope predictable_code_size_scope(
411 masm_, kProfileCounterResetSequenceLength);
414 int reset_value = FLAG_interrupt_budget;
415 __ mov(r2, Operand(profiling_counter_));
416 // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
417 // instructions (for ARMv6) depending upon whether it is an extended constant
418 // pool - insert nop to compensate.
419 int expected_instr_count =
420 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
421 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
422 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
425 __ mov(r3, Operand(Smi::FromInt(reset_value)));
426 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
430 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
431 Label* back_edge_target) {
432 Comment cmnt(masm_, "[ Back edge bookkeeping");
433 // Block literal pools whilst emitting back edge code.
434 Assembler::BlockConstPoolScope block_const_pool(masm_);
437 DCHECK(back_edge_target->is_bound());
438 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
439 int weight = Min(kMaxBackEdgeWeight,
440 Max(1, distance / kCodeSizeMultiplier));
441 EmitProfilingCounterDecrement(weight);
443 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
445 // Record a mapping of this PC offset to the OSR id. This is used to find
446 // the AST id from the unoptimized code in order to use it as a key into
447 // the deoptimization input data found in the optimized code.
448 RecordBackEdge(stmt->OsrEntryId());
450 EmitProfilingCounterReset();
453 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
454 // Record a mapping of the OSR id to this PC. This is used if the OSR
455 // entry becomes the target of a bailout. We don't expect it to be, but
456 // we want it to work if it is.
457 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
461 void FullCodeGenerator::EmitReturnSequence() {
462 Comment cmnt(masm_, "[ Return sequence");
463 if (return_label_.is_bound()) {
464 __ b(&return_label_);
466 __ bind(&return_label_);
468 // Push the return value on the stack as the parameter.
469 // Runtime::TraceExit returns its parameter in r0.
471 __ CallRuntime(Runtime::kTraceExit, 1);
473 // Pretend that the exit is a backwards jump to the entry.
475 if (info_->ShouldSelfOptimize()) {
476 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
478 int distance = masm_->pc_offset();
479 weight = Min(kMaxBackEdgeWeight,
480 Max(1, distance / kCodeSizeMultiplier));
482 EmitProfilingCounterDecrement(weight);
486 __ Call(isolate()->builtins()->InterruptCheck(),
487 RelocInfo::CODE_TARGET);
489 EmitProfilingCounterReset();
492 // Make sure that the constant pool is not emitted inside of the return
494 { Assembler::BlockConstPoolScope block_const_pool(masm_);
495 int32_t arg_count = info_->scope()->num_parameters() + 1;
496 int32_t sp_delta = arg_count * kPointerSize;
497 SetReturnPosition(function());
498 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
499 PredictableCodeSizeScope predictable(masm_, -1);
500 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
501 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
502 __ add(sp, sp, Operand(sp_delta));
504 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
511 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
512 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
513 codegen()->GetVar(result_register(), var);
514 __ push(result_register());
518 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
522 void FullCodeGenerator::AccumulatorValueContext::Plug(
523 Heap::RootListIndex index) const {
524 __ LoadRoot(result_register(), index);
528 void FullCodeGenerator::StackValueContext::Plug(
529 Heap::RootListIndex index) const {
530 __ LoadRoot(result_register(), index);
531 __ push(result_register());
535 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
536 codegen()->PrepareForBailoutBeforeSplit(condition(),
540 if (index == Heap::kUndefinedValueRootIndex ||
541 index == Heap::kNullValueRootIndex ||
542 index == Heap::kFalseValueRootIndex) {
543 if (false_label_ != fall_through_) __ b(false_label_);
544 } else if (index == Heap::kTrueValueRootIndex) {
545 if (true_label_ != fall_through_) __ b(true_label_);
547 __ LoadRoot(result_register(), index);
548 codegen()->DoTest(this);
553 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
557 void FullCodeGenerator::AccumulatorValueContext::Plug(
558 Handle<Object> lit) const {
559 __ mov(result_register(), Operand(lit));
563 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
564 // Immediates cannot be pushed directly.
565 __ mov(result_register(), Operand(lit));
566 __ push(result_register());
570 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
571 codegen()->PrepareForBailoutBeforeSplit(condition(),
575 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
576 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
577 if (false_label_ != fall_through_) __ b(false_label_);
578 } else if (lit->IsTrue() || lit->IsJSObject()) {
579 if (true_label_ != fall_through_) __ b(true_label_);
580 } else if (lit->IsString()) {
581 if (String::cast(*lit)->length() == 0) {
582 if (false_label_ != fall_through_) __ b(false_label_);
584 if (true_label_ != fall_through_) __ b(true_label_);
586 } else if (lit->IsSmi()) {
587 if (Smi::cast(*lit)->value() == 0) {
588 if (false_label_ != fall_through_) __ b(false_label_);
590 if (true_label_ != fall_through_) __ b(true_label_);
593 // For simplicity we always test the accumulator register.
594 __ mov(result_register(), Operand(lit));
595 codegen()->DoTest(this);
600 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
601 Register reg) const {
607 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
609 Register reg) const {
612 __ Move(result_register(), reg);
616 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
617 Register reg) const {
619 if (count > 1) __ Drop(count - 1);
620 __ str(reg, MemOperand(sp, 0));
624 void FullCodeGenerator::TestContext::DropAndPlug(int count,
625 Register reg) const {
627 // For simplicity we always test the accumulator register.
629 __ Move(result_register(), reg);
630 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
631 codegen()->DoTest(this);
635 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
636 Label* materialize_false) const {
637 DCHECK(materialize_true == materialize_false);
638 __ bind(materialize_true);
642 void FullCodeGenerator::AccumulatorValueContext::Plug(
643 Label* materialize_true,
644 Label* materialize_false) const {
646 __ bind(materialize_true);
647 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
649 __ bind(materialize_false);
650 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
655 void FullCodeGenerator::StackValueContext::Plug(
656 Label* materialize_true,
657 Label* materialize_false) const {
659 __ bind(materialize_true);
660 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
662 __ bind(materialize_false);
663 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
669 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
670 Label* materialize_false) const {
671 DCHECK(materialize_true == true_label_);
672 DCHECK(materialize_false == false_label_);
676 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
677 Heap::RootListIndex value_root_index =
678 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
679 __ LoadRoot(result_register(), value_root_index);
683 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
684 Heap::RootListIndex value_root_index =
685 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
686 __ LoadRoot(ip, value_root_index);
691 void FullCodeGenerator::TestContext::Plug(bool flag) const {
692 codegen()->PrepareForBailoutBeforeSplit(condition(),
697 if (true_label_ != fall_through_) __ b(true_label_);
699 if (false_label_ != fall_through_) __ b(false_label_);
704 void FullCodeGenerator::DoTest(Expression* condition,
707 Label* fall_through) {
708 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
709 CallIC(ic, condition->test_id());
710 __ tst(result_register(), result_register());
711 Split(ne, if_true, if_false, fall_through);
715 void FullCodeGenerator::Split(Condition cond,
718 Label* fall_through) {
719 if (if_false == fall_through) {
721 } else if (if_true == fall_through) {
722 __ b(NegateCondition(cond), if_false);
730 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
731 DCHECK(var->IsStackAllocated());
732 // Offset is negative because higher indexes are at lower addresses.
733 int offset = -var->index() * kPointerSize;
734 // Adjust by a (parameter or local) base offset.
735 if (var->IsParameter()) {
736 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
738 offset += JavaScriptFrameConstants::kLocal0Offset;
740 return MemOperand(fp, offset);
744 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
745 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
746 if (var->IsContextSlot()) {
747 int context_chain_length = scope()->ContextChainLength(var->scope());
748 __ LoadContext(scratch, context_chain_length);
749 return ContextOperand(scratch, var->index());
751 return StackOperand(var);
756 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
757 // Use destination as scratch.
758 MemOperand location = VarOperand(var, dest);
759 __ ldr(dest, location);
763 void FullCodeGenerator::SetVar(Variable* var,
767 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
768 DCHECK(!scratch0.is(src));
769 DCHECK(!scratch0.is(scratch1));
770 DCHECK(!scratch1.is(src));
771 MemOperand location = VarOperand(var, scratch0);
772 __ str(src, location);
774 // Emit the write barrier code if the location is in the heap.
775 if (var->IsContextSlot()) {
776 __ RecordWriteContextSlot(scratch0,
786 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
787 bool should_normalize,
790 // Only prepare for bailouts before splits if we're in a test
791 // context. Otherwise, we let the Visit function deal with the
792 // preparation to avoid preparing with the same AST id twice.
793 if (!context()->IsTest() || !info_->IsOptimizable()) return;
796 if (should_normalize) __ b(&skip);
797 PrepareForBailout(expr, TOS_REG);
798 if (should_normalize) {
799 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
801 Split(eq, if_true, if_false, NULL);
807 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
808 // The variable in the declaration always resides in the current function
810 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
811 if (generate_debug_code_) {
812 // Check that we're not inside a with or catch context.
813 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
814 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
815 __ Check(ne, kDeclarationInWithContext);
816 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
817 __ Check(ne, kDeclarationInCatchContext);
822 void FullCodeGenerator::VisitVariableDeclaration(
823 VariableDeclaration* declaration) {
824 // If it was not possible to allocate the variable at compile time, we
825 // need to "declare" it at runtime to make sure it actually exists in the
827 VariableProxy* proxy = declaration->proxy();
828 VariableMode mode = declaration->mode();
829 Variable* variable = proxy->var();
830 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
831 switch (variable->location()) {
832 case VariableLocation::GLOBAL:
833 case VariableLocation::UNALLOCATED:
834 globals_->Add(variable->name(), zone());
835 globals_->Add(variable->binding_needs_init()
836 ? isolate()->factory()->the_hole_value()
837 : isolate()->factory()->undefined_value(),
841 case VariableLocation::PARAMETER:
842 case VariableLocation::LOCAL:
844 Comment cmnt(masm_, "[ VariableDeclaration");
845 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
846 __ str(ip, StackOperand(variable));
850 case VariableLocation::CONTEXT:
852 Comment cmnt(masm_, "[ VariableDeclaration");
853 EmitDebugCheckDeclarationContext(variable);
854 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
855 __ str(ip, ContextOperand(cp, variable->index()));
856 // No write barrier since the_hole_value is in old space.
857 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
861 case VariableLocation::LOOKUP: {
862 Comment cmnt(masm_, "[ VariableDeclaration");
863 __ mov(r2, Operand(variable->name()));
864 // Declaration nodes are always introduced in one of four modes.
865 DCHECK(IsDeclaredVariableMode(mode));
866 // Push initial value, if any.
867 // Note: For variables we must not push an initial value (such as
868 // 'undefined') because we may have a (legal) redeclaration and we
869 // must not destroy the current value.
871 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
873 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
876 __ CallRuntime(IsImmutableVariableMode(mode)
877 ? Runtime::kDeclareReadOnlyLookupSlot
878 : Runtime::kDeclareLookupSlot,
886 void FullCodeGenerator::VisitFunctionDeclaration(
887 FunctionDeclaration* declaration) {
888 VariableProxy* proxy = declaration->proxy();
889 Variable* variable = proxy->var();
890 switch (variable->location()) {
891 case VariableLocation::GLOBAL:
892 case VariableLocation::UNALLOCATED: {
893 globals_->Add(variable->name(), zone());
894 Handle<SharedFunctionInfo> function =
895 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
896 // Check for stack-overflow exception.
897 if (function.is_null()) return SetStackOverflow();
898 globals_->Add(function, zone());
902 case VariableLocation::PARAMETER:
903 case VariableLocation::LOCAL: {
904 Comment cmnt(masm_, "[ FunctionDeclaration");
905 VisitForAccumulatorValue(declaration->fun());
906 __ str(result_register(), StackOperand(variable));
910 case VariableLocation::CONTEXT: {
911 Comment cmnt(masm_, "[ FunctionDeclaration");
912 EmitDebugCheckDeclarationContext(variable);
913 VisitForAccumulatorValue(declaration->fun());
914 __ str(result_register(), ContextOperand(cp, variable->index()));
915 int offset = Context::SlotOffset(variable->index());
916 // We know that we have written a function, which is not a smi.
917 __ RecordWriteContextSlot(cp,
925 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
929 case VariableLocation::LOOKUP: {
930 Comment cmnt(masm_, "[ FunctionDeclaration");
931 __ mov(r2, Operand(variable->name()));
933 // Push initial value for function declaration.
934 VisitForStackValue(declaration->fun());
935 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
942 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
943 // Call the runtime to declare the globals.
944 __ mov(r1, Operand(pairs));
945 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
947 __ CallRuntime(Runtime::kDeclareGlobals, 2);
948 // Return value is ignored.
952 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
953 // Call the runtime to declare the modules.
954 __ Push(descriptions);
955 __ CallRuntime(Runtime::kDeclareModules, 1);
956 // Return value is ignored.
960 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
961 Comment cmnt(masm_, "[ SwitchStatement");
962 Breakable nested_statement(this, stmt);
963 SetStatementPosition(stmt);
965 // Keep the switch value on the stack until a case matches.
966 VisitForStackValue(stmt->tag());
967 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
969 ZoneList<CaseClause*>* clauses = stmt->cases();
970 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
972 Label next_test; // Recycled for each test.
973 // Compile all the tests with branches to their bodies.
974 for (int i = 0; i < clauses->length(); i++) {
975 CaseClause* clause = clauses->at(i);
976 clause->body_target()->Unuse();
978 // The default is not a test, but remember it as final fall through.
979 if (clause->is_default()) {
980 default_clause = clause;
984 Comment cmnt(masm_, "[ Case comparison");
988 // Compile the label expression.
989 VisitForAccumulatorValue(clause->label());
991 // Perform the comparison as if via '==='.
992 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
993 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
994 JumpPatchSite patch_site(masm_);
995 if (inline_smi_code) {
998 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1001 __ b(ne, &next_test);
1002 __ Drop(1); // Switch value is no longer needed.
1003 __ b(clause->body_target());
1004 __ bind(&slow_case);
1007 // Record position before stub call for type feedback.
1008 SetExpressionPosition(clause);
1009 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1010 strength(language_mode())).code();
1011 CallIC(ic, clause->CompareId());
1012 patch_site.EmitPatchInfo();
1016 PrepareForBailout(clause, TOS_REG);
1017 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1019 __ b(ne, &next_test);
1021 __ jmp(clause->body_target());
1024 __ cmp(r0, Operand::Zero());
1025 __ b(ne, &next_test);
1026 __ Drop(1); // Switch value is no longer needed.
1027 __ b(clause->body_target());
1030 // Discard the test value and jump to the default if present, otherwise to
1031 // the end of the statement.
1032 __ bind(&next_test);
1033 __ Drop(1); // Switch value is no longer needed.
1034 if (default_clause == NULL) {
1035 __ b(nested_statement.break_label());
1037 __ b(default_clause->body_target());
1040 // Compile all the case bodies.
1041 for (int i = 0; i < clauses->length(); i++) {
1042 Comment cmnt(masm_, "[ Case body");
1043 CaseClause* clause = clauses->at(i);
1044 __ bind(clause->body_target());
1045 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1046 VisitStatements(clause->statements());
1049 __ bind(nested_statement.break_label());
1050 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1054 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1055 Comment cmnt(masm_, "[ ForInStatement");
1056 SetStatementPosition(stmt, SKIP_BREAK);
1058 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1061 ForIn loop_statement(this, stmt);
1062 increment_loop_depth();
1064 // Get the object to enumerate over. If the object is null or undefined, skip
1065 // over the loop. See ECMA-262 version 5, section 12.6.4.
1066 SetExpressionAsStatementPosition(stmt->enumerable());
1067 VisitForAccumulatorValue(stmt->enumerable());
1068 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1071 Register null_value = r5;
1072 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1073 __ cmp(r0, null_value);
1076 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1078 // Convert the object to a JS object.
1079 Label convert, done_convert;
1080 __ JumpIfSmi(r0, &convert);
1081 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1082 __ b(ge, &done_convert);
1084 ToObjectStub stub(isolate());
1086 __ bind(&done_convert);
1087 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1090 // Check for proxies.
1092 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1093 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1094 __ b(le, &call_runtime);
1096 // Check cache validity in generated code. This is a fast case for
1097 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1098 // guarantee cache validity, call the runtime system to check cache
1099 // validity or get the property names in a fixed array.
1100 __ CheckEnumCache(null_value, &call_runtime);
1102 // The enum cache is valid. Load the map of the object being
1103 // iterated over and use the cache for the iteration.
1105 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1108 // Get the set of properties to enumerate.
1109 __ bind(&call_runtime);
1110 __ push(r0); // Duplicate the enumerable object on the stack.
1111 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1112 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1114 // If we got a map from the runtime call, we can do a fast
1115 // modification check. Otherwise, we got a fixed array, and we have
1116 // to do a slow check.
1118 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1119 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1121 __ b(ne, &fixed_array);
1123 // We got a map in register r0. Get the enumeration cache from it.
1124 Label no_descriptors;
1125 __ bind(&use_cache);
1127 __ EnumLength(r1, r0);
1128 __ cmp(r1, Operand(Smi::FromInt(0)));
1129 __ b(eq, &no_descriptors);
1131 __ LoadInstanceDescriptors(r0, r2);
1132 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1133 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1135 // Set up the four remaining stack slots.
1136 __ push(r0); // Map.
1137 __ mov(r0, Operand(Smi::FromInt(0)));
1138 // Push enumeration cache, enumeration cache length (as smi) and zero.
1139 __ Push(r2, r1, r0);
1142 __ bind(&no_descriptors);
1146 // We got a fixed array in register r0. Iterate through that.
1148 __ bind(&fixed_array);
1150 __ Move(r1, FeedbackVector());
1151 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1152 int vector_index = FeedbackVector()->GetIndex(slot);
1153 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
1155 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1156 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1157 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1158 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1159 __ b(gt, &non_proxy);
1160 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1161 __ bind(&non_proxy);
1162 __ Push(r1, r0); // Smi and array
1163 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1164 __ mov(r0, Operand(Smi::FromInt(0)));
1165 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1167 // Generate code for doing the condition check.
1168 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1170 SetExpressionAsStatementPosition(stmt->each());
1172 // Load the current count to r0, load the length to r1.
1173 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1174 __ cmp(r0, r1); // Compare to the array length.
1175 __ b(hs, loop_statement.break_label());
1177 // Get the current entry of the array into register r3.
1178 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1179 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1180 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
1182 // Get the expected map from the stack or a smi in the
1183 // permanent slow case into register r2.
1184 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1186 // Check if the expected map still matches that of the enumerable.
1187 // If not, we may have to filter the key.
1189 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1190 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1191 __ cmp(r4, Operand(r2));
1192 __ b(eq, &update_each);
1194 // For proxies, no filtering is done.
1195 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1196 __ cmp(r2, Operand(Smi::FromInt(0)));
1197 __ b(eq, &update_each);
1199 // Convert the entry to a string or (smi) 0 if it isn't a property
1200 // any more. If the property has been removed while iterating, we
1202 __ push(r1); // Enumerable.
1203 __ push(r3); // Current entry.
1204 __ CallRuntime(Runtime::kForInFilter, 2);
1205 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1206 __ mov(r3, Operand(r0));
1207 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1209 __ b(eq, loop_statement.continue_label());
1211 // Update the 'each' property or variable from the possibly filtered
1212 // entry in register r3.
1213 __ bind(&update_each);
1214 __ mov(result_register(), r3);
1215 // Perform the assignment as if via '='.
1216 { EffectContext context(this);
1217 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1218 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1221 // Generate code for the body of the loop.
1222 Visit(stmt->body());
1224 // Generate code for the going to the next element by incrementing
1225 // the index (smi) stored on top of the stack.
1226 __ bind(loop_statement.continue_label());
1228 __ add(r0, r0, Operand(Smi::FromInt(1)));
1231 EmitBackEdgeBookkeeping(stmt, &loop);
1234 // Remove the pointers stored on the stack.
1235 __ bind(loop_statement.break_label());
1238 // Exit and decrement the loop depth.
1239 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1241 decrement_loop_depth();
1245 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1247 // Use the fast case closure allocation code that allocates in new
1248 // space for nested functions that don't need literals cloning. If
1249 // we're running with the --always-opt or the --prepare-always-opt
1250 // flag, we need to use the runtime function so that the new function
1251 // we are creating here gets a chance to have its code optimized and
1252 // doesn't just get a copy of the existing unoptimized code.
1253 if (!FLAG_always_opt &&
1254 !FLAG_prepare_always_opt &&
1256 scope()->is_function_scope() &&
1257 info->num_literals() == 0) {
1258 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1259 __ mov(r2, Operand(info));
1262 __ mov(r0, Operand(info));
1263 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1264 : Heap::kFalseValueRootIndex);
1265 __ Push(cp, r0, r1);
1266 __ CallRuntime(Runtime::kNewClosure, 3);
1268 context()->Plug(r0);
1272 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1274 FeedbackVectorICSlot slot) {
1275 if (NeedsHomeObject(initializer)) {
1276 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1277 __ mov(StoreDescriptor::NameRegister(),
1278 Operand(isolate()->factory()->home_object_symbol()));
1279 __ ldr(StoreDescriptor::ValueRegister(),
1280 MemOperand(sp, offset * kPointerSize));
1281 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1287 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1288 TypeofMode typeof_mode,
1290 Register current = cp;
1296 if (s->num_heap_slots() > 0) {
1297 if (s->calls_sloppy_eval()) {
1298 // Check that extension is NULL.
1299 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1303 // Load next context in chain.
1304 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1305 // Walk the rest of the chain without clobbering cp.
1308 // If no outer scope calls eval, we do not need to check more
1309 // context extensions.
1310 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1311 s = s->outer_scope();
1314 if (s->is_eval_scope()) {
1316 if (!current.is(next)) {
1317 __ Move(next, current);
1320 // Terminate at native context.
1321 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1322 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1325 // Check that extension is NULL.
1326 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1329 // Load next context in chain.
1330 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1335 // All extension objects were empty and it is safe to use a normal global
1337 EmitGlobalVariableLoad(proxy, typeof_mode);
1341 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1343 DCHECK(var->IsContextSlot());
1344 Register context = cp;
1348 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1349 if (s->num_heap_slots() > 0) {
1350 if (s->calls_sloppy_eval()) {
1351 // Check that extension is NULL.
1352 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1356 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1357 // Walk the rest of the chain without clobbering cp.
1361 // Check that last extension is NULL.
1362 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1366 // This function is used only for loads, not stores, so it's safe to
1367 // return an cp-based operand (the write barrier cannot be allowed to
1368 // destroy the cp register).
1369 return ContextOperand(context, var->index());
1373 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1374 TypeofMode typeof_mode,
1375 Label* slow, Label* done) {
1376 // Generate fast-case code for variables that might be shadowed by
1377 // eval-introduced variables. Eval is used a lot without
1378 // introducing variables. In those cases, we do not want to
1379 // perform a runtime call for all variables in the scope
1380 // containing the eval.
1381 Variable* var = proxy->var();
1382 if (var->mode() == DYNAMIC_GLOBAL) {
1383 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1385 } else if (var->mode() == DYNAMIC_LOCAL) {
1386 Variable* local = var->local_if_not_shadowed();
1387 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1388 if (local->mode() == LET || local->mode() == CONST ||
1389 local->mode() == CONST_LEGACY) {
1390 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1391 if (local->mode() == CONST_LEGACY) {
1392 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1393 } else { // LET || CONST
1395 __ mov(r0, Operand(var->name()));
1397 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1405 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1406 TypeofMode typeof_mode) {
1407 Variable* var = proxy->var();
1408 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1409 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1410 if (var->IsGlobalSlot()) {
1411 DCHECK(var->index() > 0);
1412 DCHECK(var->IsStaticGlobalObjectProperty());
1413 const int slot = var->index();
1414 const int depth = scope()->ContextChainLength(var->scope());
1415 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1416 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
1417 LoadGlobalViaContextStub stub(isolate(), depth);
1420 __ Push(Smi::FromInt(slot));
1421 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1424 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1425 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1426 __ mov(LoadDescriptor::SlotRegister(),
1427 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1428 CallLoadIC(typeof_mode);
1433 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1434 TypeofMode typeof_mode) {
1435 // Record position before possible IC call.
1436 SetExpressionPosition(proxy);
1437 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1438 Variable* var = proxy->var();
1440 // Three cases: global variables, lookup variables, and all other types of
1442 switch (var->location()) {
1443 case VariableLocation::GLOBAL:
1444 case VariableLocation::UNALLOCATED: {
1445 Comment cmnt(masm_, "[ Global variable");
1446 EmitGlobalVariableLoad(proxy, typeof_mode);
1447 context()->Plug(r0);
1451 case VariableLocation::PARAMETER:
1452 case VariableLocation::LOCAL:
1453 case VariableLocation::CONTEXT: {
1454 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1455 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1456 : "[ Stack variable");
1457 if (var->binding_needs_init()) {
1458 // var->scope() may be NULL when the proxy is located in eval code and
1459 // refers to a potential outside binding. Currently those bindings are
1460 // always looked up dynamically, i.e. in that case
1461 // var->location() == LOOKUP.
1463 DCHECK(var->scope() != NULL);
1465 // Check if the binding really needs an initialization check. The check
1466 // can be skipped in the following situation: we have a LET or CONST
1467 // binding in harmony mode, both the Variable and the VariableProxy have
1468 // the same declaration scope (i.e. they are both in global code, in the
1469 // same function or in the same eval code) and the VariableProxy is in
1470 // the source physically located after the initializer of the variable.
1472 // We cannot skip any initialization checks for CONST in non-harmony
1473 // mode because const variables may be declared but never initialized:
1474 // if (false) { const x; }; var y = x;
1476 // The condition on the declaration scopes is a conservative check for
1477 // nested functions that access a binding and are called before the
1478 // binding is initialized:
1479 // function() { f(); let x = 1; function f() { x = 2; } }
1481 bool skip_init_check;
1482 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1483 skip_init_check = false;
1484 } else if (var->is_this()) {
1485 CHECK(info_->function() != nullptr &&
1486 (info_->function()->kind() & kSubclassConstructor) != 0);
1487 // TODO(dslomov): implement 'this' hole check elimination.
1488 skip_init_check = false;
1490 // Check that we always have valid source position.
1491 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1492 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1493 skip_init_check = var->mode() != CONST_LEGACY &&
1494 var->initializer_position() < proxy->position();
1497 if (!skip_init_check) {
1498 // Let and const need a read barrier.
1500 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1501 if (var->mode() == LET || var->mode() == CONST) {
1502 // Throw a reference error when using an uninitialized let/const
1503 // binding in harmony mode.
1506 __ mov(r0, Operand(var->name()));
1508 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1511 // Uninitalized const bindings outside of harmony mode are unholed.
1512 DCHECK(var->mode() == CONST_LEGACY);
1513 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1515 context()->Plug(r0);
1519 context()->Plug(var);
1523 case VariableLocation::LOOKUP: {
1524 Comment cmnt(masm_, "[ Lookup variable");
1526 // Generate code for loading from variables potentially shadowed
1527 // by eval-introduced variables.
1528 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1530 __ mov(r1, Operand(var->name()));
1531 __ Push(cp, r1); // Context and name.
1532 Runtime::FunctionId function_id =
1533 typeof_mode == NOT_INSIDE_TYPEOF
1534 ? Runtime::kLoadLookupSlot
1535 : Runtime::kLoadLookupSlotNoReferenceError;
1536 __ CallRuntime(function_id, 2);
1538 context()->Plug(r0);
1544 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1545 Comment cmnt(masm_, "[ RegExpLiteral");
1547 // Registers will be used as follows:
1548 // r5 = materialized value (RegExp literal)
1549 // r4 = JS function, literals array
1550 // r3 = literal index
1551 // r2 = RegExp pattern
1552 // r1 = RegExp flags
1553 // r0 = RegExp literal clone
1554 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1555 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1556 int literal_offset =
1557 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1558 __ ldr(r5, FieldMemOperand(r4, literal_offset));
1559 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1561 __ b(ne, &materialized);
1563 // Create regexp literal using runtime function.
1564 // Result will be in r0.
1565 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1566 __ mov(r2, Operand(expr->pattern()));
1567 __ mov(r1, Operand(expr->flags()));
1568 __ Push(r4, r3, r2, r1);
1569 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1572 __ bind(&materialized);
1573 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1574 Label allocated, runtime_allocate;
1575 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1578 __ bind(&runtime_allocate);
1579 __ mov(r0, Operand(Smi::FromInt(size)));
1581 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1584 __ bind(&allocated);
1585 // After this, registers are used as follows:
1586 // r0: Newly allocated regexp.
1587 // r5: Materialized regexp.
1589 __ CopyFields(r0, r5, d0, size / kPointerSize);
1590 context()->Plug(r0);
1594 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1595 if (expression == NULL) {
1596 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1599 VisitForStackValue(expression);
1604 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1605 Comment cmnt(masm_, "[ ObjectLiteral");
1607 Handle<FixedArray> constant_properties = expr->constant_properties();
1608 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1609 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1610 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1611 __ mov(r1, Operand(constant_properties));
1612 int flags = expr->ComputeFlags();
1613 __ mov(r0, Operand(Smi::FromInt(flags)));
1614 if (MustCreateObjectLiteralWithRuntime(expr)) {
1615 __ Push(r3, r2, r1, r0);
1616 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1618 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1621 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1623 // If result_saved is true the result is on top of the stack. If
1624 // result_saved is false the result is in r0.
1625 bool result_saved = false;
1627 AccessorTable accessor_table(zone());
1628 int property_index = 0;
1629 // store_slot_index points to the vector IC slot for the next store IC used.
1630 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1631 // and must be updated if the number of store ICs emitted here changes.
1632 int store_slot_index = 0;
1633 for (; property_index < expr->properties()->length(); property_index++) {
1634 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1635 if (property->is_computed_name()) break;
1636 if (property->IsCompileTimeValue()) continue;
1638 Literal* key = property->key()->AsLiteral();
1639 Expression* value = property->value();
1640 if (!result_saved) {
1641 __ push(r0); // Save result on stack
1642 result_saved = true;
1644 switch (property->kind()) {
1645 case ObjectLiteral::Property::CONSTANT:
1647 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1648 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1650 case ObjectLiteral::Property::COMPUTED:
1651 // It is safe to use [[Put]] here because the boilerplate already
1652 // contains computed properties with an uninitialized value.
1653 if (key->value()->IsInternalizedString()) {
1654 if (property->emit_store()) {
1655 VisitForAccumulatorValue(value);
1656 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1657 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1658 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1659 if (FLAG_vector_stores) {
1660 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1663 CallStoreIC(key->LiteralFeedbackId());
1665 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1667 if (NeedsHomeObject(value)) {
1668 __ Move(StoreDescriptor::ReceiverRegister(), r0);
1669 __ mov(StoreDescriptor::NameRegister(),
1670 Operand(isolate()->factory()->home_object_symbol()));
1671 __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp));
1672 if (FLAG_vector_stores) {
1673 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1678 VisitForEffect(value);
1682 // Duplicate receiver on stack.
1683 __ ldr(r0, MemOperand(sp));
1685 VisitForStackValue(key);
1686 VisitForStackValue(value);
1687 if (property->emit_store()) {
1688 EmitSetHomeObjectIfNeeded(
1689 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1690 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
1692 __ CallRuntime(Runtime::kSetProperty, 4);
1697 case ObjectLiteral::Property::PROTOTYPE:
1698 // Duplicate receiver on stack.
1699 __ ldr(r0, MemOperand(sp));
1701 VisitForStackValue(value);
1702 DCHECK(property->emit_store());
1703 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1706 case ObjectLiteral::Property::GETTER:
1707 if (property->emit_store()) {
1708 accessor_table.lookup(key)->second->getter = value;
1711 case ObjectLiteral::Property::SETTER:
1712 if (property->emit_store()) {
1713 accessor_table.lookup(key)->second->setter = value;
1719 // Emit code to define accessors, using only a single call to the runtime for
1720 // each pair of corresponding getters and setters.
1721 for (AccessorTable::Iterator it = accessor_table.begin();
1722 it != accessor_table.end();
1724 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1726 VisitForStackValue(it->first);
1727 EmitAccessor(it->second->getter);
1728 EmitSetHomeObjectIfNeeded(
1729 it->second->getter, 2,
1730 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1731 EmitAccessor(it->second->setter);
1732 EmitSetHomeObjectIfNeeded(
1733 it->second->setter, 3,
1734 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1735 __ mov(r0, Operand(Smi::FromInt(NONE)));
1737 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1740 // Object literals have two parts. The "static" part on the left contains no
1741 // computed property names, and so we can compute its map ahead of time; see
1742 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1743 // starts with the first computed property name, and continues with all
1744 // properties to its right. All the code from above initializes the static
1745 // component of the object literal, and arranges for the map of the result to
1746 // reflect the static order in which the keys appear. For the dynamic
1747 // properties, we compile them into a series of "SetOwnProperty" runtime
1748 // calls. This will preserve insertion order.
1749 for (; property_index < expr->properties()->length(); property_index++) {
1750 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1752 Expression* value = property->value();
1753 if (!result_saved) {
1754 __ push(r0); // Save result on the stack
1755 result_saved = true;
1758 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1761 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1762 DCHECK(!property->is_computed_name());
1763 VisitForStackValue(value);
1764 DCHECK(property->emit_store());
1765 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1767 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1768 VisitForStackValue(value);
1769 EmitSetHomeObjectIfNeeded(
1770 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1772 switch (property->kind()) {
1773 case ObjectLiteral::Property::CONSTANT:
1774 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1775 case ObjectLiteral::Property::COMPUTED:
1776 if (property->emit_store()) {
1777 __ mov(r0, Operand(Smi::FromInt(NONE)));
1779 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1785 case ObjectLiteral::Property::PROTOTYPE:
1789 case ObjectLiteral::Property::GETTER:
1790 __ mov(r0, Operand(Smi::FromInt(NONE)));
1792 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1795 case ObjectLiteral::Property::SETTER:
1796 __ mov(r0, Operand(Smi::FromInt(NONE)));
1798 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1804 if (expr->has_function()) {
1805 DCHECK(result_saved);
1806 __ ldr(r0, MemOperand(sp));
1808 __ CallRuntime(Runtime::kToFastProperties, 1);
1812 context()->PlugTOS();
1814 context()->Plug(r0);
1817 // Verify that compilation exactly consumed the number of store ic slots that
1818 // the ObjectLiteral node had to offer.
1819 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1823 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1824 Comment cmnt(masm_, "[ ArrayLiteral");
1826 expr->BuildConstantElements(isolate());
1828 Handle<FixedArray> constant_elements = expr->constant_elements();
1829 bool has_fast_elements =
1830 IsFastObjectElementsKind(expr->constant_elements_kind());
1831 Handle<FixedArrayBase> constant_elements_values(
1832 FixedArrayBase::cast(constant_elements->get(1)));
1834 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1835 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1836 // If the only customer of allocation sites is transitioning, then
1837 // we can turn it off if we don't have anywhere else to transition to.
1838 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1841 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1842 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1843 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1844 __ mov(r1, Operand(constant_elements));
1845 if (MustCreateArrayLiteralWithRuntime(expr)) {
1846 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1847 __ Push(r3, r2, r1, r0);
1848 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1850 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1853 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1855 bool result_saved = false; // Is the result saved to the stack?
1856 ZoneList<Expression*>* subexprs = expr->values();
1857 int length = subexprs->length();
1859 // Emit code to evaluate all the non-constant subexpressions and to store
1860 // them into the newly cloned array.
1861 int array_index = 0;
1862 for (; array_index < length; array_index++) {
1863 Expression* subexpr = subexprs->at(array_index);
1864 if (subexpr->IsSpread()) break;
1866 // If the subexpression is a literal or a simple materialized literal it
1867 // is already set in the cloned array.
1868 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1870 if (!result_saved) {
1872 __ Push(Smi::FromInt(expr->literal_index()));
1873 result_saved = true;
1875 VisitForAccumulatorValue(subexpr);
1877 if (has_fast_elements) {
1878 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1879 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1880 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1881 __ str(result_register(), FieldMemOperand(r1, offset));
1882 // Update the write barrier for the array store.
1883 __ RecordWriteField(r1, offset, result_register(), r2,
1884 kLRHasBeenSaved, kDontSaveFPRegs,
1885 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1887 __ mov(r3, Operand(Smi::FromInt(array_index)));
1888 StoreArrayLiteralElementStub stub(isolate());
1892 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1895 // In case the array literal contains spread expressions it has two parts. The
1896 // first part is the "static" array which has a literal index is handled
1897 // above. The second part is the part after the first spread expression
1898 // (inclusive) and these elements gets appended to the array. Note that the
1899 // number elements an iterable produces is unknown ahead of time.
1900 if (array_index < length && result_saved) {
1901 __ pop(); // literal index
1903 result_saved = false;
1905 for (; array_index < length; array_index++) {
1906 Expression* subexpr = subexprs->at(array_index);
1909 if (subexpr->IsSpread()) {
1910 VisitForStackValue(subexpr->AsSpread()->expression());
1911 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1913 VisitForStackValue(subexpr);
1914 __ CallRuntime(Runtime::kAppendElement, 2);
1917 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1921 __ pop(); // literal index
1922 context()->PlugTOS();
1924 context()->Plug(r0);
1929 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1930 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1932 Comment cmnt(masm_, "[ Assignment");
1933 SetExpressionPosition(expr, INSERT_BREAK);
1935 Property* property = expr->target()->AsProperty();
1936 LhsKind assign_type = Property::GetAssignType(property);
1938 // Evaluate LHS expression.
1939 switch (assign_type) {
1941 // Nothing to do here.
1943 case NAMED_PROPERTY:
1944 if (expr->is_compound()) {
1945 // We need the receiver both on the stack and in the register.
1946 VisitForStackValue(property->obj());
1947 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1949 VisitForStackValue(property->obj());
1952 case NAMED_SUPER_PROPERTY:
1954 property->obj()->AsSuperPropertyReference()->this_var());
1955 VisitForAccumulatorValue(
1956 property->obj()->AsSuperPropertyReference()->home_object());
1957 __ Push(result_register());
1958 if (expr->is_compound()) {
1959 const Register scratch = r1;
1960 __ ldr(scratch, MemOperand(sp, kPointerSize));
1962 __ Push(result_register());
1965 case KEYED_SUPER_PROPERTY:
1967 property->obj()->AsSuperPropertyReference()->this_var());
1969 property->obj()->AsSuperPropertyReference()->home_object());
1970 VisitForAccumulatorValue(property->key());
1971 __ Push(result_register());
1972 if (expr->is_compound()) {
1973 const Register scratch = r1;
1974 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1976 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1978 __ Push(result_register());
1981 case KEYED_PROPERTY:
1982 if (expr->is_compound()) {
1983 VisitForStackValue(property->obj());
1984 VisitForStackValue(property->key());
1985 __ ldr(LoadDescriptor::ReceiverRegister(),
1986 MemOperand(sp, 1 * kPointerSize));
1987 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1989 VisitForStackValue(property->obj());
1990 VisitForStackValue(property->key());
1995 // For compound assignments we need another deoptimization point after the
1996 // variable/property load.
1997 if (expr->is_compound()) {
1998 { AccumulatorValueContext context(this);
1999 switch (assign_type) {
2001 EmitVariableLoad(expr->target()->AsVariableProxy());
2002 PrepareForBailout(expr->target(), TOS_REG);
2004 case NAMED_PROPERTY:
2005 EmitNamedPropertyLoad(property);
2006 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2008 case NAMED_SUPER_PROPERTY:
2009 EmitNamedSuperPropertyLoad(property);
2010 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2012 case KEYED_SUPER_PROPERTY:
2013 EmitKeyedSuperPropertyLoad(property);
2014 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2016 case KEYED_PROPERTY:
2017 EmitKeyedPropertyLoad(property);
2018 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2023 Token::Value op = expr->binary_op();
2024 __ push(r0); // Left operand goes on the stack.
2025 VisitForAccumulatorValue(expr->value());
2027 AccumulatorValueContext context(this);
2028 if (ShouldInlineSmiCase(op)) {
2029 EmitInlineSmiBinaryOp(expr->binary_operation(),
2034 EmitBinaryOp(expr->binary_operation(), op);
2037 // Deoptimization point in case the binary operation may have side effects.
2038 PrepareForBailout(expr->binary_operation(), TOS_REG);
2040 VisitForAccumulatorValue(expr->value());
2043 SetExpressionPosition(expr);
2046 switch (assign_type) {
2048 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2049 expr->op(), expr->AssignmentSlot());
2050 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2051 context()->Plug(r0);
2053 case NAMED_PROPERTY:
2054 EmitNamedPropertyAssignment(expr);
2056 case NAMED_SUPER_PROPERTY:
2057 EmitNamedSuperPropertyStore(property);
2058 context()->Plug(r0);
2060 case KEYED_SUPER_PROPERTY:
2061 EmitKeyedSuperPropertyStore(property);
2062 context()->Plug(r0);
2064 case KEYED_PROPERTY:
2065 EmitKeyedPropertyAssignment(expr);
2071 void FullCodeGenerator::VisitYield(Yield* expr) {
2072 Comment cmnt(masm_, "[ Yield");
2073 SetExpressionPosition(expr);
2075 // Evaluate yielded value first; the initial iterator definition depends on
2076 // this. It stays on the stack while we update the iterator.
2077 VisitForStackValue(expr->expression());
2079 switch (expr->yield_kind()) {
2080 case Yield::kSuspend:
2081 // Pop value from top-of-stack slot; box result into result register.
2082 EmitCreateIteratorResult(false);
2083 __ push(result_register());
2085 case Yield::kInitial: {
2086 Label suspend, continuation, post_runtime, resume;
2089 __ bind(&continuation);
2090 __ RecordGeneratorContinuation();
2094 VisitForAccumulatorValue(expr->generator_object());
2095 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2096 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2097 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2098 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2100 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2101 kLRHasBeenSaved, kDontSaveFPRegs);
2102 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2104 __ b(eq, &post_runtime);
2105 __ push(r0); // generator object
2106 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2107 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2108 __ bind(&post_runtime);
2109 __ pop(result_register());
2110 EmitReturnSequence();
2113 context()->Plug(result_register());
2117 case Yield::kFinal: {
2118 VisitForAccumulatorValue(expr->generator_object());
2119 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2120 __ str(r1, FieldMemOperand(result_register(),
2121 JSGeneratorObject::kContinuationOffset));
2122 // Pop value from top-of-stack slot, box result into result register.
2123 EmitCreateIteratorResult(true);
2124 EmitUnwindBeforeReturn();
2125 EmitReturnSequence();
2129 case Yield::kDelegating: {
2130 VisitForStackValue(expr->generator_object());
2132 // Initial stack layout is as follows:
2133 // [sp + 1 * kPointerSize] iter
2134 // [sp + 0 * kPointerSize] g
2136 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2137 Label l_next, l_call, l_loop;
2138 Register load_receiver = LoadDescriptor::ReceiverRegister();
2139 Register load_name = LoadDescriptor::NameRegister();
2141 // Initial send value is undefined.
2142 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2145 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2147 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2148 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2149 __ Push(load_name, r3, r0); // "throw", iter, except
2152 // try { received = %yield result }
2153 // Shuffle the received result above a try handler and yield it without
2156 __ pop(r0); // result
2157 int handler_index = NewHandlerTableEntry();
2158 EnterTryBlock(handler_index, &l_catch);
2159 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2160 __ push(r0); // result
2163 __ bind(&l_continuation);
2164 __ RecordGeneratorContinuation();
2167 __ bind(&l_suspend);
2168 const int generator_object_depth = kPointerSize + try_block_size;
2169 __ ldr(r0, MemOperand(sp, generator_object_depth));
2171 __ Push(Smi::FromInt(handler_index)); // handler-index
2172 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2173 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2174 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2175 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2177 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2178 kLRHasBeenSaved, kDontSaveFPRegs);
2179 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2180 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2181 __ pop(r0); // result
2182 EmitReturnSequence();
2183 __ bind(&l_resume); // received in r0
2184 ExitTryBlock(handler_index);
2186 // receiver = iter; f = 'next'; arg = received;
2189 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2190 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2191 __ Push(load_name, r3, r0); // "next", iter, received
2193 // result = receiver[f](arg);
2195 __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2196 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2197 __ mov(LoadDescriptor::SlotRegister(),
2198 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2199 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2200 CallIC(ic, TypeFeedbackId::None());
2202 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2203 SetCallPosition(expr, 1);
2204 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2207 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2208 __ Drop(1); // The function is still on the stack; drop it.
2210 // if (!result.done) goto l_try;
2212 __ Move(load_receiver, r0);
2214 __ push(load_receiver); // save result
2215 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2216 __ mov(LoadDescriptor::SlotRegister(),
2217 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2218 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done
2219 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2221 __ cmp(r0, Operand(0));
2225 __ pop(load_receiver); // result
2226 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2227 __ mov(LoadDescriptor::SlotRegister(),
2228 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2229 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.value
2230 context()->DropAndPlug(2, r0); // drop iter and g
2237 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2239 JSGeneratorObject::ResumeMode resume_mode) {
2240 // The value stays in r0, and is ultimately read by the resumed generator, as
2241 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2242 // is read to throw the value when the resumed generator is already closed.
2243 // r1 will hold the generator object until the activation has been resumed.
2244 VisitForStackValue(generator);
2245 VisitForAccumulatorValue(value);
2248 // Load suspended function and context.
2249 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2250 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2252 // Load receiver and store as the first argument.
2253 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2256 // Push holes for the rest of the arguments to the generator function.
2257 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2259 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2260 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2261 Label push_argument_holes, push_frame;
2262 __ bind(&push_argument_holes);
2263 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2264 __ b(mi, &push_frame);
2266 __ jmp(&push_argument_holes);
2268 // Enter a new JavaScript frame, and initialize its slots as they were when
2269 // the generator was suspended.
2270 Label resume_frame, done;
2271 __ bind(&push_frame);
2272 __ bl(&resume_frame);
2274 __ bind(&resume_frame);
2275 // lr = return address.
2276 // fp = caller's frame pointer.
2277 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2278 // cp = callee's context,
2279 // r4 = callee's JS function.
2280 __ PushFixedFrame(r4);
2281 // Adjust FP to point to saved FP.
2282 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2284 // Load the operand stack size.
2285 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2286 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2289 // If we are sending a value and there is no operand stack, we can jump back
2291 if (resume_mode == JSGeneratorObject::NEXT) {
2293 __ cmp(r3, Operand(0));
2294 __ b(ne, &slow_resume);
2295 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2297 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2298 if (FLAG_enable_embedded_constant_pool) {
2299 // Load the new code object's constant pool pointer.
2300 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2303 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2306 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2307 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2310 __ bind(&slow_resume);
2313 // Otherwise, we push holes for the operand stack and call the runtime to fix
2314 // up the stack and the handlers.
2315 Label push_operand_holes, call_resume;
2316 __ bind(&push_operand_holes);
2317 __ sub(r3, r3, Operand(1), SetCC);
2318 __ b(mi, &call_resume);
2320 __ b(&push_operand_holes);
2321 __ bind(&call_resume);
2322 DCHECK(!result_register().is(r1));
2323 __ Push(r1, result_register());
2324 __ Push(Smi::FromInt(resume_mode));
2325 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2326 // Not reached: the runtime call returns elsewhere.
2327 __ stop("not-reached");
2330 context()->Plug(result_register());
2334 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2338 const int instance_size = 5 * kPointerSize;
2339 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2342 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT);
2345 __ bind(&gc_required);
2346 __ Push(Smi::FromInt(instance_size));
2347 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2348 __ ldr(context_register(),
2349 MemOperand(fp, StandardFrameConstants::kContextOffset));
2351 __ bind(&allocated);
2352 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2353 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
2354 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX));
2356 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2357 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2358 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2359 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2360 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2362 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2364 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2366 // Only the value field needs a write barrier, as the other values are in the
2368 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2369 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2373 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2374 SetExpressionPosition(prop);
2375 Literal* key = prop->key()->AsLiteral();
2376 DCHECK(!prop->IsSuperAccess());
2378 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2379 __ mov(LoadDescriptor::SlotRegister(),
2380 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2381 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2385 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2386 // Stack: receiver, home_object.
2387 SetExpressionPosition(prop);
2388 Literal* key = prop->key()->AsLiteral();
2389 DCHECK(!key->value()->IsSmi());
2390 DCHECK(prop->IsSuperAccess());
2392 __ Push(key->value());
2393 __ Push(Smi::FromInt(language_mode()));
2394 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2398 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2399 SetExpressionPosition(prop);
2400 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2401 __ mov(LoadDescriptor::SlotRegister(),
2402 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2407 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2408 // Stack: receiver, home_object, key.
2409 SetExpressionPosition(prop);
2410 __ Push(Smi::FromInt(language_mode()));
2411 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2415 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2417 Expression* left_expr,
2418 Expression* right_expr) {
2419 Label done, smi_case, stub_call;
2421 Register scratch1 = r2;
2422 Register scratch2 = r3;
2424 // Get the arguments.
2426 Register right = r0;
2429 // Perform combined smi check on both operands.
2430 __ orr(scratch1, left, Operand(right));
2431 STATIC_ASSERT(kSmiTag == 0);
2432 JumpPatchSite patch_site(masm_);
2433 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2435 __ bind(&stub_call);
2437 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2438 CallIC(code, expr->BinaryOperationFeedbackId());
2439 patch_site.EmitPatchInfo();
2443 // Smi case. This code works the same way as the smi-smi case in the type
2444 // recording binary operation stub, see
2447 __ GetLeastBitsFromSmi(scratch1, right, 5);
2448 __ mov(right, Operand(left, ASR, scratch1));
2449 __ bic(right, right, Operand(kSmiTagMask));
2452 __ SmiUntag(scratch1, left);
2453 __ GetLeastBitsFromSmi(scratch2, right, 5);
2454 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2455 __ TrySmiTag(right, scratch1, &stub_call);
2459 __ SmiUntag(scratch1, left);
2460 __ GetLeastBitsFromSmi(scratch2, right, 5);
2461 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2462 __ tst(scratch1, Operand(0xc0000000));
2463 __ b(ne, &stub_call);
2464 __ SmiTag(right, scratch1);
2468 __ add(scratch1, left, Operand(right), SetCC);
2469 __ b(vs, &stub_call);
2470 __ mov(right, scratch1);
2473 __ sub(scratch1, left, Operand(right), SetCC);
2474 __ b(vs, &stub_call);
2475 __ mov(right, scratch1);
2478 __ SmiUntag(ip, right);
2479 __ smull(scratch1, scratch2, left, ip);
2480 __ mov(ip, Operand(scratch1, ASR, 31));
2481 __ cmp(ip, Operand(scratch2));
2482 __ b(ne, &stub_call);
2483 __ cmp(scratch1, Operand::Zero());
2484 __ mov(right, Operand(scratch1), LeaveCC, ne);
2486 __ add(scratch2, right, Operand(left), SetCC);
2487 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2488 __ b(mi, &stub_call);
2492 __ orr(right, left, Operand(right));
2494 case Token::BIT_AND:
2495 __ and_(right, left, Operand(right));
2497 case Token::BIT_XOR:
2498 __ eor(right, left, Operand(right));
2505 context()->Plug(r0);
2509 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2510 int* used_store_slots) {
2511 // Constructor is in r0.
2512 DCHECK(lit != NULL);
2515 // No access check is needed here since the constructor is created by the
2517 Register scratch = r1;
2519 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2522 for (int i = 0; i < lit->properties()->length(); i++) {
2523 ObjectLiteral::Property* property = lit->properties()->at(i);
2524 Expression* value = property->value();
2526 if (property->is_static()) {
2527 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2529 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2532 EmitPropertyKey(property, lit->GetIdForProperty(i));
2534 // The static prototype property is read only. We handle the non computed
2535 // property name case in the parser. Since this is the only case where we
2536 // need to check for an own read only property we special case this so we do
2537 // not need to do this for every property.
2538 if (property->is_static() && property->is_computed_name()) {
2539 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2543 VisitForStackValue(value);
2544 EmitSetHomeObjectIfNeeded(value, 2,
2545 lit->SlotForHomeObject(value, used_store_slots));
2547 switch (property->kind()) {
2548 case ObjectLiteral::Property::CONSTANT:
2549 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2550 case ObjectLiteral::Property::PROTOTYPE:
2552 case ObjectLiteral::Property::COMPUTED:
2553 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2556 case ObjectLiteral::Property::GETTER:
2557 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2559 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2562 case ObjectLiteral::Property::SETTER:
2563 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2565 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2573 // Set both the prototype and constructor to have fast properties, and also
2574 // freeze them in strong mode.
2575 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2579 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2582 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2583 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2584 CallIC(code, expr->BinaryOperationFeedbackId());
2585 patch_site.EmitPatchInfo();
2586 context()->Plug(r0);
2590 void FullCodeGenerator::EmitAssignment(Expression* expr,
2591 FeedbackVectorICSlot slot) {
2592 DCHECK(expr->IsValidReferenceExpressionOrThis());
2594 Property* prop = expr->AsProperty();
2595 LhsKind assign_type = Property::GetAssignType(prop);
2597 switch (assign_type) {
2599 Variable* var = expr->AsVariableProxy()->var();
2600 EffectContext context(this);
2601 EmitVariableAssignment(var, Token::ASSIGN, slot);
2604 case NAMED_PROPERTY: {
2605 __ push(r0); // Preserve value.
2606 VisitForAccumulatorValue(prop->obj());
2607 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2608 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2609 __ mov(StoreDescriptor::NameRegister(),
2610 Operand(prop->key()->AsLiteral()->value()));
2611 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2615 case NAMED_SUPER_PROPERTY: {
2617 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2618 VisitForAccumulatorValue(
2619 prop->obj()->AsSuperPropertyReference()->home_object());
2620 // stack: value, this; r0: home_object
2621 Register scratch = r2;
2622 Register scratch2 = r3;
2623 __ mov(scratch, result_register()); // home_object
2624 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2625 __ ldr(scratch2, MemOperand(sp, 0)); // this
2626 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2627 __ str(scratch, MemOperand(sp, 0)); // home_object
2628 // stack: this, home_object; r0: value
2629 EmitNamedSuperPropertyStore(prop);
2632 case KEYED_SUPER_PROPERTY: {
2634 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2636 prop->obj()->AsSuperPropertyReference()->home_object());
2637 VisitForAccumulatorValue(prop->key());
2638 Register scratch = r2;
2639 Register scratch2 = r3;
2640 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2641 // stack: value, this, home_object; r0: key, r3: value
2642 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2643 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2644 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2645 __ str(scratch, MemOperand(sp, kPointerSize));
2646 __ str(r0, MemOperand(sp, 0));
2647 __ Move(r0, scratch2);
2648 // stack: this, home_object, key; r0: value.
2649 EmitKeyedSuperPropertyStore(prop);
2652 case KEYED_PROPERTY: {
2653 __ push(r0); // Preserve value.
2654 VisitForStackValue(prop->obj());
2655 VisitForAccumulatorValue(prop->key());
2656 __ Move(StoreDescriptor::NameRegister(), r0);
2657 __ Pop(StoreDescriptor::ValueRegister(),
2658 StoreDescriptor::ReceiverRegister());
2659 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2661 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2666 context()->Plug(r0);
2670 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2671 Variable* var, MemOperand location) {
2672 __ str(result_register(), location);
2673 if (var->IsContextSlot()) {
2674 // RecordWrite may destroy all its register arguments.
2675 __ mov(r3, result_register());
2676 int offset = Context::SlotOffset(var->index());
2677 __ RecordWriteContextSlot(
2678 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2683 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2684 FeedbackVectorICSlot slot) {
2685 if (var->IsUnallocated()) {
2686 // Global var, const, or let.
2687 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2688 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2689 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2692 } else if (var->IsGlobalSlot()) {
2693 // Global var, const, or let.
2694 DCHECK(var->index() > 0);
2695 DCHECK(var->IsStaticGlobalObjectProperty());
2696 const int slot = var->index();
2697 const int depth = scope()->ContextChainLength(var->scope());
2698 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2699 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2700 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
2701 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2704 __ Push(Smi::FromInt(slot));
2706 __ CallRuntime(is_strict(language_mode())
2707 ? Runtime::kStoreGlobalViaContext_Strict
2708 : Runtime::kStoreGlobalViaContext_Sloppy,
2711 } else if (var->mode() == LET && op != Token::INIT_LET) {
2712 // Non-initializing assignment to let variable needs a write barrier.
2713 DCHECK(!var->IsLookupSlot());
2714 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2716 MemOperand location = VarOperand(var, r1);
2717 __ ldr(r3, location);
2718 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2720 __ mov(r3, Operand(var->name()));
2722 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2723 // Perform the assignment.
2725 EmitStoreToStackLocalOrContextSlot(var, location);
2727 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2728 // Assignment to const variable needs a write barrier.
2729 DCHECK(!var->IsLookupSlot());
2730 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2732 MemOperand location = VarOperand(var, r1);
2733 __ ldr(r3, location);
2734 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2735 __ b(ne, &const_error);
2736 __ mov(r3, Operand(var->name()));
2738 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2739 __ bind(&const_error);
2740 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2742 } else if (var->is_this() && op == Token::INIT_CONST) {
2743 // Initializing assignment to const {this} needs a write barrier.
2744 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2745 Label uninitialized_this;
2746 MemOperand location = VarOperand(var, r1);
2747 __ ldr(r3, location);
2748 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2749 __ b(eq, &uninitialized_this);
2750 __ mov(r0, Operand(var->name()));
2752 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2753 __ bind(&uninitialized_this);
2754 EmitStoreToStackLocalOrContextSlot(var, location);
2756 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2757 if (var->IsLookupSlot()) {
2758 // Assignment to var.
2759 __ push(r0); // Value.
2760 __ mov(r1, Operand(var->name()));
2761 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2762 __ Push(cp, r1, r0); // Context, name, language mode.
2763 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2765 // Assignment to var or initializing assignment to let/const in harmony
2767 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2768 MemOperand location = VarOperand(var, r1);
2769 if (generate_debug_code_ && op == Token::INIT_LET) {
2770 // Check for an uninitialized let binding.
2771 __ ldr(r2, location);
2772 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2773 __ Check(eq, kLetBindingReInitialization);
2775 EmitStoreToStackLocalOrContextSlot(var, location);
2778 } else if (op == Token::INIT_CONST_LEGACY) {
2779 // Const initializers need a write barrier.
2780 DCHECK(var->mode() == CONST_LEGACY);
2781 DCHECK(!var->IsParameter()); // No const parameters.
2782 if (var->IsLookupSlot()) {
2784 __ mov(r0, Operand(var->name()));
2785 __ Push(cp, r0); // Context and name.
2786 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2788 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2790 MemOperand location = VarOperand(var, r1);
2791 __ ldr(r2, location);
2792 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2794 EmitStoreToStackLocalOrContextSlot(var, location);
2799 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2800 if (is_strict(language_mode())) {
2801 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2803 // Silently ignore store in sloppy mode.
2808 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2809 // Assignment to a property, using a named store IC.
2810 Property* prop = expr->target()->AsProperty();
2811 DCHECK(prop != NULL);
2812 DCHECK(prop->key()->IsLiteral());
2814 __ mov(StoreDescriptor::NameRegister(),
2815 Operand(prop->key()->AsLiteral()->value()));
2816 __ pop(StoreDescriptor::ReceiverRegister());
2817 if (FLAG_vector_stores) {
2818 EmitLoadStoreICSlot(expr->AssignmentSlot());
2821 CallStoreIC(expr->AssignmentFeedbackId());
2824 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2825 context()->Plug(r0);
2829 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2830 // Assignment to named property of super.
2832 // stack : receiver ('this'), home_object
2833 DCHECK(prop != NULL);
2834 Literal* key = prop->key()->AsLiteral();
2835 DCHECK(key != NULL);
2837 __ Push(key->value());
2839 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2840 : Runtime::kStoreToSuper_Sloppy),
2845 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2846 // Assignment to named property of super.
2848 // stack : receiver ('this'), home_object, key
2849 DCHECK(prop != NULL);
2853 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2854 : Runtime::kStoreKeyedToSuper_Sloppy),
2859 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2860 // Assignment to a property, using a keyed store IC.
2861 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2862 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2865 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2866 if (FLAG_vector_stores) {
2867 EmitLoadStoreICSlot(expr->AssignmentSlot());
2870 CallIC(ic, expr->AssignmentFeedbackId());
2873 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2874 context()->Plug(r0);
2878 void FullCodeGenerator::VisitProperty(Property* expr) {
2879 Comment cmnt(masm_, "[ Property");
2880 SetExpressionPosition(expr);
2882 Expression* key = expr->key();
2884 if (key->IsPropertyName()) {
2885 if (!expr->IsSuperAccess()) {
2886 VisitForAccumulatorValue(expr->obj());
2887 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2888 EmitNamedPropertyLoad(expr);
2890 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2892 expr->obj()->AsSuperPropertyReference()->home_object());
2893 EmitNamedSuperPropertyLoad(expr);
2896 if (!expr->IsSuperAccess()) {
2897 VisitForStackValue(expr->obj());
2898 VisitForAccumulatorValue(expr->key());
2899 __ Move(LoadDescriptor::NameRegister(), r0);
2900 __ pop(LoadDescriptor::ReceiverRegister());
2901 EmitKeyedPropertyLoad(expr);
2903 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2905 expr->obj()->AsSuperPropertyReference()->home_object());
2906 VisitForStackValue(expr->key());
2907 EmitKeyedSuperPropertyLoad(expr);
2910 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2911 context()->Plug(r0);
2915 void FullCodeGenerator::CallIC(Handle<Code> code,
2916 TypeFeedbackId ast_id) {
2918 // All calls must have a predictable size in full-codegen code to ensure that
2919 // the debugger can patch them correctly.
2920 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2921 NEVER_INLINE_TARGET_ADDRESS);
2925 // Code common for calls using the IC.
2926 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2927 Expression* callee = expr->expression();
2929 CallICState::CallType call_type =
2930 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2932 // Get the target function.
2933 if (call_type == CallICState::FUNCTION) {
2934 { StackValueContext context(this);
2935 EmitVariableLoad(callee->AsVariableProxy());
2936 PrepareForBailout(callee, NO_REGISTERS);
2938 // Push undefined as receiver. This is patched in the method prologue if it
2939 // is a sloppy mode method.
2940 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2943 // Load the function from the receiver.
2944 DCHECK(callee->IsProperty());
2945 DCHECK(!callee->AsProperty()->IsSuperAccess());
2946 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2947 EmitNamedPropertyLoad(callee->AsProperty());
2948 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2949 // Push the target function under the receiver.
2950 __ ldr(ip, MemOperand(sp, 0));
2952 __ str(r0, MemOperand(sp, kPointerSize));
2955 EmitCall(expr, call_type);
2959 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2960 Expression* callee = expr->expression();
2961 DCHECK(callee->IsProperty());
2962 Property* prop = callee->AsProperty();
2963 DCHECK(prop->IsSuperAccess());
2964 SetExpressionPosition(prop);
2966 Literal* key = prop->key()->AsLiteral();
2967 DCHECK(!key->value()->IsSmi());
2968 // Load the function from the receiver.
2969 const Register scratch = r1;
2970 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2971 VisitForStackValue(super_ref->home_object());
2972 VisitForAccumulatorValue(super_ref->this_var());
2975 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2977 __ Push(key->value());
2978 __ Push(Smi::FromInt(language_mode()));
2982 // - this (receiver)
2983 // - this (receiver) <-- LoadFromSuper will pop here and below.
2987 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2989 // Replace home_object with target function.
2990 __ str(r0, MemOperand(sp, kPointerSize));
2993 // - target function
2994 // - this (receiver)
2995 EmitCall(expr, CallICState::METHOD);
2999 // Code common for calls using the IC.
3000 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3003 VisitForAccumulatorValue(key);
3005 Expression* callee = expr->expression();
3007 // Load the function from the receiver.
3008 DCHECK(callee->IsProperty());
3009 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3010 __ Move(LoadDescriptor::NameRegister(), r0);
3011 EmitKeyedPropertyLoad(callee->AsProperty());
3012 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3014 // Push the target function under the receiver.
3015 __ ldr(ip, MemOperand(sp, 0));
3017 __ str(r0, MemOperand(sp, kPointerSize));
3019 EmitCall(expr, CallICState::METHOD);
3023 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3024 Expression* callee = expr->expression();
3025 DCHECK(callee->IsProperty());
3026 Property* prop = callee->AsProperty();
3027 DCHECK(prop->IsSuperAccess());
3029 SetExpressionPosition(prop);
3030 // Load the function from the receiver.
3031 const Register scratch = r1;
3032 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3033 VisitForStackValue(super_ref->home_object());
3034 VisitForAccumulatorValue(super_ref->this_var());
3037 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
3039 VisitForStackValue(prop->key());
3040 __ Push(Smi::FromInt(language_mode()));
3044 // - this (receiver)
3045 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3049 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3051 // Replace home_object with target function.
3052 __ str(r0, MemOperand(sp, kPointerSize));
3055 // - target function
3056 // - this (receiver)
3057 EmitCall(expr, CallICState::METHOD);
3061 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3062 // Load the arguments.
3063 ZoneList<Expression*>* args = expr->arguments();
3064 int arg_count = args->length();
3065 for (int i = 0; i < arg_count; i++) {
3066 VisitForStackValue(args->at(i));
3069 SetCallPosition(expr, arg_count);
3070 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3071 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3072 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3073 // Don't assign a type feedback id to the IC, since type feedback is provided
3074 // by the vector above.
3077 RecordJSReturnSite(expr);
3078 // Restore context register.
3079 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3080 context()->DropAndPlug(1, r0);
3084 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3085 // r4: copy of the first argument or undefined if it doesn't exist.
3086 if (arg_count > 0) {
3087 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
3089 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3092 // r3: the receiver of the enclosing function.
3093 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3095 // r2: language mode.
3096 __ mov(r2, Operand(Smi::FromInt(language_mode())));
3098 // r1: the start position of the scope the calls resides in.
3099 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
3101 // Do the runtime call.
3102 __ Push(r4, r3, r2, r1);
3103 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3107 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3108 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3109 VariableProxy* callee = expr->expression()->AsVariableProxy();
3110 if (callee->var()->IsLookupSlot()) {
3112 SetExpressionPosition(callee);
3113 // Generate code for loading from variables potentially shadowed
3114 // by eval-introduced variables.
3115 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3118 // Call the runtime to find the function to call (returned in r0)
3119 // and the object holding it (returned in edx).
3120 DCHECK(!context_register().is(r2));
3121 __ mov(r2, Operand(callee->name()));
3122 __ Push(context_register(), r2);
3123 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3124 __ Push(r0, r1); // Function, receiver.
3125 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3127 // If fast case code has been generated, emit code to push the
3128 // function and receiver and have the slow path jump around this
3130 if (done.is_linked()) {
3136 // The receiver is implicitly the global receiver. Indicate this
3137 // by passing the hole to the call function stub.
3138 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3143 VisitForStackValue(callee);
3144 // refEnv.WithBaseObject()
3145 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3146 __ push(r2); // Reserved receiver slot.
3151 void FullCodeGenerator::VisitCall(Call* expr) {
3153 // We want to verify that RecordJSReturnSite gets called on all paths
3154 // through this function. Avoid early returns.
3155 expr->return_is_recorded_ = false;
3158 Comment cmnt(masm_, "[ Call");
3159 Expression* callee = expr->expression();
3160 Call::CallType call_type = expr->GetCallType(isolate());
3162 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3163 // In a call to eval, we first call
3164 // RuntimeHidden_asResolvePossiblyDirectEval to resolve the function we need
3165 // to call. Then we call the resolved function using the given arguments.
3166 ZoneList<Expression*>* args = expr->arguments();
3167 int arg_count = args->length();
3169 PushCalleeAndWithBaseObject(expr);
3171 // Push the arguments.
3172 for (int i = 0; i < arg_count; i++) {
3173 VisitForStackValue(args->at(i));
3176 // Push a copy of the function (found below the arguments) and
3178 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3180 EmitResolvePossiblyDirectEval(arg_count);
3182 // Touch up the stack with the resolved function.
3183 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3185 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3187 // Record source position for debugger.
3188 SetCallPosition(expr, arg_count);
3189 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3190 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3192 RecordJSReturnSite(expr);
3193 // Restore context register.
3194 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3195 context()->DropAndPlug(1, r0);
3196 } else if (call_type == Call::GLOBAL_CALL) {
3197 EmitCallWithLoadIC(expr);
3199 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3200 // Call to a lookup slot (dynamically introduced variable).
3201 PushCalleeAndWithBaseObject(expr);
3203 } else if (call_type == Call::PROPERTY_CALL) {
3204 Property* property = callee->AsProperty();
3205 bool is_named_call = property->key()->IsPropertyName();
3206 if (property->IsSuperAccess()) {
3207 if (is_named_call) {
3208 EmitSuperCallWithLoadIC(expr);
3210 EmitKeyedSuperCallWithLoadIC(expr);
3213 VisitForStackValue(property->obj());
3214 if (is_named_call) {
3215 EmitCallWithLoadIC(expr);
3217 EmitKeyedCallWithLoadIC(expr, property->key());
3220 } else if (call_type == Call::SUPER_CALL) {
3221 EmitSuperConstructorCall(expr);
3223 DCHECK(call_type == Call::OTHER_CALL);
3224 // Call to an arbitrary expression not handled specially above.
3225 VisitForStackValue(callee);
3226 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
3228 // Emit function call.
3233 // RecordJSReturnSite should have been called.
3234 DCHECK(expr->return_is_recorded_);
3239 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3240 Comment cmnt(masm_, "[ CallNew");
3241 // According to ECMA-262, section 11.2.2, page 44, the function
3242 // expression in new calls must be evaluated before the
3245 // Push constructor on the stack. If it's not a function it's used as
3246 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3248 DCHECK(!expr->expression()->IsSuperPropertyReference());
3249 VisitForStackValue(expr->expression());
3251 // Push the arguments ("left-to-right") on the stack.
3252 ZoneList<Expression*>* args = expr->arguments();
3253 int arg_count = args->length();
3254 for (int i = 0; i < arg_count; i++) {
3255 VisitForStackValue(args->at(i));
3258 // Call the construct call builtin that handles allocation and
3259 // constructor invocation.
3260 SetConstructCallPosition(expr);
3262 // Load function and argument count into r1 and r0.
3263 __ mov(r0, Operand(arg_count));
3264 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3266 // Record call targets in unoptimized code.
3267 if (FLAG_pretenuring_call_new) {
3268 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3269 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3270 expr->CallNewFeedbackSlot().ToInt() + 1);
3273 __ Move(r2, FeedbackVector());
3274 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3276 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3277 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3278 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3279 context()->Plug(r0);
3283 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3284 SuperCallReference* super_call_ref =
3285 expr->expression()->AsSuperCallReference();
3286 DCHECK_NOT_NULL(super_call_ref);
3288 EmitLoadSuperConstructor(super_call_ref);
3289 __ push(result_register());
3291 // Push the arguments ("left-to-right") on the stack.
3292 ZoneList<Expression*>* args = expr->arguments();
3293 int arg_count = args->length();
3294 for (int i = 0; i < arg_count; i++) {
3295 VisitForStackValue(args->at(i));
3298 // Call the construct call builtin that handles allocation and
3299 // constructor invocation.
3300 SetConstructCallPosition(expr);
3302 // Load original constructor into r4.
3303 VisitForAccumulatorValue(super_call_ref->new_target_var());
3304 __ mov(r4, result_register());
3306 // Load function and argument count into r1 and r0.
3307 __ mov(r0, Operand(arg_count));
3308 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3310 // Record call targets in unoptimized code.
3311 if (FLAG_pretenuring_call_new) {
3313 /* TODO(dslomov): support pretenuring.
3314 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3315 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3316 expr->CallNewFeedbackSlot().ToInt() + 1);
3320 __ Move(r2, FeedbackVector());
3321 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3323 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3324 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3326 RecordJSReturnSite(expr);
3328 context()->Plug(r0);
3332 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3333 ZoneList<Expression*>* args = expr->arguments();
3334 DCHECK(args->length() == 1);
3336 VisitForAccumulatorValue(args->at(0));
3338 Label materialize_true, materialize_false;
3339 Label* if_true = NULL;
3340 Label* if_false = NULL;
3341 Label* fall_through = NULL;
3342 context()->PrepareTest(&materialize_true, &materialize_false,
3343 &if_true, &if_false, &fall_through);
3345 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3347 Split(eq, if_true, if_false, fall_through);
3349 context()->Plug(if_true, if_false);
3353 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3354 ZoneList<Expression*>* args = expr->arguments();
3355 DCHECK(args->length() == 1);
3357 VisitForAccumulatorValue(args->at(0));
3359 Label materialize_true, materialize_false;
3360 Label* if_true = NULL;
3361 Label* if_false = NULL;
3362 Label* fall_through = NULL;
3363 context()->PrepareTest(&materialize_true, &materialize_false,
3364 &if_true, &if_false, &fall_through);
3366 __ JumpIfSmi(r0, if_false);
3367 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3370 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3371 // Undetectable objects behave like undefined when tested with typeof.
3372 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3373 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3375 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3376 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3378 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3379 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3380 Split(le, if_true, if_false, fall_through);
3382 context()->Plug(if_true, if_false);
3386 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3387 ZoneList<Expression*>* args = expr->arguments();
3388 DCHECK(args->length() == 1);
3390 VisitForAccumulatorValue(args->at(0));
3392 Label materialize_true, materialize_false;
3393 Label* if_true = NULL;
3394 Label* if_false = NULL;
3395 Label* fall_through = NULL;
3396 context()->PrepareTest(&materialize_true, &materialize_false,
3397 &if_true, &if_false, &fall_through);
3399 __ JumpIfSmi(r0, if_false);
3400 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3401 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3402 Split(ge, if_true, if_false, fall_through);
3404 context()->Plug(if_true, if_false);
3408 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3409 ZoneList<Expression*>* args = expr->arguments();
3410 DCHECK(args->length() == 1);
3412 VisitForAccumulatorValue(args->at(0));
3414 Label materialize_true, materialize_false;
3415 Label* if_true = NULL;
3416 Label* if_false = NULL;
3417 Label* fall_through = NULL;
3418 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3419 &if_false, &fall_through);
3421 __ JumpIfSmi(r0, if_false);
3422 __ CompareObjectType(r0, r1, r1, SIMD128_VALUE_TYPE);
3423 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3424 Split(eq, if_true, if_false, fall_through);
3426 context()->Plug(if_true, if_false);
3430 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3431 CallRuntime* expr) {
3432 ZoneList<Expression*>* args = expr->arguments();
3433 DCHECK(args->length() == 1);
3435 VisitForAccumulatorValue(args->at(0));
3437 Label materialize_true, materialize_false, skip_lookup;
3438 Label* if_true = NULL;
3439 Label* if_false = NULL;
3440 Label* fall_through = NULL;
3441 context()->PrepareTest(&materialize_true, &materialize_false,
3442 &if_true, &if_false, &fall_through);
3444 __ AssertNotSmi(r0);
3446 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3447 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3448 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3449 __ b(ne, &skip_lookup);
3451 // Check for fast case object. Generate false result for slow case object.
3452 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3453 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3454 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3458 // Look for valueOf name in the descriptor array, and indicate false if
3459 // found. Since we omit an enumeration index check, if it is added via a
3460 // transition that shares its descriptor array, this is a false positive.
3461 Label entry, loop, done;
3463 // Skip loop if no descriptors are valid.
3464 __ NumberOfOwnDescriptors(r3, r1);
3465 __ cmp(r3, Operand::Zero());
3468 __ LoadInstanceDescriptors(r1, r4);
3469 // r4: descriptor array.
3470 // r3: valid entries in the descriptor array.
3471 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3473 // Calculate location of the first key name.
3474 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3475 // Calculate the end of the descriptor array.
3477 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3479 // Loop through all the keys in the descriptor array. If one of these is the
3480 // string "valueOf" the result is false.
3481 // The use of ip to store the valueOf string assumes that it is not otherwise
3482 // used in the loop below.
3483 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3486 __ ldr(r3, MemOperand(r4, 0));
3489 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3491 __ cmp(r4, Operand(r2));
3496 // Set the bit in the map to indicate that there is no local valueOf field.
3497 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3498 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3499 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3501 __ bind(&skip_lookup);
3503 // If a valueOf property is not found on the object check that its
3504 // prototype is the un-modified String prototype. If not result is false.
3505 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3506 __ JumpIfSmi(r2, if_false);
3507 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3508 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3509 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3510 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3512 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3513 Split(eq, if_true, if_false, fall_through);
3515 context()->Plug(if_true, if_false);
3519 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3520 ZoneList<Expression*>* args = expr->arguments();
3521 DCHECK(args->length() == 1);
3523 VisitForAccumulatorValue(args->at(0));
3525 Label materialize_true, materialize_false;
3526 Label* if_true = NULL;
3527 Label* if_false = NULL;
3528 Label* fall_through = NULL;
3529 context()->PrepareTest(&materialize_true, &materialize_false,
3530 &if_true, &if_false, &fall_through);
3532 __ JumpIfSmi(r0, if_false);
3533 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3534 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3535 Split(eq, if_true, if_false, fall_through);
3537 context()->Plug(if_true, if_false);
3541 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3542 ZoneList<Expression*>* args = expr->arguments();
3543 DCHECK(args->length() == 1);
3545 VisitForAccumulatorValue(args->at(0));
3547 Label materialize_true, materialize_false;
3548 Label* if_true = NULL;
3549 Label* if_false = NULL;
3550 Label* fall_through = NULL;
3551 context()->PrepareTest(&materialize_true, &materialize_false,
3552 &if_true, &if_false, &fall_through);
3554 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3555 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3556 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3557 __ cmp(r2, Operand(0x80000000));
3558 __ cmp(r1, Operand(0x00000000), eq);
3560 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3561 Split(eq, if_true, if_false, fall_through);
3563 context()->Plug(if_true, if_false);
3567 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3568 ZoneList<Expression*>* args = expr->arguments();
3569 DCHECK(args->length() == 1);
3571 VisitForAccumulatorValue(args->at(0));
3573 Label materialize_true, materialize_false;
3574 Label* if_true = NULL;
3575 Label* if_false = NULL;
3576 Label* fall_through = NULL;
3577 context()->PrepareTest(&materialize_true, &materialize_false,
3578 &if_true, &if_false, &fall_through);
3580 __ JumpIfSmi(r0, if_false);
3581 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3582 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3583 Split(eq, if_true, if_false, fall_through);
3585 context()->Plug(if_true, if_false);
3589 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3590 ZoneList<Expression*>* args = expr->arguments();
3591 DCHECK(args->length() == 1);
3593 VisitForAccumulatorValue(args->at(0));
3595 Label materialize_true, materialize_false;
3596 Label* if_true = NULL;
3597 Label* if_false = NULL;
3598 Label* fall_through = NULL;
3599 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3600 &if_false, &fall_through);
3602 __ JumpIfSmi(r0, if_false);
3603 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
3604 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3605 Split(eq, if_true, if_false, fall_through);
3607 context()->Plug(if_true, if_false);
3611 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3612 ZoneList<Expression*>* args = expr->arguments();
3613 DCHECK(args->length() == 1);
3615 VisitForAccumulatorValue(args->at(0));
3617 Label materialize_true, materialize_false;
3618 Label* if_true = NULL;
3619 Label* if_false = NULL;
3620 Label* fall_through = NULL;
3621 context()->PrepareTest(&materialize_true, &materialize_false,
3622 &if_true, &if_false, &fall_through);
3624 __ JumpIfSmi(r0, if_false);
3625 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3626 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3627 Split(eq, if_true, if_false, fall_through);
3629 context()->Plug(if_true, if_false);
3633 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3634 ZoneList<Expression*>* args = expr->arguments();
3635 DCHECK(args->length() == 1);
3637 VisitForAccumulatorValue(args->at(0));
3639 Label materialize_true, materialize_false;
3640 Label* if_true = NULL;
3641 Label* if_false = NULL;
3642 Label* fall_through = NULL;
3643 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3644 &if_false, &fall_through);
3646 __ JumpIfSmi(r0, if_false);
3648 Register type_reg = r2;
3649 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset));
3650 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3651 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3652 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3653 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3654 Split(ls, if_true, if_false, fall_through);
3656 context()->Plug(if_true, if_false);
3660 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3661 DCHECK(expr->arguments()->length() == 0);
3663 Label materialize_true, materialize_false;
3664 Label* if_true = NULL;
3665 Label* if_false = NULL;
3666 Label* fall_through = NULL;
3667 context()->PrepareTest(&materialize_true, &materialize_false,
3668 &if_true, &if_false, &fall_through);
3670 // Get the frame pointer for the calling frame.
3671 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3673 // Skip the arguments adaptor frame if it exists.
3674 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3675 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3676 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3678 // Check the marker in the calling frame.
3679 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3680 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3681 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3682 Split(eq, if_true, if_false, fall_through);
3684 context()->Plug(if_true, if_false);
3688 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 DCHECK(args->length() == 2);
3692 // Load the two objects into registers and perform the comparison.
3693 VisitForStackValue(args->at(0));
3694 VisitForAccumulatorValue(args->at(1));
3696 Label materialize_true, materialize_false;
3697 Label* if_true = NULL;
3698 Label* if_false = NULL;
3699 Label* fall_through = NULL;
3700 context()->PrepareTest(&materialize_true, &materialize_false,
3701 &if_true, &if_false, &fall_through);
3705 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3706 Split(eq, if_true, if_false, fall_through);
3708 context()->Plug(if_true, if_false);
3712 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3713 ZoneList<Expression*>* args = expr->arguments();
3714 DCHECK(args->length() == 1);
3716 // ArgumentsAccessStub expects the key in edx and the formal
3717 // parameter count in r0.
3718 VisitForAccumulatorValue(args->at(0));
3720 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3721 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3723 context()->Plug(r0);
3727 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3728 DCHECK(expr->arguments()->length() == 0);
3730 // Get the number of formal parameters.
3731 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3733 // Check if the calling frame is an arguments adaptor frame.
3734 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3735 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3736 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3738 // Arguments adaptor case: Read the arguments length from the
3740 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3742 context()->Plug(r0);
3746 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3747 ZoneList<Expression*>* args = expr->arguments();
3748 DCHECK(args->length() == 1);
3749 Label done, null, function, non_function_constructor;
3751 VisitForAccumulatorValue(args->at(0));
3753 // If the object is a smi, we return null.
3754 __ JumpIfSmi(r0, &null);
3756 // Check that the object is a JS object but take special care of JS
3757 // functions to make sure they have 'Function' as their class.
3758 // Assume that there are only two callable types, and one of them is at
3759 // either end of the type range for JS object types. Saves extra comparisons.
3760 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3761 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3762 // Map is now in r0.
3764 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3765 FIRST_SPEC_OBJECT_TYPE + 1);
3766 __ b(eq, &function);
3768 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3769 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3770 LAST_SPEC_OBJECT_TYPE - 1);
3771 __ b(eq, &function);
3772 // Assume that there is no larger type.
3773 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3775 // Check if the constructor in the map is a JS function.
3776 Register instance_type = r2;
3777 __ GetMapConstructor(r0, r0, r1, instance_type);
3778 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3779 __ b(ne, &non_function_constructor);
3781 // r0 now contains the constructor function. Grab the
3782 // instance class name from there.
3783 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3784 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3787 // Functions have class 'Function'.
3789 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3792 // Objects with a non-function constructor have class 'Object'.
3793 __ bind(&non_function_constructor);
3794 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3797 // Non-JS objects have class null.
3799 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3804 context()->Plug(r0);
3808 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3809 ZoneList<Expression*>* args = expr->arguments();
3810 DCHECK(args->length() == 1);
3811 VisitForAccumulatorValue(args->at(0)); // Load the object.
3814 // If the object is a smi return the object.
3815 __ JumpIfSmi(r0, &done);
3816 // If the object is not a value type, return the object.
3817 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3818 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3821 context()->Plug(r0);
3825 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3826 ZoneList<Expression*>* args = expr->arguments();
3827 DCHECK_EQ(1, args->length());
3829 VisitForAccumulatorValue(args->at(0));
3831 Label materialize_true, materialize_false;
3832 Label* if_true = nullptr;
3833 Label* if_false = nullptr;
3834 Label* fall_through = nullptr;
3835 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3836 &if_false, &fall_through);
3838 __ JumpIfSmi(r0, if_false);
3839 __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE);
3840 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3841 Split(eq, if_true, if_false, fall_through);
3843 context()->Plug(if_true, if_false);
3847 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3848 ZoneList<Expression*>* args = expr->arguments();
3849 DCHECK(args->length() == 2);
3850 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3851 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3853 VisitForAccumulatorValue(args->at(0)); // Load the object.
3855 Register object = r0;
3856 Register result = r0;
3857 Register scratch0 = r9;
3858 Register scratch1 = r1;
3860 if (index->value() == 0) {
3861 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3863 Label runtime, done;
3864 if (index->value() < JSDate::kFirstUncachedField) {
3865 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3866 __ mov(scratch1, Operand(stamp));
3867 __ ldr(scratch1, MemOperand(scratch1));
3868 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3869 __ cmp(scratch1, scratch0);
3871 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3872 kPointerSize * index->value()));
3876 __ PrepareCallCFunction(2, scratch1);
3877 __ mov(r1, Operand(index));
3878 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3882 context()->Plug(result);
3886 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3887 ZoneList<Expression*>* args = expr->arguments();
3888 DCHECK_EQ(3, args->length());
3890 Register string = r0;
3891 Register index = r1;
3892 Register value = r2;
3894 VisitForStackValue(args->at(0)); // index
3895 VisitForStackValue(args->at(1)); // value
3896 VisitForAccumulatorValue(args->at(2)); // string
3897 __ Pop(index, value);
3899 if (FLAG_debug_code) {
3901 __ Check(eq, kNonSmiValue);
3903 __ Check(eq, kNonSmiIndex);
3904 __ SmiUntag(index, index);
3905 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3906 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3907 __ SmiTag(index, index);
3910 __ SmiUntag(value, value);
3913 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3914 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3915 context()->Plug(string);
3919 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3920 ZoneList<Expression*>* args = expr->arguments();
3921 DCHECK_EQ(3, args->length());
3923 Register string = r0;
3924 Register index = r1;
3925 Register value = r2;
3927 VisitForStackValue(args->at(0)); // index
3928 VisitForStackValue(args->at(1)); // value
3929 VisitForAccumulatorValue(args->at(2)); // string
3930 __ Pop(index, value);
3932 if (FLAG_debug_code) {
3934 __ Check(eq, kNonSmiValue);
3936 __ Check(eq, kNonSmiIndex);
3937 __ SmiUntag(index, index);
3938 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3939 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3940 __ SmiTag(index, index);
3943 __ SmiUntag(value, value);
3946 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3947 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3948 __ strh(value, MemOperand(ip, index));
3949 context()->Plug(string);
3953 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3954 ZoneList<Expression*>* args = expr->arguments();
3955 DCHECK(args->length() == 2);
3956 VisitForStackValue(args->at(0)); // Load the object.
3957 VisitForAccumulatorValue(args->at(1)); // Load the value.
3958 __ pop(r1); // r0 = value. r1 = object.
3961 // If the object is a smi, return the value.
3962 __ JumpIfSmi(r1, &done);
3964 // If the object is not a value type, return the value.
3965 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3969 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3970 // Update the write barrier. Save the value as it will be
3971 // overwritten by the write barrier code and is needed afterward.
3973 __ RecordWriteField(
3974 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3977 context()->Plug(r0);
3981 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3982 ZoneList<Expression*>* args = expr->arguments();
3983 DCHECK_EQ(args->length(), 1);
3984 // Load the argument into r0 and call the stub.
3985 VisitForAccumulatorValue(args->at(0));
3987 NumberToStringStub stub(isolate());
3989 context()->Plug(r0);
3993 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3994 ZoneList<Expression*>* args = expr->arguments();
3995 DCHECK_EQ(1, args->length());
3997 // Load the argument into r0 and convert it.
3998 VisitForAccumulatorValue(args->at(0));
4000 ToObjectStub stub(isolate());
4002 context()->Plug(r0);
4006 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4007 ZoneList<Expression*>* args = expr->arguments();
4008 DCHECK(args->length() == 1);
4009 VisitForAccumulatorValue(args->at(0));
4012 StringCharFromCodeGenerator generator(r0, r1);
4013 generator.GenerateFast(masm_);
4016 NopRuntimeCallHelper call_helper;
4017 generator.GenerateSlow(masm_, call_helper);
4020 context()->Plug(r1);
4024 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4025 ZoneList<Expression*>* args = expr->arguments();
4026 DCHECK(args->length() == 2);
4027 VisitForStackValue(args->at(0));
4028 VisitForAccumulatorValue(args->at(1));
4030 Register object = r1;
4031 Register index = r0;
4032 Register result = r3;
4036 Label need_conversion;
4037 Label index_out_of_range;
4039 StringCharCodeAtGenerator generator(object,
4044 &index_out_of_range,
4045 STRING_INDEX_IS_NUMBER);
4046 generator.GenerateFast(masm_);
4049 __ bind(&index_out_of_range);
4050 // When the index is out of range, the spec requires us to return
4052 __ LoadRoot(result, Heap::kNanValueRootIndex);
4055 __ bind(&need_conversion);
4056 // Load the undefined value into the result register, which will
4057 // trigger conversion.
4058 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4061 NopRuntimeCallHelper call_helper;
4062 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4065 context()->Plug(result);
4069 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4070 ZoneList<Expression*>* args = expr->arguments();
4071 DCHECK(args->length() == 2);
4072 VisitForStackValue(args->at(0));
4073 VisitForAccumulatorValue(args->at(1));
4075 Register object = r1;
4076 Register index = r0;
4077 Register scratch = r3;
4078 Register result = r0;
4082 Label need_conversion;
4083 Label index_out_of_range;
4085 StringCharAtGenerator generator(object,
4091 &index_out_of_range,
4092 STRING_INDEX_IS_NUMBER);
4093 generator.GenerateFast(masm_);
4096 __ bind(&index_out_of_range);
4097 // When the index is out of range, the spec requires us to return
4098 // the empty string.
4099 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4102 __ bind(&need_conversion);
4103 // Move smi zero into the result register, which will trigger
4105 __ mov(result, Operand(Smi::FromInt(0)));
4108 NopRuntimeCallHelper call_helper;
4109 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4112 context()->Plug(result);
4116 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4117 ZoneList<Expression*>* args = expr->arguments();
4118 DCHECK_EQ(2, args->length());
4119 VisitForStackValue(args->at(0));
4120 VisitForAccumulatorValue(args->at(1));
4123 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4125 context()->Plug(r0);
4129 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4130 ZoneList<Expression*>* args = expr->arguments();
4131 DCHECK(args->length() >= 2);
4133 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4134 for (int i = 0; i < arg_count + 1; i++) {
4135 VisitForStackValue(args->at(i));
4137 VisitForAccumulatorValue(args->last()); // Function.
4139 Label runtime, done;
4140 // Check for non-function argument (including proxy).
4141 __ JumpIfSmi(r0, &runtime);
4142 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4145 // InvokeFunction requires the function in r1. Move it in there.
4146 __ mov(r1, result_register());
4147 ParameterCount count(arg_count);
4148 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
4149 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4154 __ CallRuntime(Runtime::kCall, args->length());
4157 context()->Plug(r0);
4161 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4162 ZoneList<Expression*>* args = expr->arguments();
4163 DCHECK(args->length() == 2);
4166 VisitForStackValue(args->at(0));
4169 VisitForStackValue(args->at(1));
4170 __ CallRuntime(Runtime::kGetPrototype, 1);
4171 __ Push(result_register());
4173 // Load original constructor into r4.
4174 __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
4176 // Check if the calling frame is an arguments adaptor frame.
4177 Label adaptor_frame, args_set_up, runtime;
4178 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4179 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4180 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4181 __ b(eq, &adaptor_frame);
4182 // default constructor has no arguments, so no adaptor frame means no args.
4183 __ mov(r0, Operand::Zero());
4186 // Copy arguments from adaptor frame.
4188 __ bind(&adaptor_frame);
4189 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4190 __ SmiUntag(r1, r1);
4193 // Get arguments pointer in r2.
4194 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
4195 __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
4198 // Pre-decrement r2 with kPointerSize on each iteration.
4199 // Pre-decrement in order to skip receiver.
4200 __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
4202 __ sub(r1, r1, Operand(1));
4203 __ cmp(r1, Operand::Zero());
4207 __ bind(&args_set_up);
4208 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4209 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
4211 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4212 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4216 context()->Plug(result_register());
4220 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4221 RegExpConstructResultStub stub(isolate());
4222 ZoneList<Expression*>* args = expr->arguments();
4223 DCHECK(args->length() == 3);
4224 VisitForStackValue(args->at(0));
4225 VisitForStackValue(args->at(1));
4226 VisitForAccumulatorValue(args->at(2));
4230 context()->Plug(r0);
4234 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4235 ZoneList<Expression*>* args = expr->arguments();
4236 VisitForAccumulatorValue(args->at(0));
4238 Label materialize_true, materialize_false;
4239 Label* if_true = NULL;
4240 Label* if_false = NULL;
4241 Label* fall_through = NULL;
4242 context()->PrepareTest(&materialize_true, &materialize_false,
4243 &if_true, &if_false, &fall_through);
4245 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4246 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
4247 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4248 Split(eq, if_true, if_false, fall_through);
4250 context()->Plug(if_true, if_false);
4254 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4255 ZoneList<Expression*>* args = expr->arguments();
4256 DCHECK(args->length() == 1);
4257 VisitForAccumulatorValue(args->at(0));
4259 __ AssertString(r0);
4261 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
4262 __ IndexFromHash(r0, r0);
4264 context()->Plug(r0);
4268 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4269 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4270 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4271 one_char_separator_loop_entry, long_separator_loop;
4272 ZoneList<Expression*>* args = expr->arguments();
4273 DCHECK(args->length() == 2);
4274 VisitForStackValue(args->at(1));
4275 VisitForAccumulatorValue(args->at(0));
4277 // All aliases of the same register have disjoint lifetimes.
4278 Register array = r0;
4279 Register elements = no_reg; // Will be r0.
4280 Register result = no_reg; // Will be r0.
4281 Register separator = r1;
4282 Register array_length = r2;
4283 Register result_pos = no_reg; // Will be r2
4284 Register string_length = r3;
4285 Register string = r4;
4286 Register element = r5;
4287 Register elements_end = r6;
4288 Register scratch = r9;
4290 // Separator operand is on the stack.
4293 // Check that the array is a JSArray.
4294 __ JumpIfSmi(array, &bailout);
4295 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
4298 // Check that the array has fast elements.
4299 __ CheckFastElements(scratch, array_length, &bailout);
4301 // If the array has length zero, return the empty string.
4302 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4303 __ SmiUntag(array_length, SetCC);
4304 __ b(ne, &non_trivial_array);
4305 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
4308 __ bind(&non_trivial_array);
4310 // Get the FixedArray containing array's elements.
4312 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4313 array = no_reg; // End of array's live range.
4315 // Check that all array elements are sequential one-byte strings, and
4316 // accumulate the sum of their lengths, as a smi-encoded value.
4317 __ mov(string_length, Operand::Zero());
4319 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4320 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4321 // Loop condition: while (element < elements_end).
4322 // Live values in registers:
4323 // elements: Fixed array of strings.
4324 // array_length: Length of the fixed array of strings (not smi)
4325 // separator: Separator string
4326 // string_length: Accumulated sum of string lengths (smi).
4327 // element: Current array element.
4328 // elements_end: Array end.
4329 if (generate_debug_code_) {
4330 __ cmp(array_length, Operand::Zero());
4331 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4334 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4335 __ JumpIfSmi(string, &bailout);
4336 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
4337 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4338 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4339 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4340 __ add(string_length, string_length, Operand(scratch), SetCC);
4342 __ cmp(element, elements_end);
4345 // If array_length is 1, return elements[0], a string.
4346 __ cmp(array_length, Operand(1));
4347 __ b(ne, ¬_size_one_array);
4348 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4351 __ bind(¬_size_one_array);
4353 // Live values in registers:
4354 // separator: Separator string
4355 // array_length: Length of the array.
4356 // string_length: Sum of string lengths (smi).
4357 // elements: FixedArray of strings.
4359 // Check that the separator is a flat one-byte string.
4360 __ JumpIfSmi(separator, &bailout);
4361 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4362 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4363 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4365 // Add (separator length times array_length) - separator length to the
4366 // string_length to get the length of the result string. array_length is not
4367 // smi but the other values are, so the result is a smi
4368 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4369 __ sub(string_length, string_length, Operand(scratch));
4370 __ smull(scratch, ip, array_length, scratch);
4371 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4373 __ cmp(ip, Operand::Zero());
4375 __ tst(scratch, Operand(0x80000000));
4377 __ add(string_length, string_length, Operand(scratch), SetCC);
4379 __ SmiUntag(string_length);
4381 // Get first element in the array to free up the elements register to be used
4384 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4385 result = elements; // End of live range for elements.
4387 // Live values in registers:
4388 // element: First array element
4389 // separator: Separator string
4390 // string_length: Length of result string (not smi)
4391 // array_length: Length of the array.
4392 __ AllocateOneByteString(result, string_length, scratch,
4393 string, // used as scratch
4394 elements_end, // used as scratch
4396 // Prepare for looping. Set up elements_end to end of the array. Set
4397 // result_pos to the position of the result where to write the first
4399 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4400 result_pos = array_length; // End of live range for array_length.
4401 array_length = no_reg;
4404 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4406 // Check the length of the separator.
4407 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4408 __ cmp(scratch, Operand(Smi::FromInt(1)));
4409 __ b(eq, &one_char_separator);
4410 __ b(gt, &long_separator);
4412 // Empty separator case
4413 __ bind(&empty_separator_loop);
4414 // Live values in registers:
4415 // result_pos: the position to which we are currently copying characters.
4416 // element: Current array element.
4417 // elements_end: Array end.
4419 // Copy next array element to the result.
4420 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4421 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4422 __ SmiUntag(string_length);
4425 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4426 __ CopyBytes(string, result_pos, string_length, scratch);
4427 __ cmp(element, elements_end);
4428 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4429 DCHECK(result.is(r0));
4432 // One-character separator case
4433 __ bind(&one_char_separator);
4434 // Replace separator with its one-byte character value.
4435 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4436 // Jump into the loop after the code that copies the separator, so the first
4437 // element is not preceded by a separator
4438 __ jmp(&one_char_separator_loop_entry);
4440 __ bind(&one_char_separator_loop);
4441 // Live values in registers:
4442 // result_pos: the position to which we are currently copying characters.
4443 // element: Current array element.
4444 // elements_end: Array end.
4445 // separator: Single separator one-byte char (in lower byte).
4447 // Copy the separator character to the result.
4448 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4450 // Copy next array element to the result.
4451 __ bind(&one_char_separator_loop_entry);
4452 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4453 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4454 __ SmiUntag(string_length);
4457 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4458 __ CopyBytes(string, result_pos, string_length, scratch);
4459 __ cmp(element, elements_end);
4460 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4461 DCHECK(result.is(r0));
4464 // Long separator case (separator is more than one character). Entry is at the
4465 // label long_separator below.
4466 __ bind(&long_separator_loop);
4467 // Live values in registers:
4468 // result_pos: the position to which we are currently copying characters.
4469 // element: Current array element.
4470 // elements_end: Array end.
4471 // separator: Separator string.
4473 // Copy the separator to the result.
4474 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4475 __ SmiUntag(string_length);
4478 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4479 __ CopyBytes(string, result_pos, string_length, scratch);
4481 __ bind(&long_separator);
4482 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4483 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4484 __ SmiUntag(string_length);
4487 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4488 __ CopyBytes(string, result_pos, string_length, scratch);
4489 __ cmp(element, elements_end);
4490 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4491 DCHECK(result.is(r0));
4495 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4497 context()->Plug(r0);
4501 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4502 DCHECK(expr->arguments()->length() == 0);
4503 ExternalReference debug_is_active =
4504 ExternalReference::debug_is_active_address(isolate());
4505 __ mov(ip, Operand(debug_is_active));
4506 __ ldrb(r0, MemOperand(ip));
4508 context()->Plug(r0);
4512 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4513 // Push the builtins object as the receiver.
4514 Register receiver = LoadDescriptor::ReceiverRegister();
4515 __ ldr(receiver, GlobalObjectOperand());
4516 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4519 // Load the function from the receiver.
4520 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4521 __ mov(LoadDescriptor::SlotRegister(),
4522 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4523 CallLoadIC(NOT_INSIDE_TYPEOF);
4527 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4528 ZoneList<Expression*>* args = expr->arguments();
4529 int arg_count = args->length();
4531 SetCallPosition(expr, arg_count);
4532 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4533 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4538 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4539 ZoneList<Expression*>* args = expr->arguments();
4540 int arg_count = args->length();
4542 if (expr->is_jsruntime()) {
4543 Comment cmnt(masm_, "[ CallRuntime");
4544 EmitLoadJSRuntimeFunction(expr);
4546 // Push the target function under the receiver.
4547 __ ldr(ip, MemOperand(sp, 0));
4549 __ str(r0, MemOperand(sp, kPointerSize));
4551 // Push the arguments ("left-to-right").
4552 for (int i = 0; i < arg_count; i++) {
4553 VisitForStackValue(args->at(i));
4556 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4557 EmitCallJSRuntimeFunction(expr);
4559 // Restore context register.
4560 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4562 context()->DropAndPlug(1, r0);
4565 const Runtime::Function* function = expr->function();
4566 switch (function->function_id) {
4567 #define CALL_INTRINSIC_GENERATOR(Name) \
4568 case Runtime::kInline##Name: { \
4569 Comment cmnt(masm_, "[ Inline" #Name); \
4570 return Emit##Name(expr); \
4572 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4573 #undef CALL_INTRINSIC_GENERATOR
4575 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4576 // Push the arguments ("left-to-right").
4577 for (int i = 0; i < arg_count; i++) {
4578 VisitForStackValue(args->at(i));
4581 // Call the C runtime function.
4582 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4583 __ CallRuntime(expr->function(), arg_count);
4584 context()->Plug(r0);
4591 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4592 switch (expr->op()) {
4593 case Token::DELETE: {
4594 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4595 Property* property = expr->expression()->AsProperty();
4596 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4598 if (property != NULL) {
4599 VisitForStackValue(property->obj());
4600 VisitForStackValue(property->key());
4601 __ CallRuntime(is_strict(language_mode())
4602 ? Runtime::kDeleteProperty_Strict
4603 : Runtime::kDeleteProperty_Sloppy,
4605 context()->Plug(r0);
4606 } else if (proxy != NULL) {
4607 Variable* var = proxy->var();
4608 // Delete of an unqualified identifier is disallowed in strict mode but
4609 // "delete this" is allowed.
4610 bool is_this = var->HasThisName(isolate());
4611 DCHECK(is_sloppy(language_mode()) || is_this);
4612 if (var->IsUnallocatedOrGlobalSlot()) {
4613 __ ldr(r2, GlobalObjectOperand());
4614 __ mov(r1, Operand(var->name()));
4616 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4617 context()->Plug(r0);
4618 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4619 // Result of deleting non-global, non-dynamic variables is false.
4620 // The subexpression does not have side effects.
4621 context()->Plug(is_this);
4623 // Non-global variable. Call the runtime to try to delete from the
4624 // context where the variable was introduced.
4625 DCHECK(!context_register().is(r2));
4626 __ mov(r2, Operand(var->name()));
4627 __ Push(context_register(), r2);
4628 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4629 context()->Plug(r0);
4632 // Result of deleting non-property, non-variable reference is true.
4633 // The subexpression may have side effects.
4634 VisitForEffect(expr->expression());
4635 context()->Plug(true);
4641 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4642 VisitForEffect(expr->expression());
4643 context()->Plug(Heap::kUndefinedValueRootIndex);
4648 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4649 if (context()->IsEffect()) {
4650 // Unary NOT has no side effects so it's only necessary to visit the
4651 // subexpression. Match the optimizing compiler by not branching.
4652 VisitForEffect(expr->expression());
4653 } else if (context()->IsTest()) {
4654 const TestContext* test = TestContext::cast(context());
4655 // The labels are swapped for the recursive call.
4656 VisitForControl(expr->expression(),
4657 test->false_label(),
4659 test->fall_through());
4660 context()->Plug(test->true_label(), test->false_label());
4662 // We handle value contexts explicitly rather than simply visiting
4663 // for control and plugging the control flow into the context,
4664 // because we need to prepare a pair of extra administrative AST ids
4665 // for the optimizing compiler.
4666 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4667 Label materialize_true, materialize_false, done;
4668 VisitForControl(expr->expression(),
4672 __ bind(&materialize_true);
4673 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4674 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4675 if (context()->IsStackValue()) __ push(r0);
4677 __ bind(&materialize_false);
4678 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4679 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4680 if (context()->IsStackValue()) __ push(r0);
4686 case Token::TYPEOF: {
4687 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4689 AccumulatorValueContext context(this);
4690 VisitForTypeofValue(expr->expression());
4693 TypeofStub typeof_stub(isolate());
4694 __ CallStub(&typeof_stub);
4695 context()->Plug(r0);
4705 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4706 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4708 Comment cmnt(masm_, "[ CountOperation");
4710 Property* prop = expr->expression()->AsProperty();
4711 LhsKind assign_type = Property::GetAssignType(prop);
4713 // Evaluate expression and get value.
4714 if (assign_type == VARIABLE) {
4715 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4716 AccumulatorValueContext context(this);
4717 EmitVariableLoad(expr->expression()->AsVariableProxy());
4719 // Reserve space for result of postfix operation.
4720 if (expr->is_postfix() && !context()->IsEffect()) {
4721 __ mov(ip, Operand(Smi::FromInt(0)));
4724 switch (assign_type) {
4725 case NAMED_PROPERTY: {
4726 // Put the object both on the stack and in the register.
4727 VisitForStackValue(prop->obj());
4728 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4729 EmitNamedPropertyLoad(prop);
4733 case NAMED_SUPER_PROPERTY: {
4734 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4735 VisitForAccumulatorValue(
4736 prop->obj()->AsSuperPropertyReference()->home_object());
4737 __ Push(result_register());
4738 const Register scratch = r1;
4739 __ ldr(scratch, MemOperand(sp, kPointerSize));
4741 __ Push(result_register());
4742 EmitNamedSuperPropertyLoad(prop);
4746 case KEYED_SUPER_PROPERTY: {
4747 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4749 prop->obj()->AsSuperPropertyReference()->home_object());
4750 VisitForAccumulatorValue(prop->key());
4751 __ Push(result_register());
4752 const Register scratch = r1;
4753 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4755 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4757 __ Push(result_register());
4758 EmitKeyedSuperPropertyLoad(prop);
4762 case KEYED_PROPERTY: {
4763 VisitForStackValue(prop->obj());
4764 VisitForStackValue(prop->key());
4765 __ ldr(LoadDescriptor::ReceiverRegister(),
4766 MemOperand(sp, 1 * kPointerSize));
4767 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4768 EmitKeyedPropertyLoad(prop);
4777 // We need a second deoptimization point after loading the value
4778 // in case evaluating the property load my have a side effect.
4779 if (assign_type == VARIABLE) {
4780 PrepareForBailout(expr->expression(), TOS_REG);
4782 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4785 // Inline smi case if we are in a loop.
4786 Label stub_call, done;
4787 JumpPatchSite patch_site(masm_);
4789 int count_value = expr->op() == Token::INC ? 1 : -1;
4790 if (ShouldInlineSmiCase(expr->op())) {
4792 patch_site.EmitJumpIfNotSmi(r0, &slow);
4794 // Save result for postfix expressions.
4795 if (expr->is_postfix()) {
4796 if (!context()->IsEffect()) {
4797 // Save the result on the stack. If we have a named or keyed property
4798 // we store the result under the receiver that is currently on top
4800 switch (assign_type) {
4804 case NAMED_PROPERTY:
4805 __ str(r0, MemOperand(sp, kPointerSize));
4807 case NAMED_SUPER_PROPERTY:
4808 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4810 case KEYED_PROPERTY:
4811 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4813 case KEYED_SUPER_PROPERTY:
4814 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4820 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4822 // Call stub. Undo operation first.
4823 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4827 if (!is_strong(language_mode())) {
4828 ToNumberStub convert_stub(isolate());
4829 __ CallStub(&convert_stub);
4830 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4833 // Save result for postfix expressions.
4834 if (expr->is_postfix()) {
4835 if (!context()->IsEffect()) {
4836 // Save the result on the stack. If we have a named or keyed property
4837 // we store the result under the receiver that is currently on top
4839 switch (assign_type) {
4843 case NAMED_PROPERTY:
4844 __ str(r0, MemOperand(sp, kPointerSize));
4846 case NAMED_SUPER_PROPERTY:
4847 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4849 case KEYED_PROPERTY:
4850 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4852 case KEYED_SUPER_PROPERTY:
4853 __ str(r0, MemOperand(sp, 3 * kPointerSize));
4860 __ bind(&stub_call);
4862 __ mov(r0, Operand(Smi::FromInt(count_value)));
4864 SetExpressionPosition(expr);
4866 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4867 strength(language_mode())).code();
4868 CallIC(code, expr->CountBinOpFeedbackId());
4869 patch_site.EmitPatchInfo();
4872 if (is_strong(language_mode())) {
4873 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4875 // Store the value returned in r0.
4876 switch (assign_type) {
4878 if (expr->is_postfix()) {
4879 { EffectContext context(this);
4880 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4881 Token::ASSIGN, expr->CountSlot());
4882 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4885 // For all contexts except EffectConstant We have the result on
4886 // top of the stack.
4887 if (!context()->IsEffect()) {
4888 context()->PlugTOS();
4891 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4892 Token::ASSIGN, expr->CountSlot());
4893 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4894 context()->Plug(r0);
4897 case NAMED_PROPERTY: {
4898 __ mov(StoreDescriptor::NameRegister(),
4899 Operand(prop->key()->AsLiteral()->value()));
4900 __ pop(StoreDescriptor::ReceiverRegister());
4901 if (FLAG_vector_stores) {
4902 EmitLoadStoreICSlot(expr->CountSlot());
4905 CallStoreIC(expr->CountStoreFeedbackId());
4907 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4908 if (expr->is_postfix()) {
4909 if (!context()->IsEffect()) {
4910 context()->PlugTOS();
4913 context()->Plug(r0);
4917 case NAMED_SUPER_PROPERTY: {
4918 EmitNamedSuperPropertyStore(prop);
4919 if (expr->is_postfix()) {
4920 if (!context()->IsEffect()) {
4921 context()->PlugTOS();
4924 context()->Plug(r0);
4928 case KEYED_SUPER_PROPERTY: {
4929 EmitKeyedSuperPropertyStore(prop);
4930 if (expr->is_postfix()) {
4931 if (!context()->IsEffect()) {
4932 context()->PlugTOS();
4935 context()->Plug(r0);
4939 case KEYED_PROPERTY: {
4940 __ Pop(StoreDescriptor::ReceiverRegister(),
4941 StoreDescriptor::NameRegister());
4943 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4944 if (FLAG_vector_stores) {
4945 EmitLoadStoreICSlot(expr->CountSlot());
4948 CallIC(ic, expr->CountStoreFeedbackId());
4950 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4951 if (expr->is_postfix()) {
4952 if (!context()->IsEffect()) {
4953 context()->PlugTOS();
4956 context()->Plug(r0);
4964 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4965 Expression* sub_expr,
4966 Handle<String> check) {
4967 Label materialize_true, materialize_false;
4968 Label* if_true = NULL;
4969 Label* if_false = NULL;
4970 Label* fall_through = NULL;
4971 context()->PrepareTest(&materialize_true, &materialize_false,
4972 &if_true, &if_false, &fall_through);
4974 { AccumulatorValueContext context(this);
4975 VisitForTypeofValue(sub_expr);
4977 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4979 Factory* factory = isolate()->factory();
4980 if (String::Equals(check, factory->number_string())) {
4981 __ JumpIfSmi(r0, if_true);
4982 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4983 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4985 Split(eq, if_true, if_false, fall_through);
4986 } else if (String::Equals(check, factory->string_string())) {
4987 __ JumpIfSmi(r0, if_false);
4988 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4989 Split(lt, if_true, if_false, fall_through);
4990 } else if (String::Equals(check, factory->symbol_string())) {
4991 __ JumpIfSmi(r0, if_false);
4992 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4993 Split(eq, if_true, if_false, fall_through);
4994 } else if (String::Equals(check, factory->boolean_string())) {
4995 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4997 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4998 Split(eq, if_true, if_false, fall_through);
4999 } else if (String::Equals(check, factory->undefined_string())) {
5000 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
5002 __ JumpIfSmi(r0, if_false);
5003 // Check for undetectable objects => true.
5004 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5005 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5006 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5007 Split(ne, if_true, if_false, fall_through);
5009 } else if (String::Equals(check, factory->function_string())) {
5010 __ JumpIfSmi(r0, if_false);
5011 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5012 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
5014 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
5015 Split(eq, if_true, if_false, fall_through);
5016 } else if (String::Equals(check, factory->object_string())) {
5017 __ JumpIfSmi(r0, if_false);
5018 __ CompareRoot(r0, Heap::kNullValueRootIndex);
5020 // Check for JS objects => true.
5021 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5023 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5025 // Check for undetectable objects => false.
5026 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
5027 __ tst(r1, Operand(1 << Map::kIsUndetectable));
5028 Split(eq, if_true, if_false, fall_through);
5030 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
5031 } else if (String::Equals(check, factory->type##_string())) { \
5032 __ JumpIfSmi(r0, if_false); \
5033 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); \
5034 __ CompareRoot(r0, Heap::k##Type##MapRootIndex); \
5035 Split(eq, if_true, if_false, fall_through);
5036 SIMD128_TYPES(SIMD128_TYPE)
5040 if (if_false != fall_through) __ jmp(if_false);
5042 context()->Plug(if_true, if_false);
5046 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5047 Comment cmnt(masm_, "[ CompareOperation");
5048 SetExpressionPosition(expr);
5050 // First we try a fast inlined version of the compare when one of
5051 // the operands is a literal.
5052 if (TryLiteralCompare(expr)) return;
5054 // Always perform the comparison for its control flow. Pack the result
5055 // into the expression's context after the comparison is performed.
5056 Label materialize_true, materialize_false;
5057 Label* if_true = NULL;
5058 Label* if_false = NULL;
5059 Label* fall_through = NULL;
5060 context()->PrepareTest(&materialize_true, &materialize_false,
5061 &if_true, &if_false, &fall_through);
5063 Token::Value op = expr->op();
5064 VisitForStackValue(expr->left());
5067 VisitForStackValue(expr->right());
5068 __ CallRuntime(Runtime::kHasProperty, 2);
5069 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5070 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5072 Split(eq, if_true, if_false, fall_through);
5075 case Token::INSTANCEOF: {
5076 VisitForStackValue(expr->right());
5077 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5079 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5080 // The stub returns 0 for true.
5082 Split(eq, if_true, if_false, fall_through);
5087 VisitForAccumulatorValue(expr->right());
5088 Condition cond = CompareIC::ComputeCondition(op);
5091 bool inline_smi_code = ShouldInlineSmiCase(op);
5092 JumpPatchSite patch_site(masm_);
5093 if (inline_smi_code) {
5095 __ orr(r2, r0, Operand(r1));
5096 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
5098 Split(cond, if_true, if_false, NULL);
5099 __ bind(&slow_case);
5102 Handle<Code> ic = CodeFactory::CompareIC(
5103 isolate(), op, strength(language_mode())).code();
5104 CallIC(ic, expr->CompareOperationFeedbackId());
5105 patch_site.EmitPatchInfo();
5106 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5107 __ cmp(r0, Operand::Zero());
5108 Split(cond, if_true, if_false, fall_through);
5112 // Convert the result of the comparison into one expected for this
5113 // expression's context.
5114 context()->Plug(if_true, if_false);
5118 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5119 Expression* sub_expr,
5121 Label materialize_true, materialize_false;
5122 Label* if_true = NULL;
5123 Label* if_false = NULL;
5124 Label* fall_through = NULL;
5125 context()->PrepareTest(&materialize_true, &materialize_false,
5126 &if_true, &if_false, &fall_through);
5128 VisitForAccumulatorValue(sub_expr);
5129 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5130 if (expr->op() == Token::EQ_STRICT) {
5131 Heap::RootListIndex nil_value = nil == kNullValue ?
5132 Heap::kNullValueRootIndex :
5133 Heap::kUndefinedValueRootIndex;
5134 __ LoadRoot(r1, nil_value);
5136 Split(eq, if_true, if_false, fall_through);
5138 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5139 CallIC(ic, expr->CompareOperationFeedbackId());
5140 __ cmp(r0, Operand(0));
5141 Split(ne, if_true, if_false, fall_through);
5143 context()->Plug(if_true, if_false);
5147 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5148 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5149 context()->Plug(r0);
5153 Register FullCodeGenerator::result_register() {
5158 Register FullCodeGenerator::context_register() {
5163 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5164 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5165 __ str(value, MemOperand(fp, frame_offset));
5169 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5170 __ ldr(dst, ContextOperand(cp, context_index));
5174 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5175 Scope* closure_scope = scope()->ClosureScope();
5176 if (closure_scope->is_script_scope() ||
5177 closure_scope->is_module_scope()) {
5178 // Contexts nested in the native context have a canonical empty function
5179 // as their closure, not the anonymous closure containing the global
5180 // code. Pass a smi sentinel and let the runtime look up the empty
5182 __ mov(ip, Operand(Smi::FromInt(0)));
5183 } else if (closure_scope->is_eval_scope()) {
5184 // Contexts created by a call to eval have the same closure as the
5185 // context calling eval, not the anonymous closure containing the eval
5186 // code. Fetch it from the context.
5187 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5189 DCHECK(closure_scope->is_function_scope());
5190 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5196 // ----------------------------------------------------------------------------
5197 // Non-local control flow support.
5199 void FullCodeGenerator::EnterFinallyBlock() {
5200 DCHECK(!result_register().is(r1));
5201 // Store result register while executing finally block.
5202 __ push(result_register());
5203 // Cook return address in link register to stack (smi encoded Code* delta)
5204 __ sub(r1, lr, Operand(masm_->CodeObject()));
5207 // Store result register while executing finally block.
5210 // Store pending message while executing finally block.
5211 ExternalReference pending_message_obj =
5212 ExternalReference::address_of_pending_message_obj(isolate());
5213 __ mov(ip, Operand(pending_message_obj));
5214 __ ldr(r1, MemOperand(ip));
5217 ClearPendingMessage();
5221 void FullCodeGenerator::ExitFinallyBlock() {
5222 DCHECK(!result_register().is(r1));
5223 // Restore pending message from stack.
5225 ExternalReference pending_message_obj =
5226 ExternalReference::address_of_pending_message_obj(isolate());
5227 __ mov(ip, Operand(pending_message_obj));
5228 __ str(r1, MemOperand(ip));
5230 // Restore result register from stack.
5233 // Uncook return address and return.
5234 __ pop(result_register());
5236 __ add(pc, r1, Operand(masm_->CodeObject()));
5240 void FullCodeGenerator::ClearPendingMessage() {
5241 DCHECK(!result_register().is(r1));
5242 ExternalReference pending_message_obj =
5243 ExternalReference::address_of_pending_message_obj(isolate());
5244 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
5245 __ mov(ip, Operand(pending_message_obj));
5246 __ str(r1, MemOperand(ip));
5250 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5251 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5252 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5253 Operand(SmiFromSlot(slot)));
5260 static Address GetInterruptImmediateLoadAddress(Address pc) {
5261 Address load_address = pc - 2 * Assembler::kInstrSize;
5262 if (!FLAG_enable_embedded_constant_pool) {
5263 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
5264 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
5265 // This is an extended constant pool lookup.
5266 if (CpuFeatures::IsSupported(ARMv7)) {
5267 load_address -= 2 * Assembler::kInstrSize;
5268 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5269 DCHECK(Assembler::IsMovT(
5270 Memory::int32_at(load_address + Assembler::kInstrSize)));
5272 load_address -= 4 * Assembler::kInstrSize;
5273 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5274 DCHECK(Assembler::IsOrrImmed(
5275 Memory::int32_at(load_address + Assembler::kInstrSize)));
5276 DCHECK(Assembler::IsOrrImmed(
5277 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5278 DCHECK(Assembler::IsOrrImmed(
5279 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
5281 } else if (CpuFeatures::IsSupported(ARMv7) &&
5282 Assembler::IsMovT(Memory::int32_at(load_address))) {
5283 // This is a movw / movt immediate load.
5284 load_address -= Assembler::kInstrSize;
5285 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5286 } else if (!CpuFeatures::IsSupported(ARMv7) &&
5287 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
5288 // This is a mov / orr immediate load.
5289 load_address -= 3 * Assembler::kInstrSize;
5290 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5291 DCHECK(Assembler::IsOrrImmed(
5292 Memory::int32_at(load_address + Assembler::kInstrSize)));
5293 DCHECK(Assembler::IsOrrImmed(
5294 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5296 // This is a small constant pool lookup.
5297 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
5299 return load_address;
5303 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5305 BackEdgeState target_state,
5306 Code* replacement_code) {
5307 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5308 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5309 CodePatcher patcher(branch_address, 1);
5310 switch (target_state) {
5313 // <decrement profiling counter>
5315 // ; load interrupt stub address into ip - either of (for ARMv7):
5316 // ; <small cp load> | <extended cp load> | <immediate load>
5317 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5318 // | movt ip, #imm | movw ip, #imm
5319 // | ldr ip, [pp, ip]
5320 // ; or (for ARMv6):
5321 // ; <small cp load> | <extended cp load> | <immediate load>
5322 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5323 // | orr ip, ip, #imm> | orr ip, ip, #imm
5324 // | orr ip, ip, #imm> | orr ip, ip, #imm
5325 // | orr ip, ip, #imm> | orr ip, ip, #imm
5327 // <reset profiling counter>
5330 // Calculate branch offset to the ok-label - this is the difference
5331 // between the branch address and |pc| (which points at <blx ip>) plus
5332 // kProfileCounterResetSequence instructions
5333 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
5334 kProfileCounterResetSequenceLength;
5335 patcher.masm()->b(branch_offset, pl);
5338 case ON_STACK_REPLACEMENT:
5339 case OSR_AFTER_STACK_CHECK:
5340 // <decrement profiling counter>
5342 // ; load on-stack replacement address into ip - either of (for ARMv7):
5343 // ; <small cp load> | <extended cp load> | <immediate load>
5344 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5345 // | movt ip, #imm> | movw ip, #imm
5346 // | ldr ip, [pp, ip]
5347 // ; or (for ARMv6):
5348 // ; <small cp load> | <extended cp load> | <immediate load>
5349 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5350 // | orr ip, ip, #imm> | orr ip, ip, #imm
5351 // | orr ip, ip, #imm> | orr ip, ip, #imm
5352 // | orr ip, ip, #imm> | orr ip, ip, #imm
5354 // <reset profiling counter>
5356 patcher.masm()->nop();
5360 // Replace the call address.
5361 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
5362 replacement_code->entry());
5364 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5365 unoptimized_code, pc_immediate_load_address, replacement_code);
5369 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5371 Code* unoptimized_code,
5373 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
5375 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5376 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5377 Address interrupt_address = Assembler::target_address_at(
5378 pc_immediate_load_address, unoptimized_code);
5380 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5381 DCHECK(interrupt_address ==
5382 isolate->builtins()->InterruptCheck()->entry());
5386 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5388 if (interrupt_address ==
5389 isolate->builtins()->OnStackReplacement()->entry()) {
5390 return ON_STACK_REPLACEMENT;
5393 DCHECK(interrupt_address ==
5394 isolate->builtins()->OsrAfterStackCheck()->entry());
5395 return OSR_AFTER_STACK_CHECK;
5399 } // namespace internal
5402 #endif // V8_TARGET_ARCH_ARM