1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_MIPS64
9 // Note on Mips implementation:
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
17 #include "src/code-stubs.h"
18 #include "src/codegen.h"
19 #include "src/compiler.h"
20 #include "src/debug.h"
21 #include "src/full-codegen.h"
22 #include "src/isolate-inl.h"
23 #include "src/parser.h"
24 #include "src/scopes.h"
25 #include "src/stub-cache.h"
27 #include "src/mips64/code-stubs-mips64.h"
28 #include "src/mips64/macro-assembler-mips64.h"
33 #define __ ACCESS_MASM(masm_)
36 // A patch site is a location in the code which it is possible to patch. This
37 // class has a number of methods to emit the code which is patchable and the
38 // method EmitPatchInfo to record a marker back to the patchable code. This
39 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
40 // (raw 16 bit immediate value is used) is the delta from the pc to the first
41 // instruction of the patchable code.
42 // The marker instruction is effectively a NOP (dest is zero_reg) and will
43 // never be emitted by normal code.
44 class JumpPatchSite BASE_EMBEDDED {
46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
48 info_emitted_ = false;
53 DCHECK(patch_site_.is_bound() == info_emitted_);
56 // When initially emitting this ensure that a jump is always generated to skip
57 // the inlined smi code.
58 void EmitJumpIfNotSmi(Register reg, Label* target) {
59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
61 __ bind(&patch_site_);
63 // Always taken before patched.
64 __ BranchShort(target, eq, at, Operand(zero_reg));
67 // When initially emitting this ensure that a jump is never generated to skip
68 // the inlined smi code.
69 void EmitJumpIfSmi(Register reg, Label* target) {
70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
71 DCHECK(!patch_site_.is_bound() && !info_emitted_);
72 __ bind(&patch_site_);
74 // Never taken before patched.
75 __ BranchShort(target, ne, at, Operand(zero_reg));
78 void EmitPatchInfo() {
79 if (patch_site_.is_bound()) {
80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
82 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
87 __ nop(); // Signals no inlined code.
92 MacroAssembler* masm_;
100 // Generate code for a JS function. On entry to the function the receiver
101 // and arguments have been pushed on the stack left to right. The actual
102 // argument count matches the formal parameter count expected by the
105 // The live registers are:
106 // o a1: the JS function object being called (i.e. ourselves)
108 // o fp: our caller's frame pointer
109 // o sp: stack pointer
110 // o ra: return address
112 // The function builds a JS frame. Please see JavaScriptFrameConstants in
113 // frames-mips.h for its layout.
114 void FullCodeGenerator::Generate() {
115 CompilationInfo* info = info_;
117 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
119 profiling_counter_ = isolate()->factory()->NewCell(
120 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
121 SetFunctionPosition(function());
122 Comment cmnt(masm_, "[ function compiled by full code generator");
124 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
127 if (strlen(FLAG_stop_at) > 0 &&
128 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
133 // Sloppy mode functions and builtins need to replace the receiver with the
134 // global proxy when called as functions (without an explicit receiver
136 if (info->strict_mode() == SLOPPY && !info->is_native()) {
138 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
139 __ ld(at, MemOperand(sp, receiver_offset));
140 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
141 __ Branch(&ok, ne, a2, Operand(at));
143 __ ld(a2, GlobalObjectOperand());
144 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
146 __ sd(a2, MemOperand(sp, receiver_offset));
149 // Open a frame scope to indicate that there is a frame on the stack. The
150 // MANUAL indicates that the scope shouldn't actually generate code to set up
151 // the frame (that is done below).
152 FrameScope frame_scope(masm_, StackFrame::MANUAL);
153 info->set_prologue_offset(masm_->pc_offset());
154 __ Prologue(info->IsCodePreAgingActive());
155 info->AddNoFrameRange(0, masm_->pc_offset());
157 { Comment cmnt(masm_, "[ Allocate locals");
158 int locals_count = info->scope()->num_stack_slots();
159 // Generators allocate locals, if any, in context slots.
160 DCHECK(!info->function()->is_generator() || locals_count == 0);
161 if (locals_count > 0) {
162 if (locals_count >= 128) {
164 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
165 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
166 __ Branch(&ok, hs, t1, Operand(a2));
167 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
170 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
171 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
172 if (locals_count >= kMaxPushes) {
173 int loop_iterations = locals_count / kMaxPushes;
174 __ li(a2, Operand(loop_iterations));
176 __ bind(&loop_header);
178 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
179 for (int i = 0; i < kMaxPushes; i++) {
180 __ sd(t1, MemOperand(sp, i * kPointerSize));
182 // Continue loop if not done.
183 __ Dsubu(a2, a2, Operand(1));
184 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
186 int remaining = locals_count % kMaxPushes;
187 // Emit the remaining pushes.
188 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
189 for (int i = 0; i < remaining; i++) {
190 __ sd(t1, MemOperand(sp, i * kPointerSize));
195 bool function_in_register = true;
197 // Possibly allocate a local context.
198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
199 if (heap_slots > 0) {
200 Comment cmnt(masm_, "[ Allocate context");
201 // Argument to NewContext is the function, which is still in a1.
202 bool need_write_barrier = true;
203 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
205 __ Push(info->scope()->GetScopeInfo());
206 __ CallRuntime(Runtime::kNewGlobalContext, 2);
207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
208 FastNewContextStub stub(isolate(), heap_slots);
210 // Result of FastNewContextStub is always in new space.
211 need_write_barrier = false;
214 __ CallRuntime(Runtime::kNewFunctionContext, 1);
216 function_in_register = false;
217 // Context is returned in v0. It replaces the context passed to us.
218 // It's saved in the stack and kept live in cp.
220 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
221 // Copy any necessary parameters into the context.
222 int num_parameters = info->scope()->num_parameters();
223 for (int i = 0; i < num_parameters; i++) {
224 Variable* var = scope()->parameter(i);
225 if (var->IsContextSlot()) {
226 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
227 (num_parameters - 1 - i) * kPointerSize;
228 // Load parameter from stack.
229 __ ld(a0, MemOperand(fp, parameter_offset));
230 // Store it in the context.
231 MemOperand target = ContextOperand(cp, var->index());
234 // Update the write barrier.
235 if (need_write_barrier) {
236 __ RecordWriteContextSlot(
237 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
238 } else if (FLAG_debug_code) {
240 __ JumpIfInNewSpace(cp, a0, &done);
241 __ Abort(kExpectedNewSpaceObject);
247 Variable* arguments = scope()->arguments();
248 if (arguments != NULL) {
249 // Function uses arguments object.
250 Comment cmnt(masm_, "[ Allocate arguments object");
251 if (!function_in_register) {
252 // Load this again, if it's used by the local context below.
253 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
257 // Receiver is just before the parameters on the caller's stack.
258 int num_parameters = info->scope()->num_parameters();
259 int offset = num_parameters * kPointerSize;
261 Operand(StandardFrameConstants::kCallerSPOffset + offset));
262 __ li(a1, Operand(Smi::FromInt(num_parameters)));
265 // Arguments to ArgumentsAccessStub:
266 // function, receiver address, parameter count.
267 // The stub will rewrite receiever and parameter count if the previous
268 // stack frame was an arguments adapter frame.
269 ArgumentsAccessStub::Type type;
270 if (strict_mode() == STRICT) {
271 type = ArgumentsAccessStub::NEW_STRICT;
272 } else if (function()->has_duplicate_parameters()) {
273 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
275 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
277 ArgumentsAccessStub stub(isolate(), type);
280 SetVar(arguments, v0, a1, a2);
284 __ CallRuntime(Runtime::kTraceEnter, 0);
286 // Visit the declarations and body unless there is an illegal
288 if (scope()->HasIllegalRedeclaration()) {
289 Comment cmnt(masm_, "[ Declarations");
290 scope()->VisitIllegalRedeclaration(this);
293 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
294 { Comment cmnt(masm_, "[ Declarations");
295 // For named function expressions, declare the function name as a
297 if (scope()->is_function_scope() && scope()->function() != NULL) {
298 VariableDeclaration* function = scope()->function();
299 DCHECK(function->proxy()->var()->mode() == CONST ||
300 function->proxy()->var()->mode() == CONST_LEGACY);
301 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
302 VisitVariableDeclaration(function);
304 VisitDeclarations(scope()->declarations());
306 { Comment cmnt(masm_, "[ Stack check");
307 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
309 __ LoadRoot(at, Heap::kStackLimitRootIndex);
310 __ Branch(&ok, hs, sp, Operand(at));
311 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
312 PredictableCodeSizeScope predictable(masm_,
313 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
314 __ Call(stack_check, RelocInfo::CODE_TARGET);
318 { Comment cmnt(masm_, "[ Body");
319 DCHECK(loop_depth() == 0);
321 VisitStatements(function()->body());
323 DCHECK(loop_depth() == 0);
327 // Always emit a 'return undefined' in case control fell off the end of
329 { Comment cmnt(masm_, "[ return <undefined>;");
330 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
332 EmitReturnSequence();
336 void FullCodeGenerator::ClearAccumulator() {
337 DCHECK(Smi::FromInt(0) == 0);
338 __ mov(v0, zero_reg);
342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
343 __ li(a2, Operand(profiling_counter_));
344 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
345 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
346 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
350 void FullCodeGenerator::EmitProfilingCounterReset() {
351 int reset_value = FLAG_interrupt_budget;
352 if (info_->is_debug()) {
353 // Detect debug break requests as soon as possible.
354 reset_value = FLAG_interrupt_budget >> 4;
356 __ li(a2, Operand(profiling_counter_));
357 __ li(a3, Operand(Smi::FromInt(reset_value)));
358 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
363 Label* back_edge_target) {
364 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
365 // to make sure it is constant. Branch may emit a skip-or-jump sequence
366 // instead of the normal Branch. It seems that the "skip" part of that
367 // sequence is about as long as this Branch would be so it is safe to ignore
369 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
370 Comment cmnt(masm_, "[ Back edge bookkeeping");
372 DCHECK(back_edge_target->is_bound());
373 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
374 int weight = Min(kMaxBackEdgeWeight,
375 Max(1, distance / kCodeSizeMultiplier));
376 EmitProfilingCounterDecrement(weight);
377 __ slt(at, a3, zero_reg);
378 __ beq(at, zero_reg, &ok);
379 // Call will emit a li t9 first, so it is safe to use the delay slot.
380 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
381 // Record a mapping of this PC offset to the OSR id. This is used to find
382 // the AST id from the unoptimized code in order to use it as a key into
383 // the deoptimization input data found in the optimized code.
384 RecordBackEdge(stmt->OsrEntryId());
385 EmitProfilingCounterReset();
388 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
389 // Record a mapping of the OSR id to this PC. This is used if the OSR
390 // entry becomes the target of a bailout. We don't expect it to be, but
391 // we want it to work if it is.
392 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
396 void FullCodeGenerator::EmitReturnSequence() {
397 Comment cmnt(masm_, "[ Return sequence");
398 if (return_label_.is_bound()) {
399 __ Branch(&return_label_);
401 __ bind(&return_label_);
403 // Push the return value on the stack as the parameter.
404 // Runtime::TraceExit returns its parameter in v0.
406 __ CallRuntime(Runtime::kTraceExit, 1);
408 // Pretend that the exit is a backwards jump to the entry.
410 if (info_->ShouldSelfOptimize()) {
411 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
413 int distance = masm_->pc_offset();
414 weight = Min(kMaxBackEdgeWeight,
415 Max(1, distance / kCodeSizeMultiplier));
417 EmitProfilingCounterDecrement(weight);
419 __ Branch(&ok, ge, a3, Operand(zero_reg));
421 __ Call(isolate()->builtins()->InterruptCheck(),
422 RelocInfo::CODE_TARGET);
424 EmitProfilingCounterReset();
428 // Add a label for checking the size of the code used for returning.
429 Label check_exit_codesize;
430 masm_->bind(&check_exit_codesize);
432 // Make sure that the constant pool is not emitted inside of the return
434 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
435 // Here we use masm_-> instead of the __ macro to avoid the code coverage
436 // tool from instrumenting as we rely on the code size here.
437 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
438 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
441 int no_frame_start = masm_->pc_offset();
442 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
443 masm_->Daddu(sp, sp, Operand(sp_delta));
445 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
449 // Check that the size of the code used for returning is large enough
450 // for the debugger's requirements.
451 DCHECK(Assembler::kJSReturnSequenceInstructions <=
452 masm_->InstructionsGeneratedSince(&check_exit_codesize));
458 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
459 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
463 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
464 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
465 codegen()->GetVar(result_register(), var);
469 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
470 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
471 codegen()->GetVar(result_register(), var);
472 __ push(result_register());
476 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
477 // For simplicity we always test the accumulator register.
478 codegen()->GetVar(result_register(), var);
479 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
480 codegen()->DoTest(this);
484 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
488 void FullCodeGenerator::AccumulatorValueContext::Plug(
489 Heap::RootListIndex index) const {
490 __ LoadRoot(result_register(), index);
494 void FullCodeGenerator::StackValueContext::Plug(
495 Heap::RootListIndex index) const {
496 __ LoadRoot(result_register(), index);
497 __ push(result_register());
501 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
502 codegen()->PrepareForBailoutBeforeSplit(condition(),
506 if (index == Heap::kUndefinedValueRootIndex ||
507 index == Heap::kNullValueRootIndex ||
508 index == Heap::kFalseValueRootIndex) {
509 if (false_label_ != fall_through_) __ Branch(false_label_);
510 } else if (index == Heap::kTrueValueRootIndex) {
511 if (true_label_ != fall_through_) __ Branch(true_label_);
513 __ LoadRoot(result_register(), index);
514 codegen()->DoTest(this);
519 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
523 void FullCodeGenerator::AccumulatorValueContext::Plug(
524 Handle<Object> lit) const {
525 __ li(result_register(), Operand(lit));
529 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
530 // Immediates cannot be pushed directly.
531 __ li(result_register(), Operand(lit));
532 __ push(result_register());
536 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
537 codegen()->PrepareForBailoutBeforeSplit(condition(),
541 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
542 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
543 if (false_label_ != fall_through_) __ Branch(false_label_);
544 } else if (lit->IsTrue() || lit->IsJSObject()) {
545 if (true_label_ != fall_through_) __ Branch(true_label_);
546 } else if (lit->IsString()) {
547 if (String::cast(*lit)->length() == 0) {
548 if (false_label_ != fall_through_) __ Branch(false_label_);
550 if (true_label_ != fall_through_) __ Branch(true_label_);
552 } else if (lit->IsSmi()) {
553 if (Smi::cast(*lit)->value() == 0) {
554 if (false_label_ != fall_through_) __ Branch(false_label_);
556 if (true_label_ != fall_through_) __ Branch(true_label_);
559 // For simplicity we always test the accumulator register.
560 __ li(result_register(), Operand(lit));
561 codegen()->DoTest(this);
566 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
567 Register reg) const {
573 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
575 Register reg) const {
578 __ Move(result_register(), reg);
582 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
583 Register reg) const {
585 if (count > 1) __ Drop(count - 1);
586 __ sd(reg, MemOperand(sp, 0));
590 void FullCodeGenerator::TestContext::DropAndPlug(int count,
591 Register reg) const {
593 // For simplicity we always test the accumulator register.
595 __ Move(result_register(), reg);
596 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
597 codegen()->DoTest(this);
601 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
602 Label* materialize_false) const {
603 DCHECK(materialize_true == materialize_false);
604 __ bind(materialize_true);
608 void FullCodeGenerator::AccumulatorValueContext::Plug(
609 Label* materialize_true,
610 Label* materialize_false) const {
612 __ bind(materialize_true);
613 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
615 __ bind(materialize_false);
616 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
621 void FullCodeGenerator::StackValueContext::Plug(
622 Label* materialize_true,
623 Label* materialize_false) const {
625 __ bind(materialize_true);
626 __ LoadRoot(at, Heap::kTrueValueRootIndex);
627 // Push the value as the following branch can clobber at in long branch mode.
630 __ bind(materialize_false);
631 __ LoadRoot(at, Heap::kFalseValueRootIndex);
637 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
638 Label* materialize_false) const {
639 DCHECK(materialize_true == true_label_);
640 DCHECK(materialize_false == false_label_);
644 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
648 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
649 Heap::RootListIndex value_root_index =
650 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
651 __ LoadRoot(result_register(), value_root_index);
655 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
656 Heap::RootListIndex value_root_index =
657 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
658 __ LoadRoot(at, value_root_index);
663 void FullCodeGenerator::TestContext::Plug(bool flag) const {
664 codegen()->PrepareForBailoutBeforeSplit(condition(),
669 if (true_label_ != fall_through_) __ Branch(true_label_);
671 if (false_label_ != fall_through_) __ Branch(false_label_);
676 void FullCodeGenerator::DoTest(Expression* condition,
679 Label* fall_through) {
680 __ mov(a0, result_register());
681 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
682 CallIC(ic, condition->test_id());
683 __ mov(at, zero_reg);
684 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
688 void FullCodeGenerator::Split(Condition cc,
693 Label* fall_through) {
694 if (if_false == fall_through) {
695 __ Branch(if_true, cc, lhs, rhs);
696 } else if (if_true == fall_through) {
697 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
699 __ Branch(if_true, cc, lhs, rhs);
705 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
706 DCHECK(var->IsStackAllocated());
707 // Offset is negative because higher indexes are at lower addresses.
708 int offset = -var->index() * kPointerSize;
709 // Adjust by a (parameter or local) base offset.
710 if (var->IsParameter()) {
711 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
713 offset += JavaScriptFrameConstants::kLocal0Offset;
715 return MemOperand(fp, offset);
719 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
720 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
721 if (var->IsContextSlot()) {
722 int context_chain_length = scope()->ContextChainLength(var->scope());
723 __ LoadContext(scratch, context_chain_length);
724 return ContextOperand(scratch, var->index());
726 return StackOperand(var);
731 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
732 // Use destination as scratch.
733 MemOperand location = VarOperand(var, dest);
734 __ ld(dest, location);
738 void FullCodeGenerator::SetVar(Variable* var,
742 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
743 DCHECK(!scratch0.is(src));
744 DCHECK(!scratch0.is(scratch1));
745 DCHECK(!scratch1.is(src));
746 MemOperand location = VarOperand(var, scratch0);
747 __ sd(src, location);
748 // Emit the write barrier code if the location is in the heap.
749 if (var->IsContextSlot()) {
750 __ RecordWriteContextSlot(scratch0,
760 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
761 bool should_normalize,
764 // Only prepare for bailouts before splits if we're in a test
765 // context. Otherwise, we let the Visit function deal with the
766 // preparation to avoid preparing with the same AST id twice.
767 if (!context()->IsTest() || !info_->IsOptimizable()) return;
770 if (should_normalize) __ Branch(&skip);
771 PrepareForBailout(expr, TOS_REG);
772 if (should_normalize) {
773 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
774 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
780 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
781 // The variable in the declaration always resides in the current function
783 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
784 if (generate_debug_code_) {
785 // Check that we're not inside a with or catch context.
786 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
787 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
788 __ Check(ne, kDeclarationInWithContext,
790 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
791 __ Check(ne, kDeclarationInCatchContext,
797 void FullCodeGenerator::VisitVariableDeclaration(
798 VariableDeclaration* declaration) {
799 // If it was not possible to allocate the variable at compile time, we
800 // need to "declare" it at runtime to make sure it actually exists in the
802 VariableProxy* proxy = declaration->proxy();
803 VariableMode mode = declaration->mode();
804 Variable* variable = proxy->var();
805 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
806 switch (variable->location()) {
807 case Variable::UNALLOCATED:
808 globals_->Add(variable->name(), zone());
809 globals_->Add(variable->binding_needs_init()
810 ? isolate()->factory()->the_hole_value()
811 : isolate()->factory()->undefined_value(),
815 case Variable::PARAMETER:
816 case Variable::LOCAL:
818 Comment cmnt(masm_, "[ VariableDeclaration");
819 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
820 __ sd(a4, StackOperand(variable));
824 case Variable::CONTEXT:
826 Comment cmnt(masm_, "[ VariableDeclaration");
827 EmitDebugCheckDeclarationContext(variable);
828 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
829 __ sd(at, ContextOperand(cp, variable->index()));
830 // No write barrier since the_hole_value is in old space.
831 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
835 case Variable::LOOKUP: {
836 Comment cmnt(masm_, "[ VariableDeclaration");
837 __ li(a2, Operand(variable->name()));
838 // Declaration nodes are always introduced in one of four modes.
839 DCHECK(IsDeclaredVariableMode(mode));
840 PropertyAttributes attr =
841 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
842 __ li(a1, Operand(Smi::FromInt(attr)));
843 // Push initial value, if any.
844 // Note: For variables we must not push an initial value (such as
845 // 'undefined') because we may have a (legal) redeclaration and we
846 // must not destroy the current value.
848 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
849 __ Push(cp, a2, a1, a0);
851 DCHECK(Smi::FromInt(0) == 0);
852 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
853 __ Push(cp, a2, a1, a0);
855 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
862 void FullCodeGenerator::VisitFunctionDeclaration(
863 FunctionDeclaration* declaration) {
864 VariableProxy* proxy = declaration->proxy();
865 Variable* variable = proxy->var();
866 switch (variable->location()) {
867 case Variable::UNALLOCATED: {
868 globals_->Add(variable->name(), zone());
869 Handle<SharedFunctionInfo> function =
870 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
871 // Check for stack-overflow exception.
872 if (function.is_null()) return SetStackOverflow();
873 globals_->Add(function, zone());
877 case Variable::PARAMETER:
878 case Variable::LOCAL: {
879 Comment cmnt(masm_, "[ FunctionDeclaration");
880 VisitForAccumulatorValue(declaration->fun());
881 __ sd(result_register(), StackOperand(variable));
885 case Variable::CONTEXT: {
886 Comment cmnt(masm_, "[ FunctionDeclaration");
887 EmitDebugCheckDeclarationContext(variable);
888 VisitForAccumulatorValue(declaration->fun());
889 __ sd(result_register(), ContextOperand(cp, variable->index()));
890 int offset = Context::SlotOffset(variable->index());
891 // We know that we have written a function, which is not a smi.
892 __ RecordWriteContextSlot(cp,
900 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
904 case Variable::LOOKUP: {
905 Comment cmnt(masm_, "[ FunctionDeclaration");
906 __ li(a2, Operand(variable->name()));
907 __ li(a1, Operand(Smi::FromInt(NONE)));
909 // Push initial value for function declaration.
910 VisitForStackValue(declaration->fun());
911 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
918 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
919 Variable* variable = declaration->proxy()->var();
920 DCHECK(variable->location() == Variable::CONTEXT);
921 DCHECK(variable->interface()->IsFrozen());
922 Comment cmnt(masm_, "[ ModuleDeclaration");
923 EmitDebugCheckDeclarationContext(variable);
925 // Load instance object.
926 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
927 __ ld(a1, ContextOperand(a1, variable->interface()->Index()));
928 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
931 __ sd(a1, ContextOperand(cp, variable->index()));
932 // We know that we have written a module, which is not a smi.
933 __ RecordWriteContextSlot(cp,
934 Context::SlotOffset(variable->index()),
941 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
943 // Traverse into body.
944 Visit(declaration->module());
948 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
949 VariableProxy* proxy = declaration->proxy();
950 Variable* variable = proxy->var();
951 switch (variable->location()) {
952 case Variable::UNALLOCATED:
956 case Variable::CONTEXT: {
957 Comment cmnt(masm_, "[ ImportDeclaration");
958 EmitDebugCheckDeclarationContext(variable);
963 case Variable::PARAMETER:
964 case Variable::LOCAL:
965 case Variable::LOOKUP:
971 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
976 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
977 // Call the runtime to declare the globals.
978 // The context is the first argument.
979 __ li(a1, Operand(pairs));
980 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
982 __ CallRuntime(Runtime::kDeclareGlobals, 3);
983 // Return value is ignored.
987 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
988 // Call the runtime to declare the modules.
989 __ Push(descriptions);
990 __ CallRuntime(Runtime::kDeclareModules, 1);
991 // Return value is ignored.
995 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
996 Comment cmnt(masm_, "[ SwitchStatement");
997 Breakable nested_statement(this, stmt);
998 SetStatementPosition(stmt);
1000 // Keep the switch value on the stack until a case matches.
1001 VisitForStackValue(stmt->tag());
1002 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1004 ZoneList<CaseClause*>* clauses = stmt->cases();
1005 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1007 Label next_test; // Recycled for each test.
1008 // Compile all the tests with branches to their bodies.
1009 for (int i = 0; i < clauses->length(); i++) {
1010 CaseClause* clause = clauses->at(i);
1011 clause->body_target()->Unuse();
1013 // The default is not a test, but remember it as final fall through.
1014 if (clause->is_default()) {
1015 default_clause = clause;
1019 Comment cmnt(masm_, "[ Case comparison");
1020 __ bind(&next_test);
1023 // Compile the label expression.
1024 VisitForAccumulatorValue(clause->label());
1025 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1027 // Perform the comparison as if via '==='.
1028 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1029 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1030 JumpPatchSite patch_site(masm_);
1031 if (inline_smi_code) {
1034 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1036 __ Branch(&next_test, ne, a1, Operand(a0));
1037 __ Drop(1); // Switch value is no longer needed.
1038 __ Branch(clause->body_target());
1040 __ bind(&slow_case);
1043 // Record position before stub call for type feedback.
1044 SetSourcePosition(clause->position());
1045 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1046 CallIC(ic, clause->CompareId());
1047 patch_site.EmitPatchInfo();
1051 PrepareForBailout(clause, TOS_REG);
1052 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1053 __ Branch(&next_test, ne, v0, Operand(at));
1055 __ Branch(clause->body_target());
1058 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1059 __ Drop(1); // Switch value is no longer needed.
1060 __ Branch(clause->body_target());
1063 // Discard the test value and jump to the default if present, otherwise to
1064 // the end of the statement.
1065 __ bind(&next_test);
1066 __ Drop(1); // Switch value is no longer needed.
1067 if (default_clause == NULL) {
1068 __ Branch(nested_statement.break_label());
1070 __ Branch(default_clause->body_target());
1073 // Compile all the case bodies.
1074 for (int i = 0; i < clauses->length(); i++) {
1075 Comment cmnt(masm_, "[ Case body");
1076 CaseClause* clause = clauses->at(i);
1077 __ bind(clause->body_target());
1078 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1079 VisitStatements(clause->statements());
1082 __ bind(nested_statement.break_label());
1083 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1087 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1088 Comment cmnt(masm_, "[ ForInStatement");
1089 int slot = stmt->ForInFeedbackSlot();
1090 SetStatementPosition(stmt);
1093 ForIn loop_statement(this, stmt);
1094 increment_loop_depth();
1096 // Get the object to enumerate over. If the object is null or undefined, skip
1097 // over the loop. See ECMA-262 version 5, section 12.6.4.
1098 VisitForAccumulatorValue(stmt->enumerable());
1099 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1100 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1101 __ Branch(&exit, eq, a0, Operand(at));
1102 Register null_value = a5;
1103 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1104 __ Branch(&exit, eq, a0, Operand(null_value));
1105 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1107 // Convert the object to a JS object.
1108 Label convert, done_convert;
1109 __ JumpIfSmi(a0, &convert);
1110 __ GetObjectType(a0, a1, a1);
1111 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1114 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1116 __ bind(&done_convert);
1119 // Check for proxies.
1121 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1122 __ GetObjectType(a0, a1, a1);
1123 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1125 // Check cache validity in generated code. This is a fast case for
1126 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1127 // guarantee cache validity, call the runtime system to check cache
1128 // validity or get the property names in a fixed array.
1129 __ CheckEnumCache(null_value, &call_runtime);
1131 // The enum cache is valid. Load the map of the object being
1132 // iterated over and use the cache for the iteration.
1134 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1135 __ Branch(&use_cache);
1137 // Get the set of properties to enumerate.
1138 __ bind(&call_runtime);
1139 __ push(a0); // Duplicate the enumerable object on the stack.
1140 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1142 // If we got a map from the runtime call, we can do a fast
1143 // modification check. Otherwise, we got a fixed array, and we have
1144 // to do a slow check.
1146 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1147 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1148 __ Branch(&fixed_array, ne, a2, Operand(at));
1150 // We got a map in register v0. Get the enumeration cache from it.
1151 Label no_descriptors;
1152 __ bind(&use_cache);
1154 __ EnumLength(a1, v0);
1155 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1157 __ LoadInstanceDescriptors(v0, a2);
1158 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1159 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1161 // Set up the four remaining stack slots.
1162 __ li(a0, Operand(Smi::FromInt(0)));
1163 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1164 __ Push(v0, a2, a1, a0);
1167 __ bind(&no_descriptors);
1171 // We got a fixed array in register v0. Iterate through that.
1173 __ bind(&fixed_array);
1175 __ li(a1, FeedbackVector());
1176 __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
1177 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
1179 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1180 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1181 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1182 __ GetObjectType(a2, a3, a3);
1183 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1184 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1185 __ bind(&non_proxy);
1186 __ Push(a1, v0); // Smi and array
1187 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1188 __ li(a0, Operand(Smi::FromInt(0)));
1189 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1191 // Generate code for doing the condition check.
1192 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1194 // Load the current count to a0, load the length to a1.
1195 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1196 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1197 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1199 // Get the current entry of the array into register a3.
1200 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1201 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1202 __ SmiScale(a4, a0, kPointerSizeLog2);
1203 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1204 __ ld(a3, MemOperand(a4)); // Current entry.
1206 // Get the expected map from the stack or a smi in the
1207 // permanent slow case into register a2.
1208 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1210 // Check if the expected map still matches that of the enumerable.
1211 // If not, we may have to filter the key.
1213 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1214 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1215 __ Branch(&update_each, eq, a4, Operand(a2));
1217 // For proxies, no filtering is done.
1218 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1219 DCHECK_EQ(Smi::FromInt(0), 0);
1220 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1222 // Convert the entry to a string or (smi) 0 if it isn't a property
1223 // any more. If the property has been removed while iterating, we
1225 __ Push(a1, a3); // Enumerable and current entry.
1226 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1227 __ mov(a3, result_register());
1228 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1230 // Update the 'each' property or variable from the possibly filtered
1231 // entry in register a3.
1232 __ bind(&update_each);
1233 __ mov(result_register(), a3);
1234 // Perform the assignment as if via '='.
1235 { EffectContext context(this);
1236 EmitAssignment(stmt->each());
1239 // Generate code for the body of the loop.
1240 Visit(stmt->body());
1242 // Generate code for the going to the next element by incrementing
1243 // the index (smi) stored on top of the stack.
1244 __ bind(loop_statement.continue_label());
1246 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1249 EmitBackEdgeBookkeeping(stmt, &loop);
1252 // Remove the pointers stored on the stack.
1253 __ bind(loop_statement.break_label());
1256 // Exit and decrement the loop depth.
1257 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1259 decrement_loop_depth();
1263 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1264 Comment cmnt(masm_, "[ ForOfStatement");
1265 SetStatementPosition(stmt);
1267 Iteration loop_statement(this, stmt);
1268 increment_loop_depth();
1270 // var iterator = iterable[Symbol.iterator]();
1271 VisitForEffect(stmt->assign_iterator());
1274 __ bind(loop_statement.continue_label());
1276 // result = iterator.next()
1277 VisitForEffect(stmt->next_result());
1279 // if (result.done) break;
1280 Label result_not_done;
1281 VisitForControl(stmt->result_done(),
1282 loop_statement.break_label(),
1285 __ bind(&result_not_done);
1287 // each = result.value
1288 VisitForEffect(stmt->assign_each());
1290 // Generate code for the body of the loop.
1291 Visit(stmt->body());
1293 // Check stack before looping.
1294 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1295 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1296 __ jmp(loop_statement.continue_label());
1298 // Exit and decrement the loop depth.
1299 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1300 __ bind(loop_statement.break_label());
1301 decrement_loop_depth();
1305 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1307 // Use the fast case closure allocation code that allocates in new
1308 // space for nested functions that don't need literals cloning. If
1309 // we're running with the --always-opt or the --prepare-always-opt
1310 // flag, we need to use the runtime function so that the new function
1311 // we are creating here gets a chance to have its code optimized and
1312 // doesn't just get a copy of the existing unoptimized code.
1313 if (!FLAG_always_opt &&
1314 !FLAG_prepare_always_opt &&
1316 scope()->is_function_scope() &&
1317 info->num_literals() == 0) {
1318 FastNewClosureStub stub(isolate(),
1319 info->strict_mode(),
1320 info->is_generator());
1321 __ li(a2, Operand(info));
1324 __ li(a0, Operand(info));
1325 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1326 : Heap::kFalseValueRootIndex);
1327 __ Push(cp, a0, a1);
1328 __ CallRuntime(Runtime::kNewClosure, 3);
1330 context()->Plug(v0);
1334 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1335 Comment cmnt(masm_, "[ VariableProxy");
1336 EmitVariableLoad(expr);
1340 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1341 TypeofState typeof_state,
1343 Register current = cp;
1349 if (s->num_heap_slots() > 0) {
1350 if (s->calls_sloppy_eval()) {
1351 // Check that extension is NULL.
1352 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1353 __ Branch(slow, ne, temp, Operand(zero_reg));
1355 // Load next context in chain.
1356 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1357 // Walk the rest of the chain without clobbering cp.
1360 // If no outer scope calls eval, we do not need to check more
1361 // context extensions.
1362 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1363 s = s->outer_scope();
1366 if (s->is_eval_scope()) {
1368 if (!current.is(next)) {
1369 __ Move(next, current);
1372 // Terminate at native context.
1373 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1374 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1375 __ Branch(&fast, eq, temp, Operand(a4));
1376 // Check that extension is NULL.
1377 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1378 __ Branch(slow, ne, temp, Operand(zero_reg));
1379 // Load next context in chain.
1380 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1385 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1386 __ li(LoadIC::NameRegister(), Operand(proxy->var()->name()));
1387 if (FLAG_vector_ics) {
1388 __ li(LoadIC::SlotRegister(),
1389 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1392 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1399 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1401 DCHECK(var->IsContextSlot());
1402 Register context = cp;
1406 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1407 if (s->num_heap_slots() > 0) {
1408 if (s->calls_sloppy_eval()) {
1409 // Check that extension is NULL.
1410 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1411 __ Branch(slow, ne, temp, Operand(zero_reg));
1413 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1414 // Walk the rest of the chain without clobbering cp.
1418 // Check that last extension is NULL.
1419 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1420 __ Branch(slow, ne, temp, Operand(zero_reg));
1422 // This function is used only for loads, not stores, so it's safe to
1423 // return an cp-based operand (the write barrier cannot be allowed to
1424 // destroy the cp register).
1425 return ContextOperand(context, var->index());
1429 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1430 TypeofState typeof_state,
1433 // Generate fast-case code for variables that might be shadowed by
1434 // eval-introduced variables. Eval is used a lot without
1435 // introducing variables. In those cases, we do not want to
1436 // perform a runtime call for all variables in the scope
1437 // containing the eval.
1438 Variable* var = proxy->var();
1439 if (var->mode() == DYNAMIC_GLOBAL) {
1440 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1442 } else if (var->mode() == DYNAMIC_LOCAL) {
1443 Variable* local = var->local_if_not_shadowed();
1444 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1445 if (local->mode() == LET || local->mode() == CONST ||
1446 local->mode() == CONST_LEGACY) {
1447 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1448 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1449 if (local->mode() == CONST_LEGACY) {
1450 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1451 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1452 } else { // LET || CONST
1453 __ Branch(done, ne, at, Operand(zero_reg));
1454 __ li(a0, Operand(var->name()));
1456 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1464 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1465 // Record position before possible IC call.
1466 SetSourcePosition(proxy->position());
1467 Variable* var = proxy->var();
1469 // Three cases: global variables, lookup variables, and all other types of
1471 switch (var->location()) {
1472 case Variable::UNALLOCATED: {
1473 Comment cmnt(masm_, "[ Global variable");
1474 // Use inline caching. Variable name is passed in a2 and the global
1475 // object (receiver) in a0.
1476 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand());
1477 __ li(LoadIC::NameRegister(), Operand(var->name()));
1478 if (FLAG_vector_ics) {
1479 __ li(LoadIC::SlotRegister(),
1480 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1482 CallLoadIC(CONTEXTUAL);
1483 context()->Plug(v0);
1487 case Variable::PARAMETER:
1488 case Variable::LOCAL:
1489 case Variable::CONTEXT: {
1490 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1491 : "[ Stack variable");
1492 if (var->binding_needs_init()) {
1493 // var->scope() may be NULL when the proxy is located in eval code and
1494 // refers to a potential outside binding. Currently those bindings are
1495 // always looked up dynamically, i.e. in that case
1496 // var->location() == LOOKUP.
1498 DCHECK(var->scope() != NULL);
1500 // Check if the binding really needs an initialization check. The check
1501 // can be skipped in the following situation: we have a LET or CONST
1502 // binding in harmony mode, both the Variable and the VariableProxy have
1503 // the same declaration scope (i.e. they are both in global code, in the
1504 // same function or in the same eval code) and the VariableProxy is in
1505 // the source physically located after the initializer of the variable.
1507 // We cannot skip any initialization checks for CONST in non-harmony
1508 // mode because const variables may be declared but never initialized:
1509 // if (false) { const x; }; var y = x;
1511 // The condition on the declaration scopes is a conservative check for
1512 // nested functions that access a binding and are called before the
1513 // binding is initialized:
1514 // function() { f(); let x = 1; function f() { x = 2; } }
1516 bool skip_init_check;
1517 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1518 skip_init_check = false;
1520 // Check that we always have valid source position.
1521 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1522 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1523 skip_init_check = var->mode() != CONST_LEGACY &&
1524 var->initializer_position() < proxy->position();
1527 if (!skip_init_check) {
1528 // Let and const need a read barrier.
1530 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1531 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1532 if (var->mode() == LET || var->mode() == CONST) {
1533 // Throw a reference error when using an uninitialized let/const
1534 // binding in harmony mode.
1536 __ Branch(&done, ne, at, Operand(zero_reg));
1537 __ li(a0, Operand(var->name()));
1539 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1542 // Uninitalized const bindings outside of harmony mode are unholed.
1543 DCHECK(var->mode() == CONST_LEGACY);
1544 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1545 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1547 context()->Plug(v0);
1551 context()->Plug(var);
1555 case Variable::LOOKUP: {
1556 Comment cmnt(masm_, "[ Lookup variable");
1558 // Generate code for loading from variables potentially shadowed
1559 // by eval-introduced variables.
1560 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1562 __ li(a1, Operand(var->name()));
1563 __ Push(cp, a1); // Context and name.
1564 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1566 context()->Plug(v0);
1572 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1573 Comment cmnt(masm_, "[ RegExpLiteral");
1575 // Registers will be used as follows:
1576 // a5 = materialized value (RegExp literal)
1577 // a4 = JS function, literals array
1578 // a3 = literal index
1579 // a2 = RegExp pattern
1580 // a1 = RegExp flags
1581 // a0 = RegExp literal clone
1582 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1583 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1584 int literal_offset =
1585 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1586 __ ld(a5, FieldMemOperand(a4, literal_offset));
1587 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1588 __ Branch(&materialized, ne, a5, Operand(at));
1590 // Create regexp literal using runtime function.
1591 // Result will be in v0.
1592 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1593 __ li(a2, Operand(expr->pattern()));
1594 __ li(a1, Operand(expr->flags()));
1595 __ Push(a4, a3, a2, a1);
1596 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1599 __ bind(&materialized);
1600 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1601 Label allocated, runtime_allocate;
1602 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1605 __ bind(&runtime_allocate);
1606 __ li(a0, Operand(Smi::FromInt(size)));
1608 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1611 __ bind(&allocated);
1613 // After this, registers are used as follows:
1614 // v0: Newly allocated regexp.
1615 // a5: Materialized regexp.
1617 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1618 context()->Plug(v0);
1622 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1623 if (expression == NULL) {
1624 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1627 VisitForStackValue(expression);
1632 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1633 Comment cmnt(masm_, "[ ObjectLiteral");
1635 expr->BuildConstantProperties(isolate());
1636 Handle<FixedArray> constant_properties = expr->constant_properties();
1637 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1638 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1639 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1640 __ li(a1, Operand(constant_properties));
1641 int flags = expr->fast_elements()
1642 ? ObjectLiteral::kFastElements
1643 : ObjectLiteral::kNoFlags;
1644 flags |= expr->has_function()
1645 ? ObjectLiteral::kHasFunction
1646 : ObjectLiteral::kNoFlags;
1647 __ li(a0, Operand(Smi::FromInt(flags)));
1648 int properties_count = constant_properties->length() / 2;
1649 if (expr->may_store_doubles() || expr->depth() > 1 ||
1650 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1651 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1652 __ Push(a3, a2, a1, a0);
1653 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1655 FastCloneShallowObjectStub stub(isolate(), properties_count);
1659 // If result_saved is true the result is on top of the stack. If
1660 // result_saved is false the result is in v0.
1661 bool result_saved = false;
1663 // Mark all computed expressions that are bound to a key that
1664 // is shadowed by a later occurrence of the same key. For the
1665 // marked expressions, no store code is emitted.
1666 expr->CalculateEmitStore(zone());
1668 AccessorTable accessor_table(zone());
1669 for (int i = 0; i < expr->properties()->length(); i++) {
1670 ObjectLiteral::Property* property = expr->properties()->at(i);
1671 if (property->IsCompileTimeValue()) continue;
1673 Literal* key = property->key();
1674 Expression* value = property->value();
1675 if (!result_saved) {
1676 __ push(v0); // Save result on stack.
1677 result_saved = true;
1679 switch (property->kind()) {
1680 case ObjectLiteral::Property::CONSTANT:
1682 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1683 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1685 case ObjectLiteral::Property::COMPUTED:
1686 if (key->value()->IsInternalizedString()) {
1687 if (property->emit_store()) {
1688 VisitForAccumulatorValue(value);
1689 __ mov(StoreIC::ValueRegister(), result_register());
1690 DCHECK(StoreIC::ValueRegister().is(a0));
1691 __ li(StoreIC::NameRegister(), Operand(key->value()));
1692 __ ld(StoreIC::ReceiverRegister(), MemOperand(sp));
1693 CallStoreIC(key->LiteralFeedbackId());
1694 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1696 VisitForEffect(value);
1700 // Duplicate receiver on stack.
1701 __ ld(a0, MemOperand(sp));
1703 VisitForStackValue(key);
1704 VisitForStackValue(value);
1705 if (property->emit_store()) {
1706 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1708 __ CallRuntime(Runtime::kSetProperty, 4);
1713 case ObjectLiteral::Property::PROTOTYPE:
1714 // Duplicate receiver on stack.
1715 __ ld(a0, MemOperand(sp));
1717 VisitForStackValue(value);
1718 if (property->emit_store()) {
1719 __ CallRuntime(Runtime::kSetPrototype, 2);
1724 case ObjectLiteral::Property::GETTER:
1725 accessor_table.lookup(key)->second->getter = value;
1727 case ObjectLiteral::Property::SETTER:
1728 accessor_table.lookup(key)->second->setter = value;
1733 // Emit code to define accessors, using only a single call to the runtime for
1734 // each pair of corresponding getters and setters.
1735 for (AccessorTable::Iterator it = accessor_table.begin();
1736 it != accessor_table.end();
1738 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1740 VisitForStackValue(it->first);
1741 EmitAccessor(it->second->getter);
1742 EmitAccessor(it->second->setter);
1743 __ li(a0, Operand(Smi::FromInt(NONE)));
1745 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1748 if (expr->has_function()) {
1749 DCHECK(result_saved);
1750 __ ld(a0, MemOperand(sp));
1752 __ CallRuntime(Runtime::kToFastProperties, 1);
1756 context()->PlugTOS();
1758 context()->Plug(v0);
1763 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1764 Comment cmnt(masm_, "[ ArrayLiteral");
1766 expr->BuildConstantElements(isolate());
1767 int flags = expr->depth() == 1
1768 ? ArrayLiteral::kShallowElements
1769 : ArrayLiteral::kNoFlags;
1771 ZoneList<Expression*>* subexprs = expr->values();
1772 int length = subexprs->length();
1774 Handle<FixedArray> constant_elements = expr->constant_elements();
1775 DCHECK_EQ(2, constant_elements->length());
1776 ElementsKind constant_elements_kind =
1777 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1778 bool has_fast_elements =
1779 IsFastObjectElementsKind(constant_elements_kind);
1780 Handle<FixedArrayBase> constant_elements_values(
1781 FixedArrayBase::cast(constant_elements->get(1)));
1783 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1784 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1785 // If the only customer of allocation sites is transitioning, then
1786 // we can turn it off if we don't have anywhere else to transition to.
1787 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1790 __ mov(a0, result_register());
1791 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1792 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1793 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1794 __ li(a1, Operand(constant_elements));
1795 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1796 __ li(a0, Operand(Smi::FromInt(flags)));
1797 __ Push(a3, a2, a1, a0);
1798 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1800 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1804 bool result_saved = false; // Is the result saved to the stack?
1806 // Emit code to evaluate all the non-constant subexpressions and to store
1807 // them into the newly cloned array.
1808 for (int i = 0; i < length; i++) {
1809 Expression* subexpr = subexprs->at(i);
1810 // If the subexpression is a literal or a simple materialized literal it
1811 // is already set in the cloned array.
1812 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1814 if (!result_saved) {
1815 __ push(v0); // array literal
1816 __ Push(Smi::FromInt(expr->literal_index()));
1817 result_saved = true;
1820 VisitForAccumulatorValue(subexpr);
1822 if (IsFastObjectElementsKind(constant_elements_kind)) {
1823 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1824 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1825 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1826 __ sd(result_register(), FieldMemOperand(a1, offset));
1827 // Update the write barrier for the array store.
1828 __ RecordWriteField(a1, offset, result_register(), a2,
1829 kRAHasBeenSaved, kDontSaveFPRegs,
1830 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1832 __ li(a3, Operand(Smi::FromInt(i)));
1833 __ mov(a0, result_register());
1834 StoreArrayLiteralElementStub stub(isolate());
1838 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1841 __ Pop(); // literal index
1842 context()->PlugTOS();
1844 context()->Plug(v0);
1849 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1850 DCHECK(expr->target()->IsValidReferenceExpression());
1852 Comment cmnt(masm_, "[ Assignment");
1854 // Left-hand side can only be a property, a global or a (parameter or local)
1856 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1857 LhsKind assign_type = VARIABLE;
1858 Property* property = expr->target()->AsProperty();
1859 if (property != NULL) {
1860 assign_type = (property->key()->IsPropertyName())
1865 // Evaluate LHS expression.
1866 switch (assign_type) {
1868 // Nothing to do here.
1870 case NAMED_PROPERTY:
1871 if (expr->is_compound()) {
1872 // We need the receiver both on the stack and in the register.
1873 VisitForStackValue(property->obj());
1874 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
1876 VisitForStackValue(property->obj());
1879 case KEYED_PROPERTY:
1880 // We need the key and receiver on both the stack and in v0 and a1.
1881 if (expr->is_compound()) {
1882 VisitForStackValue(property->obj());
1883 VisitForStackValue(property->key());
1884 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
1885 __ ld(LoadIC::NameRegister(), MemOperand(sp, 0));
1887 VisitForStackValue(property->obj());
1888 VisitForStackValue(property->key());
1893 // For compound assignments we need another deoptimization point after the
1894 // variable/property load.
1895 if (expr->is_compound()) {
1896 { AccumulatorValueContext context(this);
1897 switch (assign_type) {
1899 EmitVariableLoad(expr->target()->AsVariableProxy());
1900 PrepareForBailout(expr->target(), TOS_REG);
1902 case NAMED_PROPERTY:
1903 EmitNamedPropertyLoad(property);
1904 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1906 case KEYED_PROPERTY:
1907 EmitKeyedPropertyLoad(property);
1908 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1913 Token::Value op = expr->binary_op();
1914 __ push(v0); // Left operand goes on the stack.
1915 VisitForAccumulatorValue(expr->value());
1917 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1920 SetSourcePosition(expr->position() + 1);
1921 AccumulatorValueContext context(this);
1922 if (ShouldInlineSmiCase(op)) {
1923 EmitInlineSmiBinaryOp(expr->binary_operation(),
1929 EmitBinaryOp(expr->binary_operation(), op, mode);
1932 // Deoptimization point in case the binary operation may have side effects.
1933 PrepareForBailout(expr->binary_operation(), TOS_REG);
1935 VisitForAccumulatorValue(expr->value());
1938 // Record source position before possible IC call.
1939 SetSourcePosition(expr->position());
1942 switch (assign_type) {
1944 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1946 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1947 context()->Plug(v0);
1949 case NAMED_PROPERTY:
1950 EmitNamedPropertyAssignment(expr);
1952 case KEYED_PROPERTY:
1953 EmitKeyedPropertyAssignment(expr);
1959 void FullCodeGenerator::VisitYield(Yield* expr) {
1960 Comment cmnt(masm_, "[ Yield");
1961 // Evaluate yielded value first; the initial iterator definition depends on
1962 // this. It stays on the stack while we update the iterator.
1963 VisitForStackValue(expr->expression());
1965 switch (expr->yield_kind()) {
1966 case Yield::SUSPEND:
1967 // Pop value from top-of-stack slot; box result into result register.
1968 EmitCreateIteratorResult(false);
1969 __ push(result_register());
1971 case Yield::INITIAL: {
1972 Label suspend, continuation, post_runtime, resume;
1976 __ bind(&continuation);
1980 VisitForAccumulatorValue(expr->generator_object());
1981 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1982 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1983 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1984 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1986 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1987 kRAHasBeenSaved, kDontSaveFPRegs);
1988 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1989 __ Branch(&post_runtime, eq, sp, Operand(a1));
1990 __ push(v0); // generator object
1991 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1992 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1993 __ bind(&post_runtime);
1994 __ pop(result_register());
1995 EmitReturnSequence();
1998 context()->Plug(result_register());
2002 case Yield::FINAL: {
2003 VisitForAccumulatorValue(expr->generator_object());
2004 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2005 __ sd(a1, FieldMemOperand(result_register(),
2006 JSGeneratorObject::kContinuationOffset));
2007 // Pop value from top-of-stack slot, box result into result register.
2008 EmitCreateIteratorResult(true);
2009 EmitUnwindBeforeReturn();
2010 EmitReturnSequence();
2014 case Yield::DELEGATING: {
2015 VisitForStackValue(expr->generator_object());
2017 // Initial stack layout is as follows:
2018 // [sp + 1 * kPointerSize] iter
2019 // [sp + 0 * kPointerSize] g
2021 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2022 Label l_next, l_call;
2023 Register load_receiver = LoadIC::ReceiverRegister();
2024 Register load_name = LoadIC::NameRegister();
2025 // Initial send value is undefined.
2026 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2029 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2032 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2033 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2034 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2035 __ Push(a2, a3, a0); // "throw", iter, except
2038 // try { received = %yield result }
2039 // Shuffle the received result above a try handler and yield it without
2042 __ pop(a0); // result
2043 __ PushTryHandler(StackHandler::CATCH, expr->index());
2044 const int handler_size = StackHandlerConstants::kSize;
2045 __ push(a0); // result
2047 __ bind(&l_continuation);
2050 __ bind(&l_suspend);
2051 const int generator_object_depth = kPointerSize + handler_size;
2052 __ ld(a0, MemOperand(sp, generator_object_depth));
2054 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2055 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2056 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2057 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2059 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2060 kRAHasBeenSaved, kDontSaveFPRegs);
2061 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2062 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2063 __ pop(v0); // result
2064 EmitReturnSequence();
2066 __ bind(&l_resume); // received in a0
2069 // receiver = iter; f = 'next'; arg = received;
2071 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2072 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2073 __ Push(load_name, a3, a0); // "next", iter, received
2075 // result = receiver[f](arg);
2077 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2078 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2079 if (FLAG_vector_ics) {
2080 __ li(LoadIC::SlotRegister(),
2081 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2083 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2084 CallIC(ic, TypeFeedbackId::None());
2087 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2088 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2091 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2092 __ Drop(1); // The function is still on the stack; drop it.
2094 // if (!result.done) goto l_try;
2095 __ Move(load_receiver, v0);
2097 __ push(load_receiver); // save result
2098 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2099 if (FLAG_vector_ics) {
2100 __ li(LoadIC::SlotRegister(),
2101 Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2103 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2105 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2107 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2110 __ pop(load_receiver); // result
2111 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2112 if (FLAG_vector_ics) {
2113 __ li(LoadIC::SlotRegister(),
2114 Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2116 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2117 context()->DropAndPlug(2, v0); // drop iter and g
2124 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2126 JSGeneratorObject::ResumeMode resume_mode) {
2127 // The value stays in a0, and is ultimately read by the resumed generator, as
2128 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2129 // is read to throw the value when the resumed generator is already closed.
2130 // a1 will hold the generator object until the activation has been resumed.
2131 VisitForStackValue(generator);
2132 VisitForAccumulatorValue(value);
2135 // Check generator state.
2136 Label wrong_state, closed_state, done;
2137 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2138 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2139 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2140 __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2141 __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
2143 // Load suspended function and context.
2144 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2145 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2147 // Load receiver and store as the first argument.
2148 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2151 // Push holes for the rest of the arguments to the generator function.
2152 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2153 // The argument count is stored as int32_t on 64-bit platforms.
2154 // TODO(plind): Smi on 32-bit platforms.
2156 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2157 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2158 Label push_argument_holes, push_frame;
2159 __ bind(&push_argument_holes);
2160 __ Dsubu(a3, a3, Operand(1));
2161 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2163 __ jmp(&push_argument_holes);
2165 // Enter a new JavaScript frame, and initialize its slots as they were when
2166 // the generator was suspended.
2168 __ bind(&push_frame);
2169 __ Call(&resume_frame);
2171 __ bind(&resume_frame);
2172 // ra = return address.
2173 // fp = caller's frame pointer.
2174 // cp = callee's context,
2175 // a4 = callee's JS function.
2176 __ Push(ra, fp, cp, a4);
2177 // Adjust FP to point to saved FP.
2178 __ Daddu(fp, sp, 2 * kPointerSize);
2180 // Load the operand stack size.
2181 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2182 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2185 // If we are sending a value and there is no operand stack, we can jump back
2187 if (resume_mode == JSGeneratorObject::NEXT) {
2189 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2190 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2191 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2193 __ Daddu(a3, a3, Operand(a2));
2194 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2195 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2197 __ bind(&slow_resume);
2200 // Otherwise, we push holes for the operand stack and call the runtime to fix
2201 // up the stack and the handlers.
2202 Label push_operand_holes, call_resume;
2203 __ bind(&push_operand_holes);
2204 __ Dsubu(a3, a3, Operand(1));
2205 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2207 __ Branch(&push_operand_holes);
2208 __ bind(&call_resume);
2209 DCHECK(!result_register().is(a1));
2210 __ Push(a1, result_register());
2211 __ Push(Smi::FromInt(resume_mode));
2212 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2213 // Not reached: the runtime call returns elsewhere.
2214 __ stop("not-reached");
2216 // Reach here when generator is closed.
2217 __ bind(&closed_state);
2218 if (resume_mode == JSGeneratorObject::NEXT) {
2219 // Return completed iterator result when generator is closed.
2220 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2222 // Pop value from top-of-stack slot; box result into result register.
2223 EmitCreateIteratorResult(true);
2225 // Throw the provided value.
2227 __ CallRuntime(Runtime::kThrow, 1);
2231 // Throw error if we attempt to operate on a running generator.
2232 __ bind(&wrong_state);
2234 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2237 context()->Plug(result_register());
2241 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2245 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2247 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2250 __ bind(&gc_required);
2251 __ Push(Smi::FromInt(map->instance_size()));
2252 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2253 __ ld(context_register(),
2254 MemOperand(fp, StandardFrameConstants::kContextOffset));
2256 __ bind(&allocated);
2257 __ li(a1, Operand(map));
2259 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2260 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2261 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2262 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2263 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2264 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2266 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2268 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2270 // Only the value field needs a write barrier, as the other values are in the
2272 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2273 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2277 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2278 SetSourcePosition(prop->position());
2279 Literal* key = prop->key()->AsLiteral();
2280 __ li(LoadIC::NameRegister(), Operand(key->value()));
2281 if (FLAG_vector_ics) {
2282 __ li(LoadIC::SlotRegister(),
2283 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2284 CallLoadIC(NOT_CONTEXTUAL);
2286 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2291 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2292 SetSourcePosition(prop->position());
2293 // Call keyed load IC. It has register arguments receiver and key.
2294 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2295 if (FLAG_vector_ics) {
2296 __ li(LoadIC::SlotRegister(),
2297 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2300 CallIC(ic, prop->PropertyFeedbackId());
2305 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2308 Expression* left_expr,
2309 Expression* right_expr) {
2310 Label done, smi_case, stub_call;
2312 Register scratch1 = a2;
2313 Register scratch2 = a3;
2315 // Get the arguments.
2317 Register right = a0;
2319 __ mov(a0, result_register());
2321 // Perform combined smi check on both operands.
2322 __ Or(scratch1, left, Operand(right));
2323 STATIC_ASSERT(kSmiTag == 0);
2324 JumpPatchSite patch_site(masm_);
2325 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2327 __ bind(&stub_call);
2328 BinaryOpICStub stub(isolate(), op, mode);
2329 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2330 patch_site.EmitPatchInfo();
2334 // Smi case. This code works the same way as the smi-smi case in the type
2335 // recording binary operation stub, see
2338 __ GetLeastBitsFromSmi(scratch1, right, 5);
2339 __ dsrav(right, left, scratch1);
2340 __ And(v0, right, Operand(0xffffffff00000000L));
2343 __ SmiUntag(scratch1, left);
2344 __ GetLeastBitsFromSmi(scratch2, right, 5);
2345 __ dsllv(scratch1, scratch1, scratch2);
2346 __ SmiTag(v0, scratch1);
2350 __ SmiUntag(scratch1, left);
2351 __ GetLeastBitsFromSmi(scratch2, right, 5);
2352 __ dsrlv(scratch1, scratch1, scratch2);
2353 __ And(scratch2, scratch1, 0x80000000);
2354 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2355 __ SmiTag(v0, scratch1);
2359 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2360 __ BranchOnOverflow(&stub_call, scratch1);
2363 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2364 __ BranchOnOverflow(&stub_call, scratch1);
2367 __ Dmulh(v0, left, right);
2368 __ dsra32(scratch2, v0, 0);
2369 __ sra(scratch1, v0, 31);
2370 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2372 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2373 __ Daddu(scratch2, right, left);
2374 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2375 DCHECK(Smi::FromInt(0) == 0);
2376 __ mov(v0, zero_reg);
2380 __ Or(v0, left, Operand(right));
2382 case Token::BIT_AND:
2383 __ And(v0, left, Operand(right));
2385 case Token::BIT_XOR:
2386 __ Xor(v0, left, Operand(right));
2393 context()->Plug(v0);
2397 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2399 OverwriteMode mode) {
2400 __ mov(a0, result_register());
2402 BinaryOpICStub stub(isolate(), op, mode);
2403 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2404 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2405 patch_site.EmitPatchInfo();
2406 context()->Plug(v0);
2410 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2411 DCHECK(expr->IsValidReferenceExpression());
2413 // Left-hand side can only be a property, a global or a (parameter or local)
2415 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2416 LhsKind assign_type = VARIABLE;
2417 Property* prop = expr->AsProperty();
2419 assign_type = (prop->key()->IsPropertyName())
2424 switch (assign_type) {
2426 Variable* var = expr->AsVariableProxy()->var();
2427 EffectContext context(this);
2428 EmitVariableAssignment(var, Token::ASSIGN);
2431 case NAMED_PROPERTY: {
2432 __ push(result_register()); // Preserve value.
2433 VisitForAccumulatorValue(prop->obj());
2434 __ mov(StoreIC::ReceiverRegister(), result_register());
2435 __ pop(StoreIC::ValueRegister()); // Restore value.
2436 __ li(StoreIC::NameRegister(),
2437 Operand(prop->key()->AsLiteral()->value()));
2441 case KEYED_PROPERTY: {
2442 __ push(result_register()); // Preserve value.
2443 VisitForStackValue(prop->obj());
2444 VisitForAccumulatorValue(prop->key());
2445 __ Move(KeyedStoreIC::NameRegister(), result_register());
2446 __ Pop(KeyedStoreIC::ValueRegister(), KeyedStoreIC::ReceiverRegister());
2447 Handle<Code> ic = strict_mode() == SLOPPY
2448 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2454 context()->Plug(v0);
2458 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2459 Variable* var, MemOperand location) {
2460 __ sd(result_register(), location);
2461 if (var->IsContextSlot()) {
2462 // RecordWrite may destroy all its register arguments.
2463 __ Move(a3, result_register());
2464 int offset = Context::SlotOffset(var->index());
2465 __ RecordWriteContextSlot(
2466 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2471 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2472 if (var->IsUnallocated()) {
2473 // Global var, const, or let.
2474 __ mov(StoreIC::ValueRegister(), result_register());
2475 __ li(StoreIC::NameRegister(), Operand(var->name()));
2476 __ ld(StoreIC::ReceiverRegister(), GlobalObjectOperand());
2478 } else if (op == Token::INIT_CONST_LEGACY) {
2479 // Const initializers need a write barrier.
2480 DCHECK(!var->IsParameter()); // No const parameters.
2481 if (var->IsLookupSlot()) {
2482 __ li(a0, Operand(var->name()));
2483 __ Push(v0, cp, a0); // Context and name.
2484 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2486 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2488 MemOperand location = VarOperand(var, a1);
2489 __ ld(a2, location);
2490 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2491 __ Branch(&skip, ne, a2, Operand(at));
2492 EmitStoreToStackLocalOrContextSlot(var, location);
2496 } else if (var->mode() == LET && op != Token::INIT_LET) {
2497 // Non-initializing assignment to let variable needs a write barrier.
2498 DCHECK(!var->IsLookupSlot());
2499 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2501 MemOperand location = VarOperand(var, a1);
2502 __ ld(a3, location);
2503 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2504 __ Branch(&assign, ne, a3, Operand(a4));
2505 __ li(a3, Operand(var->name()));
2507 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2508 // Perform the assignment.
2510 EmitStoreToStackLocalOrContextSlot(var, location);
2512 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2513 if (var->IsLookupSlot()) {
2514 // Assignment to var.
2515 __ li(a4, Operand(var->name()));
2516 __ li(a3, Operand(Smi::FromInt(strict_mode())));
2519 // jssp[16] : context.
2520 // jssp[24] : value.
2521 __ Push(v0, cp, a4, a3);
2522 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2524 // Assignment to var or initializing assignment to let/const in harmony
2526 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2527 MemOperand location = VarOperand(var, a1);
2528 if (generate_debug_code_ && op == Token::INIT_LET) {
2529 // Check for an uninitialized let binding.
2530 __ ld(a2, location);
2531 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2532 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2534 EmitStoreToStackLocalOrContextSlot(var, location);
2537 // Non-initializing assignments to consts are ignored.
2541 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2542 // Assignment to a property, using a named store IC.
2543 Property* prop = expr->target()->AsProperty();
2544 DCHECK(prop != NULL);
2545 DCHECK(prop->key()->IsLiteral());
2547 // Record source code position before IC call.
2548 SetSourcePosition(expr->position());
2549 __ mov(StoreIC::ValueRegister(), result_register());
2550 __ li(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value()));
2551 __ pop(StoreIC::ReceiverRegister());
2552 CallStoreIC(expr->AssignmentFeedbackId());
2554 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2555 context()->Plug(v0);
2559 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2560 // Assignment to a property, using a keyed store IC.
2562 // Record source code position before IC call.
2563 SetSourcePosition(expr->position());
2564 // Call keyed store IC.
2565 // The arguments are:
2566 // - a0 is the value,
2568 // - a2 is the receiver.
2569 __ mov(KeyedStoreIC::ValueRegister(), result_register());
2570 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
2571 DCHECK(KeyedStoreIC::ValueRegister().is(a0));
2573 Handle<Code> ic = strict_mode() == SLOPPY
2574 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2575 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2576 CallIC(ic, expr->AssignmentFeedbackId());
2578 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2579 context()->Plug(v0);
2583 void FullCodeGenerator::VisitProperty(Property* expr) {
2584 Comment cmnt(masm_, "[ Property");
2585 Expression* key = expr->key();
2587 if (key->IsPropertyName()) {
2588 VisitForAccumulatorValue(expr->obj());
2589 __ Move(LoadIC::ReceiverRegister(), v0);
2590 EmitNamedPropertyLoad(expr);
2591 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2592 context()->Plug(v0);
2594 VisitForStackValue(expr->obj());
2595 VisitForAccumulatorValue(expr->key());
2596 __ Move(LoadIC::NameRegister(), v0);
2597 __ pop(LoadIC::ReceiverRegister());
2598 EmitKeyedPropertyLoad(expr);
2599 context()->Plug(v0);
2604 void FullCodeGenerator::CallIC(Handle<Code> code,
2605 TypeFeedbackId id) {
2607 __ Call(code, RelocInfo::CODE_TARGET, id);
2611 // Code common for calls using the IC.
2612 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2613 Expression* callee = expr->expression();
2615 CallIC::CallType call_type = callee->IsVariableProxy()
2619 // Get the target function.
2620 if (call_type == CallIC::FUNCTION) {
2621 { StackValueContext context(this);
2622 EmitVariableLoad(callee->AsVariableProxy());
2623 PrepareForBailout(callee, NO_REGISTERS);
2625 // Push undefined as receiver. This is patched in the method prologue if it
2626 // is a sloppy mode method.
2627 __ Push(isolate()->factory()->undefined_value());
2629 // Load the function from the receiver.
2630 DCHECK(callee->IsProperty());
2631 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2632 EmitNamedPropertyLoad(callee->AsProperty());
2633 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2634 // Push the target function under the receiver.
2635 __ ld(at, MemOperand(sp, 0));
2637 __ sd(v0, MemOperand(sp, kPointerSize));
2640 EmitCall(expr, call_type);
2644 // Code common for calls using the IC.
2645 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2648 VisitForAccumulatorValue(key);
2650 Expression* callee = expr->expression();
2652 // Load the function from the receiver.
2653 DCHECK(callee->IsProperty());
2654 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2655 __ Move(LoadIC::NameRegister(), v0);
2656 EmitKeyedPropertyLoad(callee->AsProperty());
2657 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2659 // Push the target function under the receiver.
2660 __ ld(at, MemOperand(sp, 0));
2662 __ sd(v0, MemOperand(sp, kPointerSize));
2664 EmitCall(expr, CallIC::METHOD);
2668 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2669 // Load the arguments.
2670 ZoneList<Expression*>* args = expr->arguments();
2671 int arg_count = args->length();
2672 { PreservePositionScope scope(masm()->positions_recorder());
2673 for (int i = 0; i < arg_count; i++) {
2674 VisitForStackValue(args->at(i));
2678 // Record source position of the IC call.
2679 SetSourcePosition(expr->position());
2680 Handle<Code> ic = CallIC::initialize_stub(
2681 isolate(), arg_count, call_type);
2682 __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2683 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2684 // Don't assign a type feedback id to the IC, since type feedback is provided
2685 // by the vector above.
2687 RecordJSReturnSite(expr);
2688 // Restore context register.
2689 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2690 context()->DropAndPlug(1, v0);
2694 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2695 // a6: copy of the first argument or undefined if it doesn't exist.
2696 if (arg_count > 0) {
2697 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2699 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2702 // a5: the receiver of the enclosing function.
2703 int receiver_offset = 2 + info_->scope()->num_parameters();
2704 __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize));
2706 // a4: the strict mode.
2707 __ li(a4, Operand(Smi::FromInt(strict_mode())));
2709 // a1: the start position of the scope the calls resides in.
2710 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2712 // Do the runtime call.
2713 __ Push(a6, a5, a4, a1);
2714 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2718 void FullCodeGenerator::VisitCall(Call* expr) {
2720 // We want to verify that RecordJSReturnSite gets called on all paths
2721 // through this function. Avoid early returns.
2722 expr->return_is_recorded_ = false;
2725 Comment cmnt(masm_, "[ Call");
2726 Expression* callee = expr->expression();
2727 Call::CallType call_type = expr->GetCallType(isolate());
2729 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2730 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2731 // to resolve the function we need to call and the receiver of the
2732 // call. Then we call the resolved function using the given
2734 ZoneList<Expression*>* args = expr->arguments();
2735 int arg_count = args->length();
2737 { PreservePositionScope pos_scope(masm()->positions_recorder());
2738 VisitForStackValue(callee);
2739 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2740 __ push(a2); // Reserved receiver slot.
2742 // Push the arguments.
2743 for (int i = 0; i < arg_count; i++) {
2744 VisitForStackValue(args->at(i));
2747 // Push a copy of the function (found below the arguments) and
2749 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2751 EmitResolvePossiblyDirectEval(arg_count);
2753 // The runtime call returns a pair of values in v0 (function) and
2754 // v1 (receiver). Touch up the stack with the right values.
2755 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2756 __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
2758 // Record source position for debugger.
2759 SetSourcePosition(expr->position());
2760 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2761 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2763 RecordJSReturnSite(expr);
2764 // Restore context register.
2765 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2766 context()->DropAndPlug(1, v0);
2767 } else if (call_type == Call::GLOBAL_CALL) {
2768 EmitCallWithLoadIC(expr);
2769 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2770 // Call to a lookup slot (dynamically introduced variable).
2771 VariableProxy* proxy = callee->AsVariableProxy();
2774 { PreservePositionScope scope(masm()->positions_recorder());
2775 // Generate code for loading from variables potentially shadowed
2776 // by eval-introduced variables.
2777 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2781 // Call the runtime to find the function to call (returned in v0)
2782 // and the object holding it (returned in v1).
2783 DCHECK(!context_register().is(a2));
2784 __ li(a2, Operand(proxy->name()));
2785 __ Push(context_register(), a2);
2786 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2787 __ Push(v0, v1); // Function, receiver.
2789 // If fast case code has been generated, emit code to push the
2790 // function and receiver and have the slow path jump around this
2792 if (done.is_linked()) {
2798 // The receiver is implicitly the global receiver. Indicate this
2799 // by passing the hole to the call function stub.
2800 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2805 // The receiver is either the global receiver or an object found
2806 // by LoadContextSlot.
2808 } else if (call_type == Call::PROPERTY_CALL) {
2809 Property* property = callee->AsProperty();
2810 { PreservePositionScope scope(masm()->positions_recorder());
2811 VisitForStackValue(property->obj());
2813 if (property->key()->IsPropertyName()) {
2814 EmitCallWithLoadIC(expr);
2816 EmitKeyedCallWithLoadIC(expr, property->key());
2819 DCHECK(call_type == Call::OTHER_CALL);
2820 // Call to an arbitrary expression not handled specially above.
2821 { PreservePositionScope scope(masm()->positions_recorder());
2822 VisitForStackValue(callee);
2824 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2826 // Emit function call.
2831 // RecordJSReturnSite should have been called.
2832 DCHECK(expr->return_is_recorded_);
2837 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2838 Comment cmnt(masm_, "[ CallNew");
2839 // According to ECMA-262, section 11.2.2, page 44, the function
2840 // expression in new calls must be evaluated before the
2843 // Push constructor on the stack. If it's not a function it's used as
2844 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2846 VisitForStackValue(expr->expression());
2848 // Push the arguments ("left-to-right") on the stack.
2849 ZoneList<Expression*>* args = expr->arguments();
2850 int arg_count = args->length();
2851 for (int i = 0; i < arg_count; i++) {
2852 VisitForStackValue(args->at(i));
2854 // Call the construct call builtin that handles allocation and
2855 // constructor invocation.
2856 SetSourcePosition(expr->position());
2858 // Load function and argument count into a1 and a0.
2859 __ li(a0, Operand(arg_count));
2860 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2862 // Record call targets in unoptimized code.
2863 if (FLAG_pretenuring_call_new) {
2864 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2865 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2866 expr->CallNewFeedbackSlot() + 1);
2869 __ li(a2, FeedbackVector());
2870 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2872 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2873 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2874 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2875 context()->Plug(v0);
2879 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2880 ZoneList<Expression*>* args = expr->arguments();
2881 DCHECK(args->length() == 1);
2883 VisitForAccumulatorValue(args->at(0));
2885 Label materialize_true, materialize_false;
2886 Label* if_true = NULL;
2887 Label* if_false = NULL;
2888 Label* fall_through = NULL;
2889 context()->PrepareTest(&materialize_true, &materialize_false,
2890 &if_true, &if_false, &fall_through);
2892 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2894 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2896 context()->Plug(if_true, if_false);
2900 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2901 ZoneList<Expression*>* args = expr->arguments();
2902 DCHECK(args->length() == 1);
2904 VisitForAccumulatorValue(args->at(0));
2906 Label materialize_true, materialize_false;
2907 Label* if_true = NULL;
2908 Label* if_false = NULL;
2909 Label* fall_through = NULL;
2910 context()->PrepareTest(&materialize_true, &materialize_false,
2911 &if_true, &if_false, &fall_through);
2913 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2914 __ NonNegativeSmiTst(v0, at);
2915 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2917 context()->Plug(if_true, if_false);
2921 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2922 ZoneList<Expression*>* args = expr->arguments();
2923 DCHECK(args->length() == 1);
2925 VisitForAccumulatorValue(args->at(0));
2927 Label materialize_true, materialize_false;
2928 Label* if_true = NULL;
2929 Label* if_false = NULL;
2930 Label* fall_through = NULL;
2931 context()->PrepareTest(&materialize_true, &materialize_false,
2932 &if_true, &if_false, &fall_through);
2934 __ JumpIfSmi(v0, if_false);
2935 __ LoadRoot(at, Heap::kNullValueRootIndex);
2936 __ Branch(if_true, eq, v0, Operand(at));
2937 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
2938 // Undetectable objects behave like undefined when tested with typeof.
2939 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2940 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2941 __ Branch(if_false, ne, at, Operand(zero_reg));
2942 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2943 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2944 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2945 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2946 if_true, if_false, fall_through);
2948 context()->Plug(if_true, if_false);
2952 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2953 ZoneList<Expression*>* args = expr->arguments();
2954 DCHECK(args->length() == 1);
2956 VisitForAccumulatorValue(args->at(0));
2958 Label materialize_true, materialize_false;
2959 Label* if_true = NULL;
2960 Label* if_false = NULL;
2961 Label* fall_through = NULL;
2962 context()->PrepareTest(&materialize_true, &materialize_false,
2963 &if_true, &if_false, &fall_through);
2965 __ JumpIfSmi(v0, if_false);
2966 __ GetObjectType(v0, a1, a1);
2967 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2968 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2969 if_true, if_false, fall_through);
2971 context()->Plug(if_true, if_false);
2975 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2976 ZoneList<Expression*>* args = expr->arguments();
2977 DCHECK(args->length() == 1);
2979 VisitForAccumulatorValue(args->at(0));
2981 Label materialize_true, materialize_false;
2982 Label* if_true = NULL;
2983 Label* if_false = NULL;
2984 Label* fall_through = NULL;
2985 context()->PrepareTest(&materialize_true, &materialize_false,
2986 &if_true, &if_false, &fall_through);
2988 __ JumpIfSmi(v0, if_false);
2989 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2990 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2991 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2992 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2993 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
2995 context()->Plug(if_true, if_false);
2999 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3000 CallRuntime* expr) {
3001 ZoneList<Expression*>* args = expr->arguments();
3002 DCHECK(args->length() == 1);
3004 VisitForAccumulatorValue(args->at(0));
3006 Label materialize_true, materialize_false, skip_lookup;
3007 Label* if_true = NULL;
3008 Label* if_false = NULL;
3009 Label* fall_through = NULL;
3010 context()->PrepareTest(&materialize_true, &materialize_false,
3011 &if_true, &if_false, &fall_through);
3013 __ AssertNotSmi(v0);
3015 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3016 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3017 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3018 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3020 // Check for fast case object. Generate false result for slow case object.
3021 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3022 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3023 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3024 __ Branch(if_false, eq, a2, Operand(a4));
3026 // Look for valueOf name in the descriptor array, and indicate false if
3027 // found. Since we omit an enumeration index check, if it is added via a
3028 // transition that shares its descriptor array, this is a false positive.
3029 Label entry, loop, done;
3031 // Skip loop if no descriptors are valid.
3032 __ NumberOfOwnDescriptors(a3, a1);
3033 __ Branch(&done, eq, a3, Operand(zero_reg));
3035 __ LoadInstanceDescriptors(a1, a4);
3036 // a4: descriptor array.
3037 // a3: valid entries in the descriptor array.
3038 STATIC_ASSERT(kSmiTag == 0);
3039 STATIC_ASSERT(kSmiTagSize == 1);
3041 // STATIC_ASSERT(kPointerSize == 4);
3042 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3043 __ Dmul(a3, a3, at);
3044 // Calculate location of the first key name.
3045 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3046 // Calculate the end of the descriptor array.
3048 __ dsll(a5, a3, kPointerSizeLog2);
3049 __ Daddu(a2, a2, a5);
3051 // Loop through all the keys in the descriptor array. If one of these is the
3052 // string "valueOf" the result is false.
3053 // The use of a6 to store the valueOf string assumes that it is not otherwise
3054 // used in the loop below.
3055 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3058 __ ld(a3, MemOperand(a4, 0));
3059 __ Branch(if_false, eq, a3, Operand(a6));
3060 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3062 __ Branch(&loop, ne, a4, Operand(a2));
3066 // Set the bit in the map to indicate that there is no local valueOf field.
3067 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3068 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3069 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3071 __ bind(&skip_lookup);
3073 // If a valueOf property is not found on the object check that its
3074 // prototype is the un-modified String prototype. If not result is false.
3075 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3076 __ JumpIfSmi(a2, if_false);
3077 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3078 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3079 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3080 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3081 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3082 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3084 context()->Plug(if_true, if_false);
3088 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3089 ZoneList<Expression*>* args = expr->arguments();
3090 DCHECK(args->length() == 1);
3092 VisitForAccumulatorValue(args->at(0));
3094 Label materialize_true, materialize_false;
3095 Label* if_true = NULL;
3096 Label* if_false = NULL;
3097 Label* fall_through = NULL;
3098 context()->PrepareTest(&materialize_true, &materialize_false,
3099 &if_true, &if_false, &fall_through);
3101 __ JumpIfSmi(v0, if_false);
3102 __ GetObjectType(v0, a1, a2);
3103 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3104 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3105 __ Branch(if_false);
3107 context()->Plug(if_true, if_false);
3111 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3112 ZoneList<Expression*>* args = expr->arguments();
3113 DCHECK(args->length() == 1);
3115 VisitForAccumulatorValue(args->at(0));
3117 Label materialize_true, materialize_false;
3118 Label* if_true = NULL;
3119 Label* if_false = NULL;
3120 Label* fall_through = NULL;
3121 context()->PrepareTest(&materialize_true, &materialize_false,
3122 &if_true, &if_false, &fall_through);
3124 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3125 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3126 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3127 __ li(a4, 0x80000000);
3129 __ Branch(¬_nan, ne, a2, Operand(a4));
3130 __ mov(a4, zero_reg);
3134 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3135 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3137 context()->Plug(if_true, if_false);
3141 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3142 ZoneList<Expression*>* args = expr->arguments();
3143 DCHECK(args->length() == 1);
3145 VisitForAccumulatorValue(args->at(0));
3147 Label materialize_true, materialize_false;
3148 Label* if_true = NULL;
3149 Label* if_false = NULL;
3150 Label* fall_through = NULL;
3151 context()->PrepareTest(&materialize_true, &materialize_false,
3152 &if_true, &if_false, &fall_through);
3154 __ JumpIfSmi(v0, if_false);
3155 __ GetObjectType(v0, a1, a1);
3156 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3157 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3158 if_true, if_false, fall_through);
3160 context()->Plug(if_true, if_false);
3164 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3165 ZoneList<Expression*>* args = expr->arguments();
3166 DCHECK(args->length() == 1);
3168 VisitForAccumulatorValue(args->at(0));
3170 Label materialize_true, materialize_false;
3171 Label* if_true = NULL;
3172 Label* if_false = NULL;
3173 Label* fall_through = NULL;
3174 context()->PrepareTest(&materialize_true, &materialize_false,
3175 &if_true, &if_false, &fall_through);
3177 __ JumpIfSmi(v0, if_false);
3178 __ GetObjectType(v0, a1, a1);
3179 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3180 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3182 context()->Plug(if_true, if_false);
3186 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3187 DCHECK(expr->arguments()->length() == 0);
3189 Label materialize_true, materialize_false;
3190 Label* if_true = NULL;
3191 Label* if_false = NULL;
3192 Label* fall_through = NULL;
3193 context()->PrepareTest(&materialize_true, &materialize_false,
3194 &if_true, &if_false, &fall_through);
3196 // Get the frame pointer for the calling frame.
3197 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3199 // Skip the arguments adaptor frame if it exists.
3200 Label check_frame_marker;
3201 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3202 __ Branch(&check_frame_marker, ne,
3203 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3204 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3206 // Check the marker in the calling frame.
3207 __ bind(&check_frame_marker);
3208 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3211 if_true, if_false, fall_through);
3213 context()->Plug(if_true, if_false);
3217 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3218 ZoneList<Expression*>* args = expr->arguments();
3219 DCHECK(args->length() == 2);
3221 // Load the two objects into registers and perform the comparison.
3222 VisitForStackValue(args->at(0));
3223 VisitForAccumulatorValue(args->at(1));
3225 Label materialize_true, materialize_false;
3226 Label* if_true = NULL;
3227 Label* if_false = NULL;
3228 Label* fall_through = NULL;
3229 context()->PrepareTest(&materialize_true, &materialize_false,
3230 &if_true, &if_false, &fall_through);
3233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3234 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3236 context()->Plug(if_true, if_false);
3240 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3241 ZoneList<Expression*>* args = expr->arguments();
3242 DCHECK(args->length() == 1);
3244 // ArgumentsAccessStub expects the key in a1 and the formal
3245 // parameter count in a0.
3246 VisitForAccumulatorValue(args->at(0));
3248 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3249 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3251 context()->Plug(v0);
3255 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3256 DCHECK(expr->arguments()->length() == 0);
3258 // Get the number of formal parameters.
3259 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3261 // Check if the calling frame is an arguments adaptor frame.
3262 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3263 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3264 __ Branch(&exit, ne, a3,
3265 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3267 // Arguments adaptor case: Read the arguments length from the
3269 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3272 context()->Plug(v0);
3276 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3277 ZoneList<Expression*>* args = expr->arguments();
3278 DCHECK(args->length() == 1);
3279 Label done, null, function, non_function_constructor;
3281 VisitForAccumulatorValue(args->at(0));
3283 // If the object is a smi, we return null.
3284 __ JumpIfSmi(v0, &null);
3286 // Check that the object is a JS object but take special care of JS
3287 // functions to make sure they have 'Function' as their class.
3288 // Assume that there are only two callable types, and one of them is at
3289 // either end of the type range for JS object types. Saves extra comparisons.
3290 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3291 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3292 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3294 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3295 FIRST_SPEC_OBJECT_TYPE + 1);
3296 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3298 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3299 LAST_SPEC_OBJECT_TYPE - 1);
3300 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3301 // Assume that there is no larger type.
3302 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3304 // Check if the constructor in the map is a JS function.
3305 __ ld(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3306 __ GetObjectType(v0, a1, a1);
3307 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3309 // v0 now contains the constructor function. Grab the
3310 // instance class name from there.
3311 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3312 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3315 // Functions have class 'Function'.
3317 __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
3320 // Objects with a non-function constructor have class 'Object'.
3321 __ bind(&non_function_constructor);
3322 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3325 // Non-JS objects have class null.
3327 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3332 context()->Plug(v0);
3336 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3337 // Load the arguments on the stack and call the stub.
3338 SubStringStub stub(isolate());
3339 ZoneList<Expression*>* args = expr->arguments();
3340 DCHECK(args->length() == 3);
3341 VisitForStackValue(args->at(0));
3342 VisitForStackValue(args->at(1));
3343 VisitForStackValue(args->at(2));
3345 context()->Plug(v0);
3349 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3350 // Load the arguments on the stack and call the stub.
3351 RegExpExecStub stub(isolate());
3352 ZoneList<Expression*>* args = expr->arguments();
3353 DCHECK(args->length() == 4);
3354 VisitForStackValue(args->at(0));
3355 VisitForStackValue(args->at(1));
3356 VisitForStackValue(args->at(2));
3357 VisitForStackValue(args->at(3));
3359 context()->Plug(v0);
3363 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3364 ZoneList<Expression*>* args = expr->arguments();
3365 DCHECK(args->length() == 1);
3367 VisitForAccumulatorValue(args->at(0)); // Load the object.
3370 // If the object is a smi return the object.
3371 __ JumpIfSmi(v0, &done);
3372 // If the object is not a value type, return the object.
3373 __ GetObjectType(v0, a1, a1);
3374 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3376 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3379 context()->Plug(v0);
3383 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3384 ZoneList<Expression*>* args = expr->arguments();
3385 DCHECK(args->length() == 2);
3386 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3387 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3389 VisitForAccumulatorValue(args->at(0)); // Load the object.
3391 Label runtime, done, not_date_object;
3392 Register object = v0;
3393 Register result = v0;
3394 Register scratch0 = t1;
3395 Register scratch1 = a1;
3397 __ JumpIfSmi(object, ¬_date_object);
3398 __ GetObjectType(object, scratch1, scratch1);
3399 __ Branch(¬_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3401 if (index->value() == 0) {
3402 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3405 if (index->value() < JSDate::kFirstUncachedField) {
3406 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3407 __ li(scratch1, Operand(stamp));
3408 __ ld(scratch1, MemOperand(scratch1));
3409 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3410 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3411 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3412 kPointerSize * index->value()));
3416 __ PrepareCallCFunction(2, scratch1);
3417 __ li(a1, Operand(index));
3418 __ Move(a0, object);
3419 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3423 __ bind(¬_date_object);
3424 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3426 context()->Plug(v0);
3430 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3431 ZoneList<Expression*>* args = expr->arguments();
3432 DCHECK_EQ(3, args->length());
3434 Register string = v0;
3435 Register index = a1;
3436 Register value = a2;
3438 VisitForStackValue(args->at(1)); // index
3439 VisitForStackValue(args->at(2)); // value
3440 VisitForAccumulatorValue(args->at(0)); // string
3441 __ Pop(index, value);
3443 if (FLAG_debug_code) {
3444 __ SmiTst(value, at);
3445 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3446 __ SmiTst(index, at);
3447 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3448 __ SmiUntag(index, index);
3449 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3450 Register scratch = t1;
3451 __ EmitSeqStringSetCharCheck(
3452 string, index, value, scratch, one_byte_seq_type);
3453 __ SmiTag(index, index);
3456 __ SmiUntag(value, value);
3459 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3461 __ Daddu(at, at, index);
3462 __ sb(value, MemOperand(at));
3463 context()->Plug(string);
3467 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3468 ZoneList<Expression*>* args = expr->arguments();
3469 DCHECK_EQ(3, args->length());
3471 Register string = v0;
3472 Register index = a1;
3473 Register value = a2;
3475 VisitForStackValue(args->at(1)); // index
3476 VisitForStackValue(args->at(2)); // value
3477 VisitForAccumulatorValue(args->at(0)); // string
3478 __ Pop(index, value);
3480 if (FLAG_debug_code) {
3481 __ SmiTst(value, at);
3482 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3483 __ SmiTst(index, at);
3484 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3485 __ SmiUntag(index, index);
3486 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3487 Register scratch = t1;
3488 __ EmitSeqStringSetCharCheck(
3489 string, index, value, scratch, two_byte_seq_type);
3490 __ SmiTag(index, index);
3493 __ SmiUntag(value, value);
3496 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3497 __ dsra(index, index, 32 - 1);
3498 __ Daddu(at, at, index);
3499 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3500 __ sh(value, MemOperand(at));
3501 context()->Plug(string);
3505 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3506 // Load the arguments on the stack and call the runtime function.
3507 ZoneList<Expression*>* args = expr->arguments();
3508 DCHECK(args->length() == 2);
3509 VisitForStackValue(args->at(0));
3510 VisitForStackValue(args->at(1));
3511 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3513 context()->Plug(v0);
3517 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3518 ZoneList<Expression*>* args = expr->arguments();
3519 DCHECK(args->length() == 2);
3521 VisitForStackValue(args->at(0)); // Load the object.
3522 VisitForAccumulatorValue(args->at(1)); // Load the value.
3523 __ pop(a1); // v0 = value. a1 = object.
3526 // If the object is a smi, return the value.
3527 __ JumpIfSmi(a1, &done);
3529 // If the object is not a value type, return the value.
3530 __ GetObjectType(a1, a2, a2);
3531 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3534 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3535 // Update the write barrier. Save the value as it will be
3536 // overwritten by the write barrier code and is needed afterward.
3538 __ RecordWriteField(
3539 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3542 context()->Plug(v0);
3546 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3547 ZoneList<Expression*>* args = expr->arguments();
3548 DCHECK_EQ(args->length(), 1);
3550 // Load the argument into a0 and call the stub.
3551 VisitForAccumulatorValue(args->at(0));
3552 __ mov(a0, result_register());
3554 NumberToStringStub stub(isolate());
3556 context()->Plug(v0);
3560 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3561 ZoneList<Expression*>* args = expr->arguments();
3562 DCHECK(args->length() == 1);
3564 VisitForAccumulatorValue(args->at(0));
3567 StringCharFromCodeGenerator generator(v0, a1);
3568 generator.GenerateFast(masm_);
3571 NopRuntimeCallHelper call_helper;
3572 generator.GenerateSlow(masm_, call_helper);
3575 context()->Plug(a1);
3579 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3580 ZoneList<Expression*>* args = expr->arguments();
3581 DCHECK(args->length() == 2);
3583 VisitForStackValue(args->at(0));
3584 VisitForAccumulatorValue(args->at(1));
3585 __ mov(a0, result_register());
3587 Register object = a1;
3588 Register index = a0;
3589 Register result = v0;
3593 Label need_conversion;
3594 Label index_out_of_range;
3596 StringCharCodeAtGenerator generator(object,
3601 &index_out_of_range,
3602 STRING_INDEX_IS_NUMBER);
3603 generator.GenerateFast(masm_);
3606 __ bind(&index_out_of_range);
3607 // When the index is out of range, the spec requires us to return
3609 __ LoadRoot(result, Heap::kNanValueRootIndex);
3612 __ bind(&need_conversion);
3613 // Load the undefined value into the result register, which will
3614 // trigger conversion.
3615 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3618 NopRuntimeCallHelper call_helper;
3619 generator.GenerateSlow(masm_, call_helper);
3622 context()->Plug(result);
3626 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3627 ZoneList<Expression*>* args = expr->arguments();
3628 DCHECK(args->length() == 2);
3630 VisitForStackValue(args->at(0));
3631 VisitForAccumulatorValue(args->at(1));
3632 __ mov(a0, result_register());
3634 Register object = a1;
3635 Register index = a0;
3636 Register scratch = a3;
3637 Register result = v0;
3641 Label need_conversion;
3642 Label index_out_of_range;
3644 StringCharAtGenerator generator(object,
3650 &index_out_of_range,
3651 STRING_INDEX_IS_NUMBER);
3652 generator.GenerateFast(masm_);
3655 __ bind(&index_out_of_range);
3656 // When the index is out of range, the spec requires us to return
3657 // the empty string.
3658 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3661 __ bind(&need_conversion);
3662 // Move smi zero into the result register, which will trigger
3664 __ li(result, Operand(Smi::FromInt(0)));
3667 NopRuntimeCallHelper call_helper;
3668 generator.GenerateSlow(masm_, call_helper);
3671 context()->Plug(result);
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments();
3677 DCHECK_EQ(2, args->length());
3678 VisitForStackValue(args->at(0));
3679 VisitForAccumulatorValue(args->at(1));
3682 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3683 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3685 context()->Plug(v0);
3689 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3690 ZoneList<Expression*>* args = expr->arguments();
3691 DCHECK_EQ(2, args->length());
3693 VisitForStackValue(args->at(0));
3694 VisitForStackValue(args->at(1));
3696 StringCompareStub stub(isolate());
3698 context()->Plug(v0);
3702 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3703 ZoneList<Expression*>* args = expr->arguments();
3704 DCHECK(args->length() >= 2);
3706 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3707 for (int i = 0; i < arg_count + 1; i++) {
3708 VisitForStackValue(args->at(i));
3710 VisitForAccumulatorValue(args->last()); // Function.
3712 Label runtime, done;
3713 // Check for non-function argument (including proxy).
3714 __ JumpIfSmi(v0, &runtime);
3715 __ GetObjectType(v0, a1, a1);
3716 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3718 // InvokeFunction requires the function in a1. Move it in there.
3719 __ mov(a1, result_register());
3720 ParameterCount count(arg_count);
3721 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
3722 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3727 __ CallRuntime(Runtime::kCall, args->length());
3730 context()->Plug(v0);
3734 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3735 RegExpConstructResultStub stub(isolate());
3736 ZoneList<Expression*>* args = expr->arguments();
3737 DCHECK(args->length() == 3);
3738 VisitForStackValue(args->at(0));
3739 VisitForStackValue(args->at(1));
3740 VisitForAccumulatorValue(args->at(2));
3741 __ mov(a0, result_register());
3745 context()->Plug(v0);
3749 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3750 ZoneList<Expression*>* args = expr->arguments();
3751 DCHECK_EQ(2, args->length());
3753 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3754 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3756 Handle<FixedArray> jsfunction_result_caches(
3757 isolate()->native_context()->jsfunction_result_caches());
3758 if (jsfunction_result_caches->length() <= cache_id) {
3759 __ Abort(kAttemptToUseUndefinedCache);
3760 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3761 context()->Plug(v0);
3765 VisitForAccumulatorValue(args->at(1));
3768 Register cache = a1;
3769 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3770 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3773 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3775 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3778 Label done, not_found;
3779 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3780 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3781 // a2 now holds finger offset as a smi.
3782 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3783 // a3 now points to the start of fixed array elements.
3784 __ SmiScale(at, a2, kPointerSizeLog2);
3785 __ daddu(a3, a3, at);
3786 // a3 now points to key of indexed element of cache.
3787 __ ld(a2, MemOperand(a3));
3788 __ Branch(¬_found, ne, key, Operand(a2));
3790 __ ld(v0, MemOperand(a3, kPointerSize));
3793 __ bind(¬_found);
3794 // Call runtime to perform the lookup.
3795 __ Push(cache, key);
3796 __ CallRuntime(Runtime::kGetFromCache, 2);
3799 context()->Plug(v0);
3803 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3804 ZoneList<Expression*>* args = expr->arguments();
3805 VisitForAccumulatorValue(args->at(0));
3807 Label materialize_true, materialize_false;
3808 Label* if_true = NULL;
3809 Label* if_false = NULL;
3810 Label* fall_through = NULL;
3811 context()->PrepareTest(&materialize_true, &materialize_false,
3812 &if_true, &if_false, &fall_through);
3814 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3815 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3817 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3818 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3820 context()->Plug(if_true, if_false);
3824 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments();
3826 DCHECK(args->length() == 1);
3827 VisitForAccumulatorValue(args->at(0));
3829 __ AssertString(v0);
3831 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3832 __ IndexFromHash(v0, v0);
3834 context()->Plug(v0);
3838 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3839 Label bailout, done, one_char_separator, long_separator,
3840 non_trivial_array, not_size_one_array, loop,
3841 empty_separator_loop, one_char_separator_loop,
3842 one_char_separator_loop_entry, long_separator_loop;
3843 ZoneList<Expression*>* args = expr->arguments();
3844 DCHECK(args->length() == 2);
3845 VisitForStackValue(args->at(1));
3846 VisitForAccumulatorValue(args->at(0));
3848 // All aliases of the same register have disjoint lifetimes.
3849 Register array = v0;
3850 Register elements = no_reg; // Will be v0.
3851 Register result = no_reg; // Will be v0.
3852 Register separator = a1;
3853 Register array_length = a2;
3854 Register result_pos = no_reg; // Will be a2.
3855 Register string_length = a3;
3856 Register string = a4;
3857 Register element = a5;
3858 Register elements_end = a6;
3859 Register scratch1 = a7;
3860 Register scratch2 = t1;
3861 Register scratch3 = t0;
3863 // Separator operand is on the stack.
3866 // Check that the array is a JSArray.
3867 __ JumpIfSmi(array, &bailout);
3868 __ GetObjectType(array, scratch1, scratch2);
3869 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3871 // Check that the array has fast elements.
3872 __ CheckFastElements(scratch1, scratch2, &bailout);
3874 // If the array has length zero, return the empty string.
3875 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3876 __ SmiUntag(array_length);
3877 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3878 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
3881 __ bind(&non_trivial_array);
3883 // Get the FixedArray containing array's elements.
3885 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3886 array = no_reg; // End of array's live range.
3888 // Check that all array elements are sequential ASCII strings, and
3889 // accumulate the sum of their lengths, as a smi-encoded value.
3890 __ mov(string_length, zero_reg);
3892 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3893 __ dsll(elements_end, array_length, kPointerSizeLog2);
3894 __ Daddu(elements_end, element, elements_end);
3895 // Loop condition: while (element < elements_end).
3896 // Live values in registers:
3897 // elements: Fixed array of strings.
3898 // array_length: Length of the fixed array of strings (not smi)
3899 // separator: Separator string
3900 // string_length: Accumulated sum of string lengths (smi).
3901 // element: Current array element.
3902 // elements_end: Array end.
3903 if (generate_debug_code_) {
3904 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
3905 array_length, Operand(zero_reg));
3908 __ ld(string, MemOperand(element));
3909 __ Daddu(element, element, kPointerSize);
3910 __ JumpIfSmi(string, &bailout);
3911 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3912 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3913 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3914 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3915 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3916 __ BranchOnOverflow(&bailout, scratch3);
3917 __ Branch(&loop, lt, element, Operand(elements_end));
3919 // If array_length is 1, return elements[0], a string.
3920 __ Branch(¬_size_one_array, ne, array_length, Operand(1));
3921 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3924 __ bind(¬_size_one_array);
3926 // Live values in registers:
3927 // separator: Separator string
3928 // array_length: Length of the array.
3929 // string_length: Sum of string lengths (smi).
3930 // elements: FixedArray of strings.
3932 // Check that the separator is a flat ASCII string.
3933 __ JumpIfSmi(separator, &bailout);
3934 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3935 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3936 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3938 // Add (separator length times array_length) - separator length to the
3939 // string_length to get the length of the result string. array_length is not
3940 // smi but the other values are, so the result is a smi.
3941 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3942 __ Dsubu(string_length, string_length, Operand(scratch1));
3943 __ SmiUntag(scratch1);
3944 __ Dmul(scratch2, array_length, scratch1);
3945 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3947 __ dsra32(scratch1, scratch2, 0);
3948 __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
3949 __ SmiUntag(string_length);
3950 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
3951 __ BranchOnOverflow(&bailout, scratch3);
3953 // Get first element in the array to free up the elements register to be used
3956 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3957 result = elements; // End of live range for elements.
3959 // Live values in registers:
3960 // element: First array element
3961 // separator: Separator string
3962 // string_length: Length of result string (not smi)
3963 // array_length: Length of the array.
3964 __ AllocateAsciiString(result,
3970 // Prepare for looping. Set up elements_end to end of the array. Set
3971 // result_pos to the position of the result where to write the first
3973 __ dsll(elements_end, array_length, kPointerSizeLog2);
3974 __ Daddu(elements_end, element, elements_end);
3975 result_pos = array_length; // End of live range for array_length.
3976 array_length = no_reg;
3977 __ Daddu(result_pos,
3979 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3981 // Check the length of the separator.
3982 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3983 __ li(at, Operand(Smi::FromInt(1)));
3984 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
3985 __ Branch(&long_separator, gt, scratch1, Operand(at));
3987 // Empty separator case.
3988 __ bind(&empty_separator_loop);
3989 // Live values in registers:
3990 // result_pos: the position to which we are currently copying characters.
3991 // element: Current array element.
3992 // elements_end: Array end.
3994 // Copy next array element to the result.
3995 __ ld(string, MemOperand(element));
3996 __ Daddu(element, element, kPointerSize);
3997 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
3998 __ SmiUntag(string_length);
3999 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4000 __ CopyBytes(string, result_pos, string_length, scratch1);
4001 // End while (element < elements_end).
4002 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4003 DCHECK(result.is(v0));
4006 // One-character separator case.
4007 __ bind(&one_char_separator);
4008 // Replace separator with its ASCII character value.
4009 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4010 // Jump into the loop after the code that copies the separator, so the first
4011 // element is not preceded by a separator.
4012 __ jmp(&one_char_separator_loop_entry);
4014 __ bind(&one_char_separator_loop);
4015 // Live values in registers:
4016 // result_pos: the position to which we are currently copying characters.
4017 // element: Current array element.
4018 // elements_end: Array end.
4019 // separator: Single separator ASCII char (in lower byte).
4021 // Copy the separator character to the result.
4022 __ sb(separator, MemOperand(result_pos));
4023 __ Daddu(result_pos, result_pos, 1);
4025 // Copy next array element to the result.
4026 __ bind(&one_char_separator_loop_entry);
4027 __ ld(string, MemOperand(element));
4028 __ Daddu(element, element, kPointerSize);
4029 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4030 __ SmiUntag(string_length);
4031 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4032 __ CopyBytes(string, result_pos, string_length, scratch1);
4033 // End while (element < elements_end).
4034 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4035 DCHECK(result.is(v0));
4038 // Long separator case (separator is more than one character). Entry is at the
4039 // label long_separator below.
4040 __ bind(&long_separator_loop);
4041 // Live values in registers:
4042 // result_pos: the position to which we are currently copying characters.
4043 // element: Current array element.
4044 // elements_end: Array end.
4045 // separator: Separator string.
4047 // Copy the separator to the result.
4048 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4049 __ SmiUntag(string_length);
4052 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4053 __ CopyBytes(string, result_pos, string_length, scratch1);
4055 __ bind(&long_separator);
4056 __ ld(string, MemOperand(element));
4057 __ Daddu(element, element, kPointerSize);
4058 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4059 __ SmiUntag(string_length);
4060 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4061 __ CopyBytes(string, result_pos, string_length, scratch1);
4062 // End while (element < elements_end).
4063 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4064 DCHECK(result.is(v0));
4068 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4070 context()->Plug(v0);
4074 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4075 DCHECK(expr->arguments()->length() == 0);
4076 ExternalReference debug_is_active =
4077 ExternalReference::debug_is_active_address(isolate());
4078 __ li(at, Operand(debug_is_active));
4079 __ lbu(v0, MemOperand(at));
4081 context()->Plug(v0);
4085 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4086 if (expr->function() != NULL &&
4087 expr->function()->intrinsic_type == Runtime::INLINE) {
4088 Comment cmnt(masm_, "[ InlineRuntimeCall");
4089 EmitInlineRuntimeCall(expr);
4093 Comment cmnt(masm_, "[ CallRuntime");
4094 ZoneList<Expression*>* args = expr->arguments();
4095 int arg_count = args->length();
4097 if (expr->is_jsruntime()) {
4098 // Push the builtins object as the receiver.
4099 Register receiver = LoadIC::ReceiverRegister();
4100 __ ld(receiver, GlobalObjectOperand());
4101 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4104 // Load the function from the receiver.
4105 __ li(LoadIC::NameRegister(), Operand(expr->name()));
4106 if (FLAG_vector_ics) {
4107 __ li(LoadIC::SlotRegister(),
4108 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4109 CallLoadIC(NOT_CONTEXTUAL);
4111 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4114 // Push the target function under the receiver.
4115 __ ld(at, MemOperand(sp, 0));
4117 __ sd(v0, MemOperand(sp, kPointerSize));
4119 // Push the arguments ("left-to-right").
4120 int arg_count = args->length();
4121 for (int i = 0; i < arg_count; i++) {
4122 VisitForStackValue(args->at(i));
4125 // Record source position of the IC call.
4126 SetSourcePosition(expr->position());
4127 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4128 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4131 // Restore context register.
4132 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4134 context()->DropAndPlug(1, v0);
4136 // Push the arguments ("left-to-right").
4137 for (int i = 0; i < arg_count; i++) {
4138 VisitForStackValue(args->at(i));
4141 // Call the C runtime function.
4142 __ CallRuntime(expr->function(), arg_count);
4143 context()->Plug(v0);
4148 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4149 switch (expr->op()) {
4150 case Token::DELETE: {
4151 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4152 Property* property = expr->expression()->AsProperty();
4153 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4155 if (property != NULL) {
4156 VisitForStackValue(property->obj());
4157 VisitForStackValue(property->key());
4158 __ li(a1, Operand(Smi::FromInt(strict_mode())));
4160 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4161 context()->Plug(v0);
4162 } else if (proxy != NULL) {
4163 Variable* var = proxy->var();
4164 // Delete of an unqualified identifier is disallowed in strict mode
4165 // but "delete this" is allowed.
4166 DCHECK(strict_mode() == SLOPPY || var->is_this());
4167 if (var->IsUnallocated()) {
4168 __ ld(a2, GlobalObjectOperand());
4169 __ li(a1, Operand(var->name()));
4170 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4171 __ Push(a2, a1, a0);
4172 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4173 context()->Plug(v0);
4174 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4175 // Result of deleting non-global, non-dynamic variables is false.
4176 // The subexpression does not have side effects.
4177 context()->Plug(var->is_this());
4179 // Non-global variable. Call the runtime to try to delete from the
4180 // context where the variable was introduced.
4181 DCHECK(!context_register().is(a2));
4182 __ li(a2, Operand(var->name()));
4183 __ Push(context_register(), a2);
4184 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4185 context()->Plug(v0);
4188 // Result of deleting non-property, non-variable reference is true.
4189 // The subexpression may have side effects.
4190 VisitForEffect(expr->expression());
4191 context()->Plug(true);
4197 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4198 VisitForEffect(expr->expression());
4199 context()->Plug(Heap::kUndefinedValueRootIndex);
4204 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4205 if (context()->IsEffect()) {
4206 // Unary NOT has no side effects so it's only necessary to visit the
4207 // subexpression. Match the optimizing compiler by not branching.
4208 VisitForEffect(expr->expression());
4209 } else if (context()->IsTest()) {
4210 const TestContext* test = TestContext::cast(context());
4211 // The labels are swapped for the recursive call.
4212 VisitForControl(expr->expression(),
4213 test->false_label(),
4215 test->fall_through());
4216 context()->Plug(test->true_label(), test->false_label());
4218 // We handle value contexts explicitly rather than simply visiting
4219 // for control and plugging the control flow into the context,
4220 // because we need to prepare a pair of extra administrative AST ids
4221 // for the optimizing compiler.
4222 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4223 Label materialize_true, materialize_false, done;
4224 VisitForControl(expr->expression(),
4228 __ bind(&materialize_true);
4229 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4230 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4231 if (context()->IsStackValue()) __ push(v0);
4233 __ bind(&materialize_false);
4234 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4235 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4236 if (context()->IsStackValue()) __ push(v0);
4242 case Token::TYPEOF: {
4243 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4244 { StackValueContext context(this);
4245 VisitForTypeofValue(expr->expression());
4247 __ CallRuntime(Runtime::kTypeof, 1);
4248 context()->Plug(v0);
4258 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4259 DCHECK(expr->expression()->IsValidReferenceExpression());
4261 Comment cmnt(masm_, "[ CountOperation");
4262 SetSourcePosition(expr->position());
4264 // Expression can only be a property, a global or a (parameter or local)
4266 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4267 LhsKind assign_type = VARIABLE;
4268 Property* prop = expr->expression()->AsProperty();
4269 // In case of a property we use the uninitialized expression context
4270 // of the key to detect a named property.
4273 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4276 // Evaluate expression and get value.
4277 if (assign_type == VARIABLE) {
4278 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4279 AccumulatorValueContext context(this);
4280 EmitVariableLoad(expr->expression()->AsVariableProxy());
4282 // Reserve space for result of postfix operation.
4283 if (expr->is_postfix() && !context()->IsEffect()) {
4284 __ li(at, Operand(Smi::FromInt(0)));
4287 if (assign_type == NAMED_PROPERTY) {
4288 // Put the object both on the stack and in the register.
4289 VisitForStackValue(prop->obj());
4290 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
4291 EmitNamedPropertyLoad(prop);
4293 VisitForStackValue(prop->obj());
4294 VisitForStackValue(prop->key());
4295 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
4296 __ ld(LoadIC::NameRegister(), MemOperand(sp, 0));
4297 EmitKeyedPropertyLoad(prop);
4301 // We need a second deoptimization point after loading the value
4302 // in case evaluating the property load my have a side effect.
4303 if (assign_type == VARIABLE) {
4304 PrepareForBailout(expr->expression(), TOS_REG);
4306 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4309 // Inline smi case if we are in a loop.
4310 Label stub_call, done;
4311 JumpPatchSite patch_site(masm_);
4313 int count_value = expr->op() == Token::INC ? 1 : -1;
4315 if (ShouldInlineSmiCase(expr->op())) {
4317 patch_site.EmitJumpIfNotSmi(v0, &slow);
4319 // Save result for postfix expressions.
4320 if (expr->is_postfix()) {
4321 if (!context()->IsEffect()) {
4322 // Save the result on the stack. If we have a named or keyed property
4323 // we store the result under the receiver that is currently on top
4325 switch (assign_type) {
4329 case NAMED_PROPERTY:
4330 __ sd(v0, MemOperand(sp, kPointerSize));
4332 case KEYED_PROPERTY:
4333 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4339 Register scratch1 = a1;
4340 Register scratch2 = a4;
4341 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4342 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4343 __ BranchOnNoOverflow(&done, scratch2);
4344 // Call stub. Undo operation first.
4349 ToNumberStub convert_stub(isolate());
4350 __ CallStub(&convert_stub);
4352 // Save result for postfix expressions.
4353 if (expr->is_postfix()) {
4354 if (!context()->IsEffect()) {
4355 // Save the result on the stack. If we have a named or keyed property
4356 // we store the result under the receiver that is currently on top
4358 switch (assign_type) {
4362 case NAMED_PROPERTY:
4363 __ sd(v0, MemOperand(sp, kPointerSize));
4365 case KEYED_PROPERTY:
4366 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4372 __ bind(&stub_call);
4374 __ li(a0, Operand(Smi::FromInt(count_value)));
4376 // Record position before stub call.
4377 SetSourcePosition(expr->position());
4379 BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
4380 CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
4381 patch_site.EmitPatchInfo();
4384 // Store the value returned in v0.
4385 switch (assign_type) {
4387 if (expr->is_postfix()) {
4388 { EffectContext context(this);
4389 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4391 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4394 // For all contexts except EffectConstant we have the result on
4395 // top of the stack.
4396 if (!context()->IsEffect()) {
4397 context()->PlugTOS();
4400 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4402 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4403 context()->Plug(v0);
4406 case NAMED_PROPERTY: {
4407 __ mov(StoreIC::ValueRegister(), result_register());
4408 __ li(StoreIC::NameRegister(),
4409 Operand(prop->key()->AsLiteral()->value()));
4410 __ pop(StoreIC::ReceiverRegister());
4411 CallStoreIC(expr->CountStoreFeedbackId());
4412 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4413 if (expr->is_postfix()) {
4414 if (!context()->IsEffect()) {
4415 context()->PlugTOS();
4418 context()->Plug(v0);
4422 case KEYED_PROPERTY: {
4423 __ mov(KeyedStoreIC::ValueRegister(), result_register());
4424 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
4425 Handle<Code> ic = strict_mode() == SLOPPY
4426 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4427 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4428 CallIC(ic, expr->CountStoreFeedbackId());
4429 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4430 if (expr->is_postfix()) {
4431 if (!context()->IsEffect()) {
4432 context()->PlugTOS();
4435 context()->Plug(v0);
4443 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4444 DCHECK(!context()->IsEffect());
4445 DCHECK(!context()->IsTest());
4446 VariableProxy* proxy = expr->AsVariableProxy();
4447 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4448 Comment cmnt(masm_, "[ Global variable");
4449 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand());
4450 __ li(LoadIC::NameRegister(), Operand(proxy->name()));
4451 if (FLAG_vector_ics) {
4452 __ li(LoadIC::SlotRegister(),
4453 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4455 // Use a regular load, not a contextual load, to avoid a reference
4457 CallLoadIC(NOT_CONTEXTUAL);
4458 PrepareForBailout(expr, TOS_REG);
4459 context()->Plug(v0);
4460 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4461 Comment cmnt(masm_, "[ Lookup slot");
4464 // Generate code for loading from variables potentially shadowed
4465 // by eval-introduced variables.
4466 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4469 __ li(a0, Operand(proxy->name()));
4471 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4472 PrepareForBailout(expr, TOS_REG);
4475 context()->Plug(v0);
4477 // This expression cannot throw a reference error at the top level.
4478 VisitInDuplicateContext(expr);
4482 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4483 Expression* sub_expr,
4484 Handle<String> check) {
4485 Label materialize_true, materialize_false;
4486 Label* if_true = NULL;
4487 Label* if_false = NULL;
4488 Label* fall_through = NULL;
4489 context()->PrepareTest(&materialize_true, &materialize_false,
4490 &if_true, &if_false, &fall_through);
4492 { AccumulatorValueContext context(this);
4493 VisitForTypeofValue(sub_expr);
4495 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4497 Factory* factory = isolate()->factory();
4498 if (String::Equals(check, factory->number_string())) {
4499 __ JumpIfSmi(v0, if_true);
4500 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4501 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4502 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4503 } else if (String::Equals(check, factory->string_string())) {
4504 __ JumpIfSmi(v0, if_false);
4505 // Check for undetectable objects => false.
4506 __ GetObjectType(v0, v0, a1);
4507 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4508 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4509 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4510 Split(eq, a1, Operand(zero_reg),
4511 if_true, if_false, fall_through);
4512 } else if (String::Equals(check, factory->symbol_string())) {
4513 __ JumpIfSmi(v0, if_false);
4514 __ GetObjectType(v0, v0, a1);
4515 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4516 } else if (String::Equals(check, factory->boolean_string())) {
4517 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4518 __ Branch(if_true, eq, v0, Operand(at));
4519 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4520 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4521 } else if (String::Equals(check, factory->undefined_string())) {
4522 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4523 __ Branch(if_true, eq, v0, Operand(at));
4524 __ JumpIfSmi(v0, if_false);
4525 // Check for undetectable objects => true.
4526 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4527 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4528 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4529 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4530 } else if (String::Equals(check, factory->function_string())) {
4531 __ JumpIfSmi(v0, if_false);
4532 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4533 __ GetObjectType(v0, v0, a1);
4534 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4535 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4536 if_true, if_false, fall_through);
4537 } else if (String::Equals(check, factory->object_string())) {
4538 __ JumpIfSmi(v0, if_false);
4539 __ LoadRoot(at, Heap::kNullValueRootIndex);
4540 __ Branch(if_true, eq, v0, Operand(at));
4541 // Check for JS objects => true.
4542 __ GetObjectType(v0, v0, a1);
4543 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4544 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
4545 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4546 // Check for undetectable objects => false.
4547 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4548 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4549 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4551 if (if_false != fall_through) __ jmp(if_false);
4553 context()->Plug(if_true, if_false);
4557 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4558 Comment cmnt(masm_, "[ CompareOperation");
4559 SetSourcePosition(expr->position());
4561 // First we try a fast inlined version of the compare when one of
4562 // the operands is a literal.
4563 if (TryLiteralCompare(expr)) return;
4565 // Always perform the comparison for its control flow. Pack the result
4566 // into the expression's context after the comparison is performed.
4567 Label materialize_true, materialize_false;
4568 Label* if_true = NULL;
4569 Label* if_false = NULL;
4570 Label* fall_through = NULL;
4571 context()->PrepareTest(&materialize_true, &materialize_false,
4572 &if_true, &if_false, &fall_through);
4574 Token::Value op = expr->op();
4575 VisitForStackValue(expr->left());
4578 VisitForStackValue(expr->right());
4579 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4580 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4581 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
4582 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
4585 case Token::INSTANCEOF: {
4586 VisitForStackValue(expr->right());
4587 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4589 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4590 // The stub returns 0 for true.
4591 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4596 VisitForAccumulatorValue(expr->right());
4597 Condition cc = CompareIC::ComputeCondition(op);
4598 __ mov(a0, result_register());
4601 bool inline_smi_code = ShouldInlineSmiCase(op);
4602 JumpPatchSite patch_site(masm_);
4603 if (inline_smi_code) {
4605 __ Or(a2, a0, Operand(a1));
4606 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4607 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4608 __ bind(&slow_case);
4610 // Record position and call the compare IC.
4611 SetSourcePosition(expr->position());
4612 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4613 CallIC(ic, expr->CompareOperationFeedbackId());
4614 patch_site.EmitPatchInfo();
4615 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4616 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4620 // Convert the result of the comparison into one expected for this
4621 // expression's context.
4622 context()->Plug(if_true, if_false);
4626 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4627 Expression* sub_expr,
4629 Label materialize_true, materialize_false;
4630 Label* if_true = NULL;
4631 Label* if_false = NULL;
4632 Label* fall_through = NULL;
4633 context()->PrepareTest(&materialize_true, &materialize_false,
4634 &if_true, &if_false, &fall_through);
4636 VisitForAccumulatorValue(sub_expr);
4637 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4638 __ mov(a0, result_register());
4639 if (expr->op() == Token::EQ_STRICT) {
4640 Heap::RootListIndex nil_value = nil == kNullValue ?
4641 Heap::kNullValueRootIndex :
4642 Heap::kUndefinedValueRootIndex;
4643 __ LoadRoot(a1, nil_value);
4644 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4646 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4647 CallIC(ic, expr->CompareOperationFeedbackId());
4648 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4650 context()->Plug(if_true, if_false);
4654 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4655 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4656 context()->Plug(v0);
4660 Register FullCodeGenerator::result_register() {
4665 Register FullCodeGenerator::context_register() {
4670 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4671 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4672 DCHECK(IsAligned(frame_offset, kPointerSize));
4673 // __ sw(value, MemOperand(fp, frame_offset));
4674 __ sd(value, MemOperand(fp, frame_offset));
4678 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4679 __ ld(dst, ContextOperand(cp, context_index));
4683 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4684 Scope* declaration_scope = scope()->DeclarationScope();
4685 if (declaration_scope->is_global_scope() ||
4686 declaration_scope->is_module_scope()) {
4687 // Contexts nested in the native context have a canonical empty function
4688 // as their closure, not the anonymous closure containing the global
4689 // code. Pass a smi sentinel and let the runtime look up the empty
4691 __ li(at, Operand(Smi::FromInt(0)));
4692 } else if (declaration_scope->is_eval_scope()) {
4693 // Contexts created by a call to eval have the same closure as the
4694 // context calling eval, not the anonymous closure containing the eval
4695 // code. Fetch it from the context.
4696 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4698 DCHECK(declaration_scope->is_function_scope());
4699 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4705 // ----------------------------------------------------------------------------
4706 // Non-local control flow support.
4708 void FullCodeGenerator::EnterFinallyBlock() {
4709 DCHECK(!result_register().is(a1));
4710 // Store result register while executing finally block.
4711 __ push(result_register());
4712 // Cook return address in link register to stack (smi encoded Code* delta).
4713 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
4716 // Store result register while executing finally block.
4719 // Store pending message while executing finally block.
4720 ExternalReference pending_message_obj =
4721 ExternalReference::address_of_pending_message_obj(isolate());
4722 __ li(at, Operand(pending_message_obj));
4723 __ ld(a1, MemOperand(at));
4726 ExternalReference has_pending_message =
4727 ExternalReference::address_of_has_pending_message(isolate());
4728 __ li(at, Operand(has_pending_message));
4729 __ ld(a1, MemOperand(at));
4733 ExternalReference pending_message_script =
4734 ExternalReference::address_of_pending_message_script(isolate());
4735 __ li(at, Operand(pending_message_script));
4736 __ ld(a1, MemOperand(at));
4741 void FullCodeGenerator::ExitFinallyBlock() {
4742 DCHECK(!result_register().is(a1));
4743 // Restore pending message from stack.
4745 ExternalReference pending_message_script =
4746 ExternalReference::address_of_pending_message_script(isolate());
4747 __ li(at, Operand(pending_message_script));
4748 __ sd(a1, MemOperand(at));
4752 ExternalReference has_pending_message =
4753 ExternalReference::address_of_has_pending_message(isolate());
4754 __ li(at, Operand(has_pending_message));
4755 __ sd(a1, MemOperand(at));
4758 ExternalReference pending_message_obj =
4759 ExternalReference::address_of_pending_message_obj(isolate());
4760 __ li(at, Operand(pending_message_obj));
4761 __ sd(a1, MemOperand(at));
4763 // Restore result register from stack.
4766 // Uncook return address and return.
4767 __ pop(result_register());
4770 __ Daddu(at, a1, Operand(masm_->CodeObject()));
4777 #define __ ACCESS_MASM(masm())
4779 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4781 int* context_length) {
4782 // The macros used here must preserve the result register.
4784 // Because the handler block contains the context of the finally
4785 // code, we can restore it directly from there for the finally code
4786 // rather than iteratively unwinding contexts via their previous
4788 __ Drop(*stack_depth); // Down to the handler block.
4789 if (*context_length > 0) {
4790 // Restore the context to its dedicated register and the stack.
4791 __ ld(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4792 __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4795 __ Call(finally_entry_);
4798 *context_length = 0;
4806 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4808 BackEdgeState target_state,
4809 Code* replacement_code) {
4810 static const int kInstrSize = Assembler::kInstrSize;
4811 Address branch_address = pc - 8 * kInstrSize;
4812 CodePatcher patcher(branch_address, 1);
4814 switch (target_state) {
4816 // slt at, a3, zero_reg (in case of count based interrupts)
4817 // beq at, zero_reg, ok
4818 // lui t9, <interrupt stub address> upper
4819 // ori t9, <interrupt stub address> u-middle
4821 // ori t9, <interrupt stub address> lower
4824 // ok-label ----- pc_after points here
4825 patcher.masm()->slt(at, a3, zero_reg);
4827 case ON_STACK_REPLACEMENT:
4828 case OSR_AFTER_STACK_CHECK:
4829 // addiu at, zero_reg, 1
4830 // beq at, zero_reg, ok ;; Not changed
4831 // lui t9, <on-stack replacement address> upper
4832 // ori t9, <on-stack replacement address> middle
4834 // ori t9, <on-stack replacement address> lower
4835 // jalr t9 ;; Not changed
4836 // nop ;; Not changed
4837 // ok-label ----- pc_after points here
4838 patcher.masm()->daddiu(at, zero_reg, 1);
4841 Address pc_immediate_load_address = pc - 6 * kInstrSize;
4842 // Replace the stack check address in the load-immediate (6-instr sequence)
4843 // with the entry address of the replacement code.
4844 Assembler::set_target_address_at(pc_immediate_load_address,
4845 replacement_code->entry());
4847 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4848 unoptimized_code, pc_immediate_load_address, replacement_code);
4852 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4854 Code* unoptimized_code,
4856 static const int kInstrSize = Assembler::kInstrSize;
4857 Address branch_address = pc - 8 * kInstrSize;
4858 Address pc_immediate_load_address = pc - 6 * kInstrSize;
4860 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
4861 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4862 DCHECK(reinterpret_cast<uint64_t>(
4863 Assembler::target_address_at(pc_immediate_load_address)) ==
4864 reinterpret_cast<uint64_t>(
4865 isolate->builtins()->InterruptCheck()->entry()));
4869 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4871 if (reinterpret_cast<uint64_t>(
4872 Assembler::target_address_at(pc_immediate_load_address)) ==
4873 reinterpret_cast<uint64_t>(
4874 isolate->builtins()->OnStackReplacement()->entry())) {
4875 return ON_STACK_REPLACEMENT;
4878 DCHECK(reinterpret_cast<uint64_t>(
4879 Assembler::target_address_at(pc_immediate_load_address)) ==
4880 reinterpret_cast<uint64_t>(
4881 isolate->builtins()->OsrAfterStackCheck()->entry()));
4882 return OSR_AFTER_STACK_CHECK;
4886 } } // namespace v8::internal
4888 #endif // V8_TARGET_ARCH_MIPS64