// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "mips/code-stubs-mips.h"
#include "mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. This marker is an
// andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy (the raw
// 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
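//
// A hedged worked example of the marker encoding (the numbers are
// illustrative, not taken from real generated code): if the patch site lies
// 12 instructions before the marker, EmitPatchInfo() below computes
// delta_to_patch_site == 12 and emits
//     andi zero_reg, Register::from_code(12 / kImm16Mask), 12 % kImm16Mask
// i.e. "andi zero_reg, zero_reg, 12" when kImm16Mask is 0xffff (assumed
// here). A patcher can then recover the delta as
// rs.code() * kImm16Mask + imm16 == 0 * 0xffff + 12 == 12.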
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
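//
// As a sketch of the incoming stack (a hypothetical two-parameter function,
// with kPointerSize == 4): the receiver is pushed first, then the arguments
// left to right, so on entry
//   sp[8]: receiver
//   sp[4]: parameter 0 (leftmost)
//   sp[0]: parameter 1 (rightmost)
// which is why the receiver fix-up below loads from
// sp + num_parameters * kPointerSize.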
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Classic mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->is_classic_mode() && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      // Emit a loop to initialize stack cells for locals when optimizing for
      // size. Otherwise, unroll the loop for maximum performance.
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      if ((FLAG_optimize_for_size && locals_count > 4) ||
          !is_int16(locals_count)) {
        Label loop;
        __ Subu(a2, sp, Operand(locals_count * kPointerSize));
        __ bind(&loop);
        __ Subu(sp, sp, Operand(kPointerSize));
        __ Branch(&loop, gt, sp, Operand(a2), USE_DELAY_SLOT);
        __ sw(t5, MemOperand(sp, 0));  // Push in the delay slot.
      } else {
        __ Subu(sp, sp, Operand(locals_count * kPointerSize));
        for (int i = 0; i < locals_count; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(t0));
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  ASSERT(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
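  // For example (numbers illustrative only): a back edge whose loop body
  // spans 10 * kCodeSizeMultiplier bytes of code gets weight 10, i.e. the
  // profiling counter drops by 10 on every iteration, so larger loop bodies
  // reach the interrupt/OSR check sooner. Max(1, ...) guarantees that even a
  // tiny loop makes progress toward the check, and Min(kMaxBackEdgeWeight, ...)
  // caps the decrement for very large bodies.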
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
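
// Worked example for the offset arithmetic above (values hypothetical: assume
// kPointerSize == 4 and JavaScriptFrameConstants::kLocal0Offset == -8): a
// local with var->index() == 1 gets offset = -4 + (-8) = -12, the third word
// below fp, while a parameter with var->index() == 0 in a two-parameter
// function gets offset = 0 + (2 + 1) * 4 = 12, i.e. above fp where the caller
// pushed the receiver and arguments.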


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global scope.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        ASSERT(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ li(a1, cell);
  __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
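
  // At this point the for-in state occupies five stack slots, matching the
  // loads at sp[0]..sp[4 * kPointerSize] in the loop below (byte offsets
  // assume kPointerSize == 4):
  //   sp[0]  : current index (smi)
  //   sp[4]  : array length (smi)
  //   sp[8]  : fixed array of keys (or enum cache)
  //   sp[12] : map of the enumerable, or smi marker (1 = slow check, 0 = proxy)
  //   sp[16] : the enumerable object itself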

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());
  __ mov(a0, v0);

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(a0, GlobalObjectOperand());
  __ li(a2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST_HARMONY
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ lw(a0, GlobalObjectOperand());
      __ li(a2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
      expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->value()));
            __ lw(a1, MemOperand(sp));
            CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
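  // As the two casts above rely on, constant_elements is a two-element pair:
  // get(0) holds the ElementsKind as a smi and get(1) holds the constant
  // values as a FixedArrayBase.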
1796 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1797 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1798 // If the only customer of allocation sites is transitioning, then
1799 // we can turn it off if we don't have anywhere else to transition to.
1800 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1803 __ mov(a0, result_register());
1804 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1805 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1806 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1807 __ li(a1, Operand(constant_elements));
1808 if (has_fast_elements && constant_elements_values->map() ==
1809 isolate()->heap()->fixed_cow_array_map()) {
1810 FastCloneShallowArrayStub stub(
1811 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1812 allocation_site_mode,
1815 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1817 } else if (expr->depth() > 1 || Serializer::enabled() ||
1818 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1819 __ li(a0, Operand(Smi::FromInt(flags)));
1820 __ Push(a3, a2, a1, a0);
1821 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1823 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1824 FLAG_smi_only_arrays);
1825 FastCloneShallowArrayStub::Mode mode =
1826 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1828 if (has_fast_elements) {
1829 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1832 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1836 bool result_saved = false; // Is the result saved to the stack?
1838 // Emit code to evaluate all the non-constant subexpressions and to store
1839 // them into the newly cloned array.
1840 for (int i = 0; i < length; i++) {
1841 Expression* subexpr = subexprs->at(i);
1842 // If the subexpression is a literal or a simple materialized literal it
1843 // is already set in the cloned array.
1844 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1846 if (!result_saved) {
1847 __ push(v0); // array literal
1848 __ Push(Smi::FromInt(expr->literal_index()));
1849 result_saved = true;
1852 VisitForAccumulatorValue(subexpr);
1854 if (IsFastObjectElementsKind(constant_elements_kind)) {
1855 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1856 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1857 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1858 __ sw(result_register(), FieldMemOperand(a1, offset));
1859 // Update the write barrier for the array store.
1860 __ RecordWriteField(a1, offset, result_register(), a2,
1861 kRAHasBeenSaved, kDontSaveFPRegs,
1862 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1864 __ li(a3, Operand(Smi::FromInt(i)));
1865 __ mov(a0, result_register());
1866 StoreArrayLiteralElementStub stub;
1870 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1873 __ Pop(); // literal index
1874 context()->PlugTOS();
1876 context()->Plug(v0);
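// Illustrative examples (not from the original source) of the three LHS
// kinds distinguished below:
//   x = v;      // VARIABLE
//   o.x = v;    // NAMED_PROPERTY
//   o[i] = v;   // KEYED_PROPERTY
// Compound forms such as o.x += v additionally load the old value and run
// the binary operation before the store.
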
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ lw(a1, MemOperand(sp, 0));
        __ push(v0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

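// Illustrative mapping (not from the original source) of yield kinds to
// source forms inside function* g():
//   yield v;       // SUSPEND: box {value: v, done: false} and suspend
//   return v;      // FINAL: mark the generator closed, box {done: true}
//   yield* iter;   // DELEGATING: loop calling the delegate iterator
// INITIAL covers the suspension at the implicit start of the generator body.
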
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(a2, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));    // iter
      __ Push(a2, a3, a0);                            // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);                                     // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(a0);                                    // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);                                    // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);                                     // result
      EmitReturnSequence();
      __ mov(a0, v0);
      __ bind(&l_resume);                             // received in a0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(a2, Heap::knext_stringRootIndex);   // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));    // iter
      __ Push(a2, a3, a0);                            // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ lw(a1, MemOperand(sp, kPointerSize));
      __ lw(a0, MemOperand(sp, 2 * kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ mov(a0, v0);
      __ push(a0);                                    // save result
      __ LoadRoot(a2, Heap::kdone_stringRootIndex);   // "done"
      CallLoadIC(NOT_CONTEXTUAL);                     // result.done in v0
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(a0);                                     // result
      __ LoadRoot(a2, Heap::kvalue_stringRootIndex);  // "value"
      CallLoadIC(NOT_CONTEXTUAL);                     // result.value in v0
      context()->DropAndPlug(2, v0);                  // drop iter and g
      break;
    }
  }
}

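// Resume protocol, summarized from the code below: a0 carries the value
// being sent (or thrown) into the generator, a1 holds the generator object,
// and the suspended activation is rebuilt by pushing ra/fp/cp/function plus
// one hole per formal parameter before jumping back into the code object.
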
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator, as
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  ASSERT(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  // Reach here when generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ push(a2);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(a0);
    __ CallRuntime(Runtime::kThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(a1);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ li(a1, Operand(map));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}

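// The object built above is the JS-visible iterator result, roughly
// (illustrative only):
//   { value: <popped TOS>, done: <true|false> }
// laid out as five words: map, properties, elements, value, done.
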
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->value()));
  // Call load IC. It has arguments receiver and property name a0 and a2.
  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpICStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ mflo(scratch1);
      __ mfhi(scratch2);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ mflo(v0);
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      ASSERT(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}

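// Smi arithmetic background (illustrative; not from the original source):
// with kSmiTag == 0 and kSmiTagSize == 1, a smi stores (value << 1), so
// tagged ADD/SUB can operate on the tagged words directly and only need an
// overflow check, e.g. smi 3 + smi 4 is 0x6 + 0x8 = 0xE, the smi for 7.
// SHL untags first; adding 0x40000000 to the shifted result turns any value
// outside the 31-bit smi range into a negative number, which the 'lt'
// branch above sends to the stub.
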
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  BinaryOpICStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ pop(a0);  // Restore value.
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC(NOT_CONTEXTUAL);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      __ Pop(a0, a2);  // a0 = restored value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
      break;
    }
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
    __ lw(a1, GlobalObjectOperand());
    CallStoreIC(CONTEXTUAL);
  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ lw(a1, StackOperand(var));
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a1, Operand(t0));
      __ sw(result_register(), StackOperand(var));
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope. However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context. We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Context and name.
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, a1);
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      __ sw(result_register(), location);
      if (var->IsContextSlot()) {
        // RecordWrite may destroy all its register arguments.
        __ mov(a3, result_register());
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      // Perform the assignment.
      __ sw(v0, location);
      if (var->IsContextSlot()) {
        __ mov(a3, v0);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}

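// Illustrative examples (not from the original source) of the cases above:
//   x = 1;         // IsUnallocated: global store through the store IC
//   const c = 1;   // INIT_CONST: store only while the slot still holds
//                  // the hole
//   l = 1;         // LET, non-initializing: the hole check above throws a
//                  // ReferenceError if 'l' has not been initialized yet
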
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());  // Load the value.
  __ li(a2, Operand(prop->key()->AsLiteral()->value()));
  __ pop(a1);

  CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(a0, result_register());
  __ Pop(a2, a1);  // a1 = key.

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(a1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}

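// Illustrative dispatch (not from the original source):
//   o.name  -> named load IC, receiver in a0 and name in a2;
//   o[expr] -> keyed load IC, key in a0 and receiver in a1.
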
void FullCodeGenerator::CallIC(Handle<Code> code,
                               ContextualMode mode,
                               TypeFeedbackId id) {
  ic_total_count_++;
  ASSERT(mode != CONTEXTUAL || id.IsNone());
  __ Call(code, RelocInfo::CODE_TARGET, id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithIC(Call* expr) {
  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  CallFunctionFlags flags;
  // Get the target function.
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a classic mode method.
    __ Push(isolate()->factory()->undefined_value());
    flags = NO_CALL_FUNCTION_FLAGS;
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ lw(v0, MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
    flags = CALL_AS_METHOD;
  }

  // Load the arguments.
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, v0);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ lw(a1, MemOperand(sp, 0));
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ li(a2, Operand(cell));

  // Record call targets in unoptimized code.
  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t2: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t2, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
  }

  // t1: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));

  // t0: the language mode.
  __ li(t0, Operand(Smi::FromInt(language_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}

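// Illustrative example (not from the original source): a direct call
//   eval("x + 1")
// cannot be bound statically, so the pushed callee plus the four values
// above (first argument, receiver, language mode, scope position) are
// handed to %ResolvePossiblyDirectEval, which returns the actual function
// in v0 and the receiver in v1.
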
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    ASSERT(!context_register().is(a2));
    __ li(a2, Operand(proxy->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(v0, v1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCallWithStub(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCallWithStub(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ li(a2, Operand(cell));

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}

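// Illustrative example (not from the original source): for
//   var p = new Point(1, 2);
// the constructor and both arguments are pushed, a0 = 2 (argument count),
// a1 = Point, and a2 holds the feedback cell that records the call target
// for later optimization of this allocation site.
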
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of t2 to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}

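// Illustrative results (assumed, based on the branches above): a JS function
// maps to the "Function" class string, an object whose map constructor is a
// JS function gets that function's instance class name, an object with a
// non-function constructor gets "Object", and smis and other non-spec
// objects get null.
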
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(v0);
}

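// Background, summarized from the code above: field index 0 is the date's
// time value; fields below JSDate::kFirstUncachedField may be read from the
// per-object cache as long as the object's cache stamp matches the global
// date_cache_stamp, otherwise a C function recomputes the field.
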
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}

void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}


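// The StringChar*Generator helpers used in the next functions share a
// pattern: GenerateFast() emits the inline fast path and branches to the
// given labels on the uncommon cases, while GenerateSlow() emits the
// out-of-line fallback. A NopRuntimeCallHelper (whose before/after hooks do
// nothing) suffices here because full-codegen keeps no live registers that
// would need saving around the fallback's runtime call.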
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}


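// %_GetFromCache looks a key up in a JSFunctionResultCache, which is
// (roughly) a FixedArray of key/value pairs plus a header field holding the
// "finger", the index of the most recently hit key. The inline fast path
// below checks only the entry under the finger; anything else defers to the
// runtime, which performs the full search and moves the finger.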
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(
            cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}


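// A string caches a computed array index in its hash field;
// String::kContainsCachedArrayIndexMask selects the bits that must all be
// clear for such a cached index to be present. That is why a masked value of
// zero means "has cached array index" in the split below.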
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


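// Fast path for Array.prototype.join on arrays of flat ASCII strings with an
// ASCII separator; a sketch of the three phases implemented below:
//   1. Validate the array and sum the (smi) element lengths, bailing out on
//      anything unexpected.
//   2. Add in array_length - 1 separator lengths, with explicit 32-bit and
//      smi overflow checks, and allocate the result string.
//   3. Copy elements and separators using one of three loops, specialized
//      for empty, one-character and longer separators.
// On bailout the function returns undefined; the JS caller is expected to
// detect that and redo the join generically.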
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
        array_length, Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mult(array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ mfhi(scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
  __ mflo(scratch2);
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch1,
                         scratch2,
                         elements_end,
                         &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


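// Dispatch for runtime calls, roughly three flavors:
//   %_Foo(...) - inline intrinsics, expanded by EmitInlineRuntimeCall into
//                the Emit* functions above;
//   %Foo(...)  - calls into the C++ runtime;
//   expr->is_jsruntime() - calls to JS builtins, loaded off the builtins
//                object and invoked like ordinary JS calls.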
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ lw(a0, GlobalObjectOperand());
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
    __ push(a0);

    // Load the function from the receiver.
    __ li(a2, Operand(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          ASSERT(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


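// Count operations (x++, ++x, x--, --x) compile to a load, an inlined smi add
// with overflow check, and a store back, with BinaryOpICStub as the generic
// fallback. The JumpPatchSite marker lets the IC machinery later repatch the
// smi check once type feedback shows non-smi operands. For a postfix use
// whose value is needed, the original value is saved on the stack before the
// increment so it can be plugged in as the expression result.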
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(v0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ lw(a1, MemOperand(sp, 0));
      __ push(v0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         NOT_CONTEXTUAL,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));  // Name.
      __ pop(a1);  // Receiver.
      CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ Pop(a2, a1);  // a1 = key, a2 = receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


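// typeof must not throw for unresolvable references, e.g.
//   typeof undeclared_global  // -> "undefined", no ReferenceError
// hence the non-contextual load for globals and the
// kLoadContextSlotNoReferenceError runtime entry for lookup slots below.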
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ lw(a0, GlobalObjectOperand());
    __ li(a2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

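// "Cooking" the return address: ra is saved on the stack not as a raw code
// address but as a smi-encoded offset from the start of this Code object, so
// the GC can relocate the code without fixing up the saved value.
// Round trip, as implemented below:
//   cooked = (ra - CodeObject()) * 2        // smi-tag the delta
//   ra'    = (cooked >> 1) + CodeObject()   // ExitFinallyBlock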
void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


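// A back edge is compiled as a conditional call to the InterruptCheck stub
// (see the instruction diagrams below). PatchAt flips the guard between
// "taken only when the interrupt budget in a3 is exhausted" and "always
// taken", and rewrites the lui/ori-loaded stub address, so the same six
// instructions serve as either an interrupt check or an unconditional entry
// into on-stack replacement.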
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


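// The back edge state is recovered from the code itself: an
// 'addiu at, zero_reg, 1' at the branch site means the check has been
// patched away from INTERRUPT, and the lui/ori target address then
// distinguishes ON_STACK_REPLACEMENT from OSR_AFTER_STACK_CHECK.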
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    ASSERT(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
          reinterpret_cast<uint32_t>(
              isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS