// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "mips/code-stubs-mips.h"
#include "mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
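// Worked example (illustrative numbers only): if the patch site lies 0x1234
// instructions before the marker, EmitPatchInfo below emits
//   andi(zero_reg, Register::from_code(0x1234 / 0xffff), 0x1234 % 0xffff)
// i.e. rx has code 0 and yyyy == 0x1234. The patcher recovers the delta as
// rx * 0xffff + yyyy; rx only contributes for deltas of 0xffff or more.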
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ Branch(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ Branch(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). t1 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();

  __ Push(ra, fp, cp, a1);
  if (locals_count > 0) {
    // Load undefined value here, so the value is ready for the loop
    // below.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  }
  // Adjust fp to point to caller's fp.
  __ Addu(fp, sp, Operand(2 * kPointerSize));
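
  // After the pushes above the frame looks roughly like this (4-byte
  // kPointerSize on MIPS32):
  //   fp + 4:  saved ra
  //   fp + 0:  caller's fp
  //   fp - 4:  cp (context)
  //   fp - 8:  a1 (the function)   <- sp, before locals are pushed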

  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {
      __ push(at);
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0 ||
      (scope()->is_qml_mode() && scope()->is_global_scope())) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is in a1.
    __ push(a1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both v0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                   (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableProxy* proxy = scope()->function();
        ASSERT(proxy->var()->mode() == CONST ||
               proxy->var()->mode() == CONST_HARMONY);
        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
      Label ok;
      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(t0));
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  ASSERT(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = 10;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
}


static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 142;
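// For example, a back edge spanning 1420 bytes of generated code gets a
// weight of Min(127, Max(1, 1420 / 142)) == 10, so larger loop bodies
// consume more of the interrupt budget per iteration than small ones.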


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Stack check");
  Label ok;
  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    }
    EmitProfilingCounterDecrement(weight);
    __ slt(at, a3, zero_reg);
    __ beq(at, zero_reg, &ok);
    // CallStub will emit a li t9 first, so it is safe to use the delay slot.
    InterruptStub stub;
    __ CallStub(&stub);
  } else {
    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
    __ sltu(at, sp, t0);
    __ beq(at, zero_reg, &ok);
    // CallStub will emit a li t9 first, so it is safe to use the delay slot.
    StackCheckStub stub;
    __ CallStub(&stub);
  }
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());
  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ Branch(&ok, ge, a3, Operand(zero_reg));
      __ push(v0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(a2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(v0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


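// The Plug/DropAndPlug overloads below implement the four expression
// contexts of the full codegen: EffectContext discards the value,
// AccumulatorValueContext leaves it in v0 (the result register),
// StackValueContext pushes it onto the stack, and TestContext turns it
// into a branch to true_label_/false_label_.

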
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  if (CpuFeatures::IsSupported(FPU)) {
    ToBooleanStub stub(result_register());
    __ CallStub(&stub);
    __ mov(at, zero_reg);
  } else {
    // Call the runtime to find the boolean value of the source and then
    // translate it into control flow to the pair of labels.
    __ push(result_register());
    __ CallRuntime(Runtime::kToBool, 1);
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
  }
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                        VariableMode mode,
                                        FunctionLiteral* function) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global scope.
  Variable* variable = proxy->var();
  bool binding_needs_init = (function == NULL) &&
      (mode == CONST || mode == CONST_HARMONY || mode == LET);
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      ++global_count_;
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ sw(result_register(), StackOperand(variable));
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      // The variable in the decl always resides in the current function
      // context.
      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
      if (FLAG_debug_code) {
        // Check that we're not inside a with or catch context.
        __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
        __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
        __ Check(ne, "Declaration in with context.",
                 a1, Operand(t0));
        __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
        __ Check(ne, "Declaration in catch context.",
                 a1, Operand(t0));
      }
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ sw(result_register(), ContextOperand(cp, variable->index()));
        int offset = Context::SlotOffset(variable->index());
        // We know that we have written a function, which is not a smi.
        __ RecordWriteContextSlot(cp,
                                  offset,
                                  result_register(),
                                  a2,
                                  kRAHasBeenSaved,
                                  kDontSaveFPRegs,
                                  EMIT_REMEMBERED_SET,
                                  OMIT_SMI_CHECK);
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Declaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(mode == VAR ||
             mode == CONST ||
             mode == CONST_HARMONY ||
             mode == LET);
      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
          ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (function != NULL) {
        __ Push(cp, a2, a1);
        // Push initial value for function declaration.
        VisitForStackValue(function);
      } else if (binding_needs_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        ASSERT(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
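
  // The switch is compiled in two passes: first a chain of '===' tests
  // against the switch value kept on top of the stack, each branching to
  // its body, then the bodies themselves, so fall-through between cases
  // works naturally.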

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ mov(a2, v0);
  __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a1, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(v0, a1);
  __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
  __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(v0);  // Map.
  __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(a2, a1, a0);
  __ jmp(&loop);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(a1, cell);
  __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
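
  // The loop below relies on the five slots set up above (both paths):
  //   sp[0 * kPointerSize]: current index (smi)
  //   sp[1 * kPointerSize]: array length (smi)
  //   sp[2 * kPointerSize]: enum cache or fixed array of keys
  //   sp[3 * kPointerSize]: map (fast case) or check-kind smi (slow case)
  //   sp[4 * kPointerSize]: the enumerable object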

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(a1);  // Enumerable.
  __ push(a3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitStackCheck(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ li(a0, Operand(info));
    __ push(a0);
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at global context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(a0, var->is_qml_global()
                ? QmlGlobalObjectOperand()
                : GlobalObjectOperand());
  __ li(a2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST_HARMONY
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ lw(a0, var->is_qml_global()
                    ? QmlGlobalObjectOperand()
                    : GlobalObjectOperand());
      __ li(a2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(t1);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ push(a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  __ Push(a3, a2, a1, a0);
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();
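  // For example, in { x: 1, x: 2 } only the store for the second 'x' is
  // emitted; the first value is still evaluated for its side effects.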

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->handle()));
            __ lw(a1, MemOperand(sp));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
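  // constant_elements is a pair: element 0 holds the ElementsKind as a smi
  // and element 1 holds the constant element values themselves (see the
  // ASSERT_EQ above).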

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  __ Push(a3, a2, a1);
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
                        1, a1, a2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode = has_fast_elements
        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(v0);
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (constant_elements_kind == FAST_ELEMENTS) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ lw(a1, MemOperand(sp));  // Copy of array literal.
      __ lw(a2, FieldMemOperand(a1, JSObject::kMapOffset));
      __ li(a3, Operand(Smi::FromInt(i)));
      __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }
  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ lw(a1, MemOperand(sp, 0));
        __ push(v0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->handle()));
  // Call load IC. It has arguments receiver and property name a0 and a2.
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}


1822 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1823 SetSourcePosition(prop->position());
1824 __ mov(a0, result_register());
1825 // Call keyed load IC. It has arguments key and receiver in a0 and a1.
1826 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1827 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  switch (op) {
    case Token::SAR:
      __ Branch(&stub_call);
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ mflo(scratch1);
      __ mfhi(scratch2);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ mflo(v0);
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      ASSERT(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}

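// Worked example (added note, illustrative only): on 32-bit MIPS a smi keeps
// a 31-bit payload in the upper bits (kSmiTag == 0, kSmiTagSize == 1), so an
// untagged result must fit in [-2^30, 2^30 - 1].  The SHL case checks this by
// adding 0x40000000 (2^30) to the shifted value: every in-range result
// becomes non-negative and every out-of-range one becomes negative, hence the
// "lt, zero_reg" branch to the stub.  The MUL case bails out when the 64-bit
// product overflows 32 bits (hi != sign extension of lo) and when the product
// is zero with a negative operand, since -0 is not representable as a smi.
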
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ pop(a0);  // Restore value.
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      __ pop(a2);
      __ pop(a0);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
    __ lw(a1, var->is_qml_global() ? QmlGlobalObjectOperand()
                                   : GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ lw(a1, StackOperand(var));
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a1, Operand(t0));
      __ sw(result_register(), StackOperand(var));
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope.  However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context.  We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ push(v0);
      __ li(a0, Operand(var->name()));
      __ Push(cp, a0);  // Context and name.
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(v0);  // Value.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, a1, a0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, a1);
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      __ sw(result_register(), location);
      if (var->IsContextSlot()) {
        // RecordWrite may destroy all its register arguments.
        __ mov(a3, result_register());
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, a1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
      }
      // Perform the assignment.
      __ sw(v0, location);
      if (var->IsContextSlot()) {
        __ mov(a3, v0);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(v0);  // Value.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, a1, a0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}

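// Illustration (added note, not emitted code): the hole value doubles as the
// marker for a not-yet-initialized binding.  For a let variable assigned
// before its declaration has executed, the load/branch on
// kTheHoleValueRootIndex above raises a ReferenceError, while for a
// classic-mode
//   const y = 1; y = 2;
// the second, non-initializing store takes none of the branches and is
// silently ignored, as the final comment notes.
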
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ lw(t0, MemOperand(sp, kPointerSize));  // Receiver is now under value.
    __ push(t0);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());  // Load the value.
  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
  // Load receiver to a1. Leave a copy in the stack if needed for turning the
  // receiver into fast case.
  if (expr->ends_initialization_block()) {
    __ lw(a1, MemOperand(sp));
  } else {
    __ pop(a1);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(v0);  // Result of assignment, saved even if not needed.
    // Receiver is under the result value.
    __ lw(t0, MemOperand(sp, kPointerSize));
    __ push(t0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(v0);
    __ Drop(1);
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    // Receiver is now under the key and value.
    __ lw(t0, MemOperand(sp, 2 * kPointerSize));
    __ push(t0);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(a0, result_register());
  __ pop(a1);  // Key.
  // Load receiver to a2. Leave a copy in the stack if needed for turning the
  // receiver into fast case.
  if (expr->ends_initialization_block()) {
    __ lw(a2, MemOperand(sp));
  } else {
    __ pop(a2);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(v0);  // Result of assignment, saved even if not needed.
    // Receiver is under the result value.
    __ lw(t0, MemOperand(sp, kPointerSize));
    __ push(t0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(v0);
    __ Drop(1);
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(a1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               unsigned ast_id) {
  ic_total_count_++;
  __ Call(code, rmode, ast_id);
}

void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ li(a2, Operand(name));
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(a1);
  __ push(v0);
  __ push(a1);

  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);  // Drop the key still on the stack.
}

void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
  }
  __ push(a1);

  // Push the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
  __ push(a1);
  // Push the language mode.
  __ li(a1, Operand(Smi::FromInt(language_mode())));
  __ push(a1);

  // Push the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
  __ push(a1);

  // Push the qml mode flag.
  __ li(a1, Operand(Smi::FromInt(is_qml_mode())));
  __ push(a1);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}

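// Stack layout at the CallRuntime above (added note, illustrative only):
//   sp[5]: copy of the function to call (pushed by VisitCall)
//   sp[4]: first argument to eval, or undefined
//   sp[3]: receiver of the enclosing function
//   sp[2]: language mode (smi)
//   sp[1]: scope start position (smi)
//   sp[0]: qml mode flag (smi)
// which matches the six arguments consumed by kResolvePossiblyDirectEval.
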
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ lw(a0, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand()
                                            : GlobalObjectOperand());
    __ push(a0);
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    __ push(context_register());
    __ li(a2, Operand(proxy->name()));
    __ push(a2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(v0, v1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot. That object could be the hole if the
    // receiver is implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ lw(a1, GlobalObjectOperand());
    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
    __ push(a1);
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}

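// Dispatch summary (added note, illustrative JavaScript for each branch
// above):
//   eval(s)        - possibly-direct eval, resolved at runtime
//   foo()          - unallocated (global) variable, call IC with the global
//                    object as receiver
//   with (o) f()   - lookup slot, fast/slow paths via kLoadContextSlot
//   o.m()          - named property, call IC
//   o[k]()         - keyed property, keyed call IC
//   (f || g)()     - arbitrary expression, call stub with the global receiver
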
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code, but not in the snapshot.
  CallFunctionFlags flags;
  if (!Serializer::enabled()) {
    flags = RECORD_CALL_TARGET;
    Handle<Object> uninitialized =
        TypeFeedbackCells::UninitializedSentinel(isolate());
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
    RecordTypeFeedbackCell(expr->id(), cell);
    __ li(a2, Operand(cell));
  } else {
    flags = NO_CALL_FUNCTION_FLAGS;
  }

  CallConstructStub stub(flags);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(t0, v0, Operand(kSmiTagMask));
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

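// Illustration (added note): a value is a non-negative smi exactly when its
// tag bit (kSmiTagMask, bit 0) and its sign bit (0x80000000) are both clear,
// so one AND against (kSmiTagMask | 0x80000000) plus a compare against zero
// implements the whole test.  E.g. 42 is encoded as 84 (0x54), both bits
// clear; -1 is encoded as 0xfffffffe, sign bit set.
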
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (FLAG_debug_code) __ AbortIfSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(if_true, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf symbol in the descriptor array, and indicate false if
  // found. The type is not checked, so if it is a transition it is a false
  // negative result.
  __ LoadInstanceDescriptors(a1, t0);
  __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset));
  // t0: descriptor array
  // a3: length of descriptor array
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(a2, a2, t1);

  // Calculate location of the first key name.
  __ Addu(t0,
          t0,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                  DescriptorArray::kFirstIndex * kPointerSize));
  // Loop through all the keys in the descriptor array. If one of these is the
  // symbol valueOf the result is false.
  Label entry, loop;
  // The use of t2 to store the valueOf symbol assumes that it is not
  // otherwise used in the loop below.
  __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  // If a valueOf property is not found on the object, check that its
  // prototype is the un-modified String prototype. If not, the result is
  // false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Branch(if_false, ne, a2, Operand(a3));

  // Set the bit in the map to indicate that it has been checked safe for
  // default valueOf and set true result.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ jmp(if_true);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}

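// Purpose (added note, illustrative only): this guards the fast path for the
// default String wrapper valueOf, e.g. in
//   var s = new String("a");  s + "";
// When no "valueOf" appears in the descriptor array and the prototype is the
// unmodified String prototype, the map is marked safe so that later checks
// reduce to the single bit test at the top of this function.
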
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
  __ Branch(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
  __ Branch(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}

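// Examples (added note, illustrative only): %_ClassOf(new Date()) yields
// "Date" via the constructor's instance class name, %_ClassOf(Math.sin)
// yields "Function", %_ClassOf({}) yields "Object", and a smi or a primitive
// such as a string yields null.
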
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Save the new heap number in callee-saved register s0, since
  // we call out to external C code below.
  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);

  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(s0, v0);  // Save result in s0, so it is saved thru CFunc call.

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in v0 to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  if (CpuFeatures::IsSupported(FPU)) {
    __ PrepareCallCFunction(1, a0);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
    __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

    CpuFeatures::Scope scope(FPU);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
    __ li(a1, Operand(0x41300000));
    // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
    __ Move(f12, v0, a1);
    // Move 0x4130000000000000 to FPU.
    __ Move(f14, zero_reg, a1);
    // Subtract and store the result in the heap number.
    __ sub_d(f0, f12, f14);
    __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset));
    __ mov(v0, s0);
  } else {
    __ PrepareCallCFunction(2, a0);
    __ mov(a0, s0);
    __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
    __ CallCFunction(
        ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
  }

  context()->Plug(v0);
}

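// Bit trick (added note, illustrative only): with high word 0x41300000 and
// low word r (the 32 random bits), the double read back is
//   (1 + r * 2^-52) * 2^20  =  2^20 + r * 2^-32,
// so subtracting 1.0 * 2^20 (high word 0x41300000, low word 0) leaves
// r * 2^-32, a uniform value in [0, 1) that is stored into the heap number.
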
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  if (FLAG_debug_code) {
    __ AbortIfSmi(object);
    __ GetObjectType(object, scratch1, scratch1);
    __ Assert(eq, "Trying to get date field from non-date.",
        scratch1, Operand(JS_DATE_TYPE));
  }

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(v0);
}

void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  if (CpuFeatures::IsSupported(FPU)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

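// Spec behavior exercised above (added note, illustrative only):
// "abc".charCodeAt(10) evaluates to NaN via the index_out_of_range path,
// while a non-smi index such as "abc".charCodeAt("1") reaches
// need_conversion, where the undefined value in the result register makes
// the generator's slow path perform the conversion.
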
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // Check for proxy.
  Label proxy, done;
  __ GetObjectType(v0, a1, a1);
  __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&proxy);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  Label done;
  Label slow_case;
  Register object = a0;
  Register index1 = a1;
  Register index2 = a2;
  Register elements = a3;
  Register scratch1 = t0;
  Register scratch2 = t1;

  __ lw(object, MemOperand(sp, 2 * kPointerSize));
  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ GetObjectType(object, scratch1, scratch2);
  __ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE));
  // Map is now in scratch1.

  __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
  __ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
  __ Branch(&slow_case, ne, scratch2, Operand(zero_reg));

  // Check the object's elements are in fast case and writable.
  __ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset));
  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex);
  __ Branch(&slow_case, ne, scratch1, Operand(scratch2));

  // Check that both indices are smis.
  __ lw(index1, MemOperand(sp, 1 * kPointerSize));
  __ lw(index2, MemOperand(sp, 0));
  __ JumpIfNotBothSmi(index1, index2, &slow_case);

  // Check that both indices are valid.
  Label not_hi;
  __ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
  __ Branch(&slow_case, ls, scratch1, Operand(index1));
  __ Branch(&not_hi, NegateCondition(hi), scratch1, Operand(index1));
  __ Branch(&slow_case, ls, scratch1, Operand(index2));
  __ bind(&not_hi);

  // Bring the address of the elements into index1 and index2.
  __ Addu(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(index1, scratch1, index1);
  __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(index2, scratch1, index2);

  // Swap elements.
  __ lw(scratch1, MemOperand(index1, 0));
  __ lw(scratch2, MemOperand(index2, 0));
  __ sw(scratch1, MemOperand(index2, 0));
  __ sw(scratch2, MemOperand(index1, 0));

  Label no_remembered_set;
  __ CheckPageFlag(elements,
                   scratch1,
                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
                   ne,
                   &no_remembered_set);
  // Possible optimization: do a check that both values are Smis
  // (or them and test against Smi mask).

  // We are swapping two objects in an array and the incremental marker never
  // pauses in the middle of scanning a single object.  Therefore the
  // incremental marker is not disturbed, so we don't need to call the
  // RecordWrite stub that notifies the incremental marker.
  __ RememberedSetHelper(elements,
                         index1,
                         scratch2,
                         kDontSaveFPRegs,
                         MacroAssembler::kFallThroughAtEnd);
  __ RememberedSetHelper(elements,
                         index2,
                         scratch2,
                         kDontSaveFPRegs,
                         MacroAssembler::kFallThroughAtEnd);

  __ bind(&no_remembered_set);
  // We are done. Drop elements from the stack, and return undefined.
  __ Drop(3);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  __ bind(&slow_case);
  __ CallRuntime(Runtime::kSwapElements, 3);

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
  __ lw(cache,
         ContextOperand(
             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}

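// Cache layout (added note, illustrative only): a JSFunctionResultCache is a
// FixedArray of [key0, value0, key1, value1, ...] pairs plus a "finger"
// index pointing at the most recently hit key, so the fast path above only
// compares the key under the finger before falling back to %GetFromCache.
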
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = v0;
  Register left = a1;
  Register tmp = a2;
  Register tmp2 = a3;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));  // Result (right) in v0.
  __ pop(left);

  Label done, fail, ok;
  __ Branch(&ok, eq, left, Operand(right));
  // Fail if either is a non-HeapObject.
  __ And(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
  __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ Branch(&fail, ne, tmp, Operand(tmp2));
  __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ Branch(&ok, eq, tmp, Operand(tmp2));
  __ bind(&fail);
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&ok);
  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
  __ bind(&done);

  context()->Plug(v0);
}

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(v0);
  }

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}



void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;
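
  // (In JS terms this is the fast path for array.join(separator) over flat
  // ASCII strings; on any unexpected shape the code jumps to the bailout
  // label, which returns undefined so the caller can fall back to the
  // generic join implementation.)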

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing the array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (FLAG_debug_code) {
    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
        array_length, Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.  array_length is
  // not smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mult(array_length, scratch1);
  // Check for smi overflow.  No overflow if the higher 33 bits of the
  // 64-bit result are zero.
  __ mfhi(scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
  __ mflo(scratch2);
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
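  // (Worked check: the product must fit in a positive smi, i.e. 31 bits, so
  // the hi word of the 64-bit product must be zero and the sign bit of the
  // lo word must be clear; either test failing takes the bailout path.)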
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get the first element in the array to free up the elements register to
  // be used for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch1,
                         scratch2,
                         elements_end,
                         &bailout);
  // Prepare for looping.  Set up elements_end to point to the end of the
  // array and result_pos to the position in the result where the first
  // character will be written.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character).  Entry is
  // at the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }
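  // (Example: a call written as %_IsSmi(x) starts with '_' and takes the
  // inline path above, while %IsSmi(x) falls through and is compiled as a
  // call to the C++ runtime below.)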

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ lw(a0, GlobalObjectOperand());
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
    __ push(a0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ li(a2, Operand(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
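        // (Example: 'delete x' on a global hits the IsUnallocated() arm
        // below, a stack- or context-allocated 'var y; delete y' is simply
        // answered with false, and eval- or with-introduced names fall
        // through to the runtime call.)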
        if (var->IsUnallocated()) {
          __ lw(a2, var->is_qml_global()
                        ? QmlGlobalObjectOperand()
                        : GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ li(a2, Operand(var->name()));
          __ push(a2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
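        // (Example: for 'if (!x) A; else B;' the subexpression x is visited
        // with the A and B labels swapped, so no negation code is emitted.)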
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      __ mov(a0, result_register());
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in a0.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(v0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ lw(a1, MemOperand(sp, 0));
      __ push(v0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  __ JumpIfSmi(v0, &no_conversion);
  __ mov(a0, v0);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack.  If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }
  __ mov(a0, result_register());

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ li(a1, Operand(Smi::FromInt(count_value)));
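  // (Example: for x++ the inline path below computes v0 = a0 + Smi(1) and
  // records a patch site, so the inlined smi code can later be patched out
  // if the smi assumption stops holding.)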

  if (ShouldInlineSmiCase(expr->op())) {
    __ AdduAndCheckForOverflow(v0, a0, a1, t0);
    __ BranchOnOverflow(&stub_call, t0);  // Do stub on overflow.

    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(v0, &done);
    __ bind(&stub_call);
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));  // Name.
      __ pop(a1);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ pop(a1);  // Key.
      __ pop(a2);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ lw(a0, proxy->var()->is_qml_global()
                  ? QmlGlobalObjectOperand()
                  : GlobalObjectOperand());
    __ li(a2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
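    // (Example: 'typeof undeclared_global' must evaluate to "undefined"
    // rather than throw a ReferenceError, hence the non-contextual load.)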
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
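
  // (Example: a comparison such as (typeof x == "number") is handled here
  // without materializing the type string; each branch below tests the
  // value's shape directly and splits straight to the target labels.)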

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = eq;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:   cc = eq; break;
        case Token::LT:   cc = lt; break;
        case Token::GT:   cc = gt; break;
        case Token::LTE:  cc = le; break;
        case Token::GTE:  cc = ge; break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Heap::RootListIndex nil_value = nil == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ mov(a0, result_register());
  __ LoadRoot(a1, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Heap::RootListIndex other_nil_value = nil == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
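    // (Example: non-strict 'x == null' accepts null, undefined, and
    // undetectable objects such as document.all; the strict variant above
    // compares against the single root value only.)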
    __ Branch(if_true, eq, a0, Operand(a1));
    __ LoadRoot(a1, other_nil_value);
    __ Branch(if_true, eq, a0, Operand(a1));
    __ JumpIfSmi(a0, if_false);
    // It can be an undetectable object.
    __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.
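  // (Sketch of the cooking arithmetic: storing ra - CodeObject() keeps a
  // GC-safe offset instead of a raw code address on the stack, and with
  // kSmiTagSize == 1 the a1 + a1 above is just a left shift by one, i.e.
  // smi encoding.)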
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Pop the cooked return address from the stack.
  __ pop(a1);
  // Restore the result register and uncook the return address.
  __ pop(result_register());
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS