// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS
// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/isolate-inl.h"
25 #include "src/parser.h"
26 #include "src/scopes.h"
28 #include "src/mips/code-stubs-mips.h"
29 #include "src/mips/macro-assembler-mips.h"
34 #define __ ACCESS_MASM(masm_)
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
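// For example (illustrative): a patch site 12 instructions before the marker
// is encoded as delta / kImm16Mask == 0 and delta % kImm16Mask == 12, i.e.
// the marker instruction "andi zero_reg, zero_reg, 12".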
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
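// A sketch of that layout (see frames-mips.h for the exact offsets): the
// receiver and arguments sit above fp on the caller's side of the frame; the
// prologue saves ra and the caller's fp at fp; the context and function
// slots, followed by any stack locals, live below fp.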
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif
  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }
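  // For example (illustrative): with 70 locals and kMaxPushes == 32, the
  // loop above runs twice (64 slots) and the unrolled tail stores the
  // remaining 6 slots, all initialized to undefined.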
  bool function_in_register = true;
  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                   (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }
313 { Comment cmnt(masm_, "[ Stack check");
314 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
316 __ LoadRoot(at, Heap::kStackLimitRootIndex);
317 __ Branch(&ok, hs, sp, Operand(at));
318 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
319 PredictableCodeSizeScope predictable(masm_,
320 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
321 __ Call(stack_check, RelocInfo::CODE_TARGET);
325 { Comment cmnt(masm_, "[ Body");
326 DCHECK(loop_depth() == 0);
327 VisitStatements(function()->body());
328 DCHECK(loop_depth() == 0);
  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}

void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
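  // The counter loses one unit per kCodeSizeMultiplier bytes of loop body,
  // clamped to [1, kMaxBackEdgeWeight]: bigger loop bodies drain the
  // interrupt budget faster, so the interrupt check below fires sooner.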
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to keep the code coverage
      // tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}

void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

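// For example (illustrative): a stack local with index 2 resolves to
// MemOperand(fp, JavaScriptFrameConstants::kLocal0Offset - 2 * kPointerSize),
// i.e. two slots below the first local.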
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global scope.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

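// For example (illustrative): for "let x;" the slot for x is hole-initialized
// above, which is what lets a later load detect a use of the binding before
// its declaration is reached and throw a ReferenceError.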
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }
    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }
  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

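// For example (illustrative): 'switch (x) { case 1: a(); default: b(); }'
// compiles to the '===' tests first (ending in the jump to the default),
// followed by the case bodies in source order, so execution can fall from
// one body into the next exactly as the language requires.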
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
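  // Stack layout inside the loop below (grounded in the loads that follow):
  // sp[0] current index (smi), sp[1] array length (smi), sp[2] fixed array
  // of keys, sp[3] expected map or check-kind smi, sp[4] the enumerable.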
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

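// Roughly (illustrative), the code above implements the desugaring
//   var iterator = iterable[Symbol.iterator]();
//   while (true) {
//     var result = iterator.next();
//     if (result.done) break;
//     each = result.value;
//     <body>
//   }
// with each step compiled via the pre-built AST nodes on the statement.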
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ lw(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}

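// For example (illustrative): in
//   function f(x) { eval(s); return x; }
// 'x' might be shadowed if s declares "var x", so the fast path above first
// proves no eval-introduced extension exists before using the slot directly.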
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }
  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }
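  // For example (illustrative): '{ get x() {...}, set x(v) {...} }' results
  // in one kDefineAccessorPropertyUnchecked call covering both closures; a
  // missing getter or setter is passed as null (see EmitAccessor above).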
  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}

1796 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1797 Comment cmnt(masm_, "[ ArrayLiteral");
1799 expr->BuildConstantElements(isolate());
1800 int flags = expr->depth() == 1
1801 ? ArrayLiteral::kShallowElements
1802 : ArrayLiteral::kNoFlags;
1804 ZoneList<Expression*>* subexprs = expr->values();
1805 int length = subexprs->length();
1807 Handle<FixedArray> constant_elements = expr->constant_elements();
1808 DCHECK_EQ(2, constant_elements->length());
1809 ElementsKind constant_elements_kind =
1810 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1811 bool has_fast_elements =
1812 IsFastObjectElementsKind(constant_elements_kind);
1813 Handle<FixedArrayBase> constant_elements_values(
1814 FixedArrayBase::cast(constant_elements->get(1)));
1816 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1817 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1818 // If the only customer of allocation sites is transitioning, then
1819 // we can turn it off if we don't have anywhere else to transition to.
1820 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1821 }
1823 __ mov(a0, result_register());
1824 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1825 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1826 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1827 __ li(a1, Operand(constant_elements));
1828 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1829 __ li(a0, Operand(Smi::FromInt(flags)));
1830 __ Push(a3, a2, a1, a0);
1831 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1832 } else {
1833 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1834 __ CallStub(&stub);
1835 }
1837 bool result_saved = false; // Is the result saved to the stack?
1839 // Emit code to evaluate all the non-constant subexpressions and to store
1840 // them into the newly cloned array.
1841 for (int i = 0; i < length; i++) {
1842 Expression* subexpr = subexprs->at(i);
1843 // If the subexpression is a literal or a simple materialized literal it
1844 // is already set in the cloned array.
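// (Added illustration, not in the original source: in [1, 2, foo()] the
// elements 0 and 1 come from the constant boilerplate; only foo() reaches the
// store code emitted below.)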
1845 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1847 if (!result_saved) {
1848 __ push(v0); // array literal
1849 __ Push(Smi::FromInt(expr->literal_index()));
1850 result_saved = true;
1853 VisitForAccumulatorValue(subexpr);
1855 if (IsFastObjectElementsKind(constant_elements_kind)) {
1856 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1857 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1858 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1859 __ sw(result_register(), FieldMemOperand(a1, offset));
1860 // Update the write barrier for the array store.
1861 __ RecordWriteField(a1, offset, result_register(), a2,
1862 kRAHasBeenSaved, kDontSaveFPRegs,
1863 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1864 } else {
1865 __ li(a3, Operand(Smi::FromInt(i)));
1866 __ mov(a0, result_register());
1867 StoreArrayLiteralElementStub stub(isolate());
1868 __ CallStub(&stub);
1869 }
1871 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1872 }
1873 if (result_saved) {
1874 __ Pop(); // literal index
1875 context()->PlugTOS();
1876 } else {
1877 context()->Plug(v0);
1878 }
1879 }
1882 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1883 DCHECK(expr->target()->IsValidReferenceExpression());
1885 Comment cmnt(masm_, "[ Assignment");
1887 Property* property = expr->target()->AsProperty();
1888 LhsKind assign_type = GetAssignType(property);
1890 // Evaluate LHS expression.
1891 switch (assign_type) {
1892 case VARIABLE:
1893 // Nothing to do here.
1894 break;
1895 case NAMED_PROPERTY:
1896 if (expr->is_compound()) {
1897 // We need the receiver both on the stack and in the register.
1898 VisitForStackValue(property->obj());
1899 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1900 } else {
1901 VisitForStackValue(property->obj());
1902 }
1903 break;
1904 case NAMED_SUPER_PROPERTY:
1905 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1906 EmitLoadHomeObject(property->obj()->AsSuperReference());
1907 __ Push(result_register());
1908 if (expr->is_compound()) {
1909 const Register scratch = a1;
1910 __ lw(scratch, MemOperand(sp, kPointerSize));
1911 __ Push(scratch, result_register());
1912 }
1913 break;
1914 case KEYED_SUPER_PROPERTY: {
1915 const Register scratch = a1;
1916 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1917 EmitLoadHomeObject(property->obj()->AsSuperReference());
1918 __ Move(scratch, result_register());
1919 VisitForAccumulatorValue(property->key());
1920 __ Push(scratch, result_register());
1921 if (expr->is_compound()) {
1922 const Register scratch1 = t0;
1923 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
1924 __ Push(scratch1, scratch, result_register());
1925 }
1926 break;
1927 }
1928 case KEYED_PROPERTY:
1929 // We need the key and receiver on both the stack and in v0 and a1.
1930 if (expr->is_compound()) {
1931 VisitForStackValue(property->obj());
1932 VisitForStackValue(property->key());
1933 __ lw(LoadDescriptor::ReceiverRegister(),
1934 MemOperand(sp, 1 * kPointerSize));
1935 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1936 } else {
1937 VisitForStackValue(property->obj());
1938 VisitForStackValue(property->key());
1939 }
1940 break;
1941 }
1943 // For compound assignments we need another deoptimization point after the
1944 // variable/property load.
1945 if (expr->is_compound()) {
1946 { AccumulatorValueContext context(this);
1947 switch (assign_type) {
1948 case VARIABLE:
1949 EmitVariableLoad(expr->target()->AsVariableProxy());
1950 PrepareForBailout(expr->target(), TOS_REG);
1951 break;
1952 case NAMED_PROPERTY:
1953 EmitNamedPropertyLoad(property);
1954 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1955 break;
1956 case NAMED_SUPER_PROPERTY:
1957 EmitNamedSuperPropertyLoad(property);
1958 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1959 break;
1960 case KEYED_SUPER_PROPERTY:
1961 EmitKeyedSuperPropertyLoad(property);
1962 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1963 break;
1964 case KEYED_PROPERTY:
1965 EmitKeyedPropertyLoad(property);
1966 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1967 break;
1968 }
1969 }
1971 Token::Value op = expr->binary_op();
1972 __ push(v0); // Left operand goes on the stack.
1973 VisitForAccumulatorValue(expr->value());
1975 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1976 ? OVERWRITE_RIGHT
1977 : NO_OVERWRITE;
1978 SetSourcePosition(expr->position() + 1);
1979 AccumulatorValueContext context(this);
1980 if (ShouldInlineSmiCase(op)) {
1981 EmitInlineSmiBinaryOp(expr->binary_operation(),
1982 op,
1983 mode,
1984 expr->target(),
1985 expr->value());
1986 } else {
1987 EmitBinaryOp(expr->binary_operation(), op, mode);
1988 }
1990 // Deoptimization point in case the binary operation may have side effects.
1991 PrepareForBailout(expr->binary_operation(), TOS_REG);
1992 } else {
1993 VisitForAccumulatorValue(expr->value());
1994 }
1996 // Record source position before possible IC call.
1997 SetSourcePosition(expr->position());
2000 switch (assign_type) {
2001 case VARIABLE:
2002 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2003 expr->op());
2004 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2005 context()->Plug(v0);
2006 break;
2007 case NAMED_PROPERTY:
2008 EmitNamedPropertyAssignment(expr);
2009 break;
2010 case NAMED_SUPER_PROPERTY:
2011 EmitNamedSuperPropertyStore(property);
2012 context()->Plug(v0);
2013 break;
2014 case KEYED_SUPER_PROPERTY:
2015 EmitKeyedSuperPropertyStore(property);
2016 context()->Plug(v0);
2017 break;
2018 case KEYED_PROPERTY:
2019 EmitKeyedPropertyAssignment(expr);
2020 break;
2021 }
2022 }
2025 void FullCodeGenerator::VisitYield(Yield* expr) {
2026 Comment cmnt(masm_, "[ Yield");
2027 // Evaluate yielded value first; the initial iterator definition depends on
2028 // this. It stays on the stack while we update the iterator.
2029 VisitForStackValue(expr->expression());
2031 switch (expr->yield_kind()) {
2032 case Yield::kSuspend:
2033 // Pop value from top-of-stack slot; box result into result register.
2034 EmitCreateIteratorResult(false);
2035 __ push(result_register());
2036 // Fall through.
2037 case Yield::kInitial: {
2038 Label suspend, continuation, post_runtime, resume;
2039 __ jmp(&suspend);
2042 __ bind(&continuation);
2043 __ jmp(&resume);
2044 __ bind(&suspend);
2046 VisitForAccumulatorValue(expr->generator_object());
2047 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2048 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2049 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2050 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2051 __ mov(a1, cp);
2052 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2053 kRAHasBeenSaved, kDontSaveFPRegs);
2054 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2055 __ Branch(&post_runtime, eq, sp, Operand(a1));
2056 __ push(v0); // generator object
2057 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2058 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2059 __ bind(&post_runtime);
2060 __ pop(result_register());
2061 EmitReturnSequence();
2062 __ bind(&resume);
2064 context()->Plug(result_register());
2065 break;
2066 }
2068 case Yield::kFinal: {
2069 VisitForAccumulatorValue(expr->generator_object());
2070 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2071 __ sw(a1, FieldMemOperand(result_register(),
2072 JSGeneratorObject::kContinuationOffset));
2073 // Pop value from top-of-stack slot, box result into result register.
2074 EmitCreateIteratorResult(true);
2075 EmitUnwindBeforeReturn();
2076 EmitReturnSequence();
2077 break;
2078 }
2080 case Yield::kDelegating: {
2081 VisitForStackValue(expr->generator_object());
2083 // Initial stack layout is as follows:
2084 // [sp + 1 * kPointerSize] iter
2085 // [sp + 0 * kPointerSize] g
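// (Added sketch, not in the original source: the code below roughly
// implements this JS loop, with iter.next(received) swapped for
// iter['throw'](e) when an exception is caught:
//   var received = undefined;
//   while (true) {
//     var result = iter.next(received);
//     if (result.done) break;
//     received = yield result.value;
//   }
// The labels below mark the catch, try, suspend, call and resume points.)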
2087 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2088 Label l_next, l_call;
2089 Register load_receiver = LoadDescriptor::ReceiverRegister();
2090 Register load_name = LoadDescriptor::NameRegister();
2092 // Initial send value is undefined.
2093 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2096 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2097 __ bind(&l_catch);
2098 __ mov(a0, v0); // exception arrives in v0
2099 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2100 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2101 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2102 __ Push(load_name, a3, a0); // "throw", iter, except
2103 __ jmp(&l_call);
2105 // try { received = %yield result }
2106 // Shuffle the received result above a try handler and yield it without
2107 // re-boxing.
2108 __ bind(&l_try);
2109 __ pop(a0); // result
2110 __ PushTryHandler(StackHandler::CATCH, expr->index());
2111 const int handler_size = StackHandlerConstants::kSize;
2112 __ push(a0); // result
2113 __ jmp(&l_suspend);
2114 __ bind(&l_continuation);
2115 __ mov(a0, v0);
2116 __ jmp(&l_resume);
2117 __ bind(&l_suspend);
2118 const int generator_object_depth = kPointerSize + handler_size;
2119 __ lw(a0, MemOperand(sp, generator_object_depth));
2120 __ push(a0); // g
2121 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2122 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2123 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2124 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2125 __ mov(a1, cp);
2126 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2127 kRAHasBeenSaved, kDontSaveFPRegs);
2128 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2129 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2130 __ pop(v0); // result
2131 EmitReturnSequence();
2133 __ bind(&l_resume); // received in a0
2136 // receiver = iter; f = 'next'; arg = received;
2137 __ bind(&l_next);
2139 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2140 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2141 __ Push(load_name, a3, a0); // "next", iter, received
2143 // result = receiver[f](arg);
2144 __ bind(&l_call);
2145 __ lw(load_receiver, MemOperand(sp, kPointerSize));
2146 __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
2147 if (FLAG_vector_ics) {
2148 __ li(VectorLoadICDescriptor::SlotRegister(),
2149 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2150 }
2151 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2152 CallIC(ic, TypeFeedbackId::None());
2153 __ mov(a0, v0);
2154 __ mov(a1, a0);
2155 __ sw(a1, MemOperand(sp, 2 * kPointerSize));
2156 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2157 __ CallStub(&stub);
2159 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2160 __ Drop(1); // The function is still on the stack; drop it.
2162 // if (!result.done) goto l_try;
2163 __ Move(load_receiver, v0);
2165 __ push(load_receiver); // save result
2166 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2167 if (FLAG_vector_ics) {
2168 __ li(VectorLoadICDescriptor::SlotRegister(),
2169 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2170 }
2171 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2172 __ mov(a0, v0);
2173 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2174 CallIC(bool_ic);
2175 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2178 __ pop(load_receiver); // result
2179 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2180 if (FLAG_vector_ics) {
2181 __ li(VectorLoadICDescriptor::SlotRegister(),
2182 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2183 }
2184 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2185 context()->DropAndPlug(2, v0); // drop iter and g
2186 break;
2187 }
2188 }
2189 }
2192 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2193 Expression *value,
2194 JSGeneratorObject::ResumeMode resume_mode) {
2195 // The value stays in a0, and is ultimately read by the resumed generator, as
2196 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2197 // is read to throw the value when the resumed generator is already closed.
2198 // a1 will hold the generator object until the activation has been resumed.
2199 VisitForStackValue(generator);
2200 VisitForAccumulatorValue(value);
2201 __ pop(a1);
2203 // Check generator state.
2204 Label wrong_state, closed_state, done;
2205 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2206 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2207 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
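// (Added note: per the asserts above, the continuation field encodes the
// generator state: a positive smi is the resume offset, zero means closed,
// and a negative value means the generator is already executing.)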
2208 __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2209 __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
2211 // Load suspended function and context.
2212 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2213 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2215 // Load receiver and store as the first argument.
2216 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2217 __ push(a2);
2219 // Push holes for the rest of the arguments to the generator function.
2220 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
2221 __ lw(a3,
2222 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2223 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2224 Label push_argument_holes, push_frame;
2225 __ bind(&push_argument_holes);
2226 __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2227 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2228 __ push(a2);
2229 __ jmp(&push_argument_holes);
2231 // Enter a new JavaScript frame, and initialize its slots as they were when
2232 // the generator was suspended.
2233 Label resume_frame;
2234 __ bind(&push_frame);
2235 __ Call(&resume_frame);
2236 __ jmp(&done);
2237 __ bind(&resume_frame);
2238 // ra = return address.
2239 // fp = caller's frame pointer.
2240 // cp = callee's context,
2241 // t0 = callee's JS function.
2242 __ Push(ra, fp, cp, t0);
2243 // Adjust FP to point to saved FP.
2244 __ Addu(fp, sp, 2 * kPointerSize);
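// (Added note: after the Push above the stack holds t0, cp, fp, ra from sp
// upwards, so the saved fp sits at sp + 2 * kPointerSize and fp now points at
// it, matching a standard JavaScript frame.)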
2246 // Load the operand stack size.
2247 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2248 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2249 __ SmiUntag(a3);
2251 // If we are sending a value and there is no operand stack, we can jump back
2252 // in directly.
2253 if (resume_mode == JSGeneratorObject::NEXT) {
2254 Label slow_resume;
2255 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2256 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2257 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2258 __ SmiUntag(a2);
2259 __ Addu(a3, a3, Operand(a2));
2260 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2261 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2262 __ Jump(a3);
2263 __ bind(&slow_resume);
2264 }
2266 // Otherwise, we push holes for the operand stack and call the runtime to fix
2267 // up the stack and the handlers.
2268 Label push_operand_holes, call_resume;
2269 __ bind(&push_operand_holes);
2270 __ Subu(a3, a3, Operand(1));
2271 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2272 __ push(a2);
2273 __ Branch(&push_operand_holes);
2274 __ bind(&call_resume);
2275 DCHECK(!result_register().is(a1));
2276 __ Push(a1, result_register());
2277 __ Push(Smi::FromInt(resume_mode));
2278 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2279 // Not reached: the runtime call returns elsewhere.
2280 __ stop("not-reached");
2282 // Reach here when generator is closed.
2283 __ bind(&closed_state);
2284 if (resume_mode == JSGeneratorObject::NEXT) {
2285 // Return completed iterator result when generator is closed.
2286 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2287 __ push(a2);
2288 // Pop value from top-of-stack slot; box result into result register.
2289 EmitCreateIteratorResult(true);
2290 } else {
2291 // Throw the provided value.
2292 __ push(a0);
2293 __ CallRuntime(Runtime::kThrow, 1);
2294 }
2295 __ jmp(&done);
2297 // Throw error if we attempt to operate on a running generator.
2298 __ bind(&wrong_state);
2299 __ push(a1);
2300 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2302 __ bind(&done);
2303 context()->Plug(result_register());
2304 }
2307 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2308 Label gc_required;
2309 Label allocated;
2311 const int instance_size = 5 * kPointerSize;
2312 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2313 instance_size);
2315 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2316 __ jmp(&allocated);
2318 __ bind(&gc_required);
2319 __ Push(Smi::FromInt(instance_size));
2320 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2321 __ lw(context_register(),
2322 MemOperand(fp, StandardFrameConstants::kContextOffset));
2324 __ bind(&allocated);
2325 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2326 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2327 __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2328 __ pop(a2);
2329 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2330 __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2331 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2332 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2333 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2334 __ sw(a2,
2335 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2336 __ sw(a3,
2337 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2339 // Only the value field needs a write barrier, as the other values are in the
2340 // root set.
2341 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2342 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2343 }
2346 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2347 SetSourcePosition(prop->position());
2348 Literal* key = prop->key()->AsLiteral();
2349 DCHECK(!prop->IsSuperAccess());
2351 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2352 if (FLAG_vector_ics) {
2353 __ li(VectorLoadICDescriptor::SlotRegister(),
2354 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2355 CallLoadIC(NOT_CONTEXTUAL);
2356 } else {
2357 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2358 }
2359 }
2362 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2363 // Stack: receiver, home_object.
2364 SetSourcePosition(prop->position());
2365 Literal* key = prop->key()->AsLiteral();
2366 DCHECK(!key->value()->IsSmi());
2367 DCHECK(prop->IsSuperAccess());
2369 __ Push(key->value());
2370 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2371 }
2374 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2375 SetSourcePosition(prop->position());
2376 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2377 if (FLAG_vector_ics) {
2378 __ li(VectorLoadICDescriptor::SlotRegister(),
2379 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2380 CallIC(ic);
2381 } else {
2382 CallIC(ic, prop->PropertyFeedbackId());
2383 }
2384 }
2387 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2388 // Stack: receiver, home_object, key.
2389 SetSourcePosition(prop->position());
2391 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2392 }
2395 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2396 Token::Value op,
2397 OverwriteMode mode,
2398 Expression* left_expr,
2399 Expression* right_expr) {
2400 Label done, smi_case, stub_call;
2402 Register scratch1 = a2;
2403 Register scratch2 = a3;
2405 // Get the arguments.
2406 Register left = a1;
2407 Register right = a0;
2408 __ pop(left);
2409 __ mov(a0, result_register());
2411 // Perform combined smi check on both operands.
2412 __ Or(scratch1, left, Operand(right));
2413 STATIC_ASSERT(kSmiTag == 0);
2414 JumpPatchSite patch_site(masm_);
2415 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2417 __ bind(&stub_call);
2418 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2419 CallIC(code, expr->BinaryOperationFeedbackId());
2420 patch_site.EmitPatchInfo();
2421 __ jmp(&done);
2423 __ bind(&smi_case);
2424 // Smi case. This code works the same way as the smi-smi case in the type
2425 // recording binary operation stub.
2426 switch (op) {
2427 case Token::SAR:
2428 __ GetLeastBitsFromSmi(scratch1, right, 5);
2429 __ srav(right, left, scratch1);
2430 __ And(v0, right, Operand(~kSmiTagMask));
2431 break;
2432 case Token::SHL: {
2433 __ SmiUntag(scratch1, left);
2434 __ GetLeastBitsFromSmi(scratch2, right, 5);
2435 __ sllv(scratch1, scratch1, scratch2);
2436 __ Addu(scratch2, scratch1, Operand(0x40000000));
2437 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
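// (Added note: scratch1 + 0x40000000 is negative exactly when the shifted
// value lies outside the smi range [-2^30, 2^30 - 1], so the branch above
// falls back to the stub for such results.)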
2438 __ SmiTag(v0, scratch1);
2439 break;
2440 }
2441 case Token::SHR: {
2442 __ SmiUntag(scratch1, left);
2443 __ GetLeastBitsFromSmi(scratch2, right, 5);
2444 __ srlv(scratch1, scratch1, scratch2);
2445 __ And(scratch2, scratch1, 0xc0000000);
2446 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
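// (Added note: a logical right shift may leave bit 31 or bit 30 set; such a
// value cannot be retagged as a non-negative smi, so the stub handles it.)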
2447 __ SmiTag(v0, scratch1);
2448 break;
2449 }
2450 case Token::ADD:
2451 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2452 __ BranchOnOverflow(&stub_call, scratch1);
2453 break;
2454 case Token::SUB:
2455 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2456 __ BranchOnOverflow(&stub_call, scratch1);
2457 break;
2458 case Token::MUL: {
2459 __ SmiUntag(scratch1, right);
2460 __ Mul(scratch2, v0, left, scratch1);
2461 __ sra(scratch1, v0, 31);
2462 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2463 __ Branch(&done, ne, v0, Operand(zero_reg));
2464 __ Addu(scratch2, right, left);
2465 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
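// (Added note: the product in v0 is zero here, so one operand is zero and
// left + right equals the other operand; that sum is negative exactly when
// the correct result is -0, which only the stub can produce.)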
2466 DCHECK(Smi::FromInt(0) == 0);
2467 __ mov(v0, zero_reg);
2468 break;
2469 }
2470 case Token::BIT_OR:
2471 __ Or(v0, left, Operand(right));
2473 case Token::BIT_AND:
2474 __ And(v0, left, Operand(right));
2475 break;
2476 case Token::BIT_XOR:
2477 __ Xor(v0, left, Operand(right));
2478 break;
2479 default:
2480 UNREACHABLE();
2481 }
2483 __ bind(&done);
2484 context()->Plug(v0);
2485 }
2488 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2489 // Constructor is in v0.
2490 DCHECK(lit != NULL);
2491 __ push(v0);
2493 // No access check is needed here since the constructor is created by the
2494 // class literal.
2495 Register scratch = a1;
2496 __ lw(scratch,
2497 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2498 __ push(scratch);
2500 for (int i = 0; i < lit->properties()->length(); i++) {
2501 ObjectLiteral::Property* property = lit->properties()->at(i);
2502 Literal* key = property->key()->AsLiteral();
2503 Expression* value = property->value();
2504 DCHECK(key != NULL);
2506 if (property->is_static()) {
2507 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor
2508 } else {
2509 __ lw(scratch, MemOperand(sp, 0)); // prototype
2510 }
2511 __ push(scratch);
2512 VisitForStackValue(key);
2513 VisitForStackValue(value);
2515 switch (property->kind()) {
2516 case ObjectLiteral::Property::CONSTANT:
2517 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2518 case ObjectLiteral::Property::COMPUTED:
2519 case ObjectLiteral::Property::PROTOTYPE:
2520 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2521 break;
2523 case ObjectLiteral::Property::GETTER:
2524 __ CallRuntime(Runtime::kDefineClassGetter, 3);
2525 break;
2527 case ObjectLiteral::Property::SETTER:
2528 __ CallRuntime(Runtime::kDefineClassSetter, 3);
2529 break;
2530 default:
2531 UNREACHABLE();
2532 }
2533 }
2535 // prototype
2537 __ CallRuntime(Runtime::kToFastProperties, 1);
2539 // constructor
2540 __ CallRuntime(Runtime::kToFastProperties, 1);
2541 }
2544 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2545 Token::Value op,
2546 OverwriteMode mode) {
2547 __ mov(a0, result_register());
2548 __ pop(a1);
2549 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2550 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2551 CallIC(code, expr->BinaryOperationFeedbackId());
2552 patch_site.EmitPatchInfo();
2553 context()->Plug(v0);
2557 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2558 DCHECK(expr->IsValidReferenceExpression());
2560 Property* prop = expr->AsProperty();
2561 LhsKind assign_type = GetAssignType(prop);
2563 switch (assign_type) {
2564 case VARIABLE: {
2565 Variable* var = expr->AsVariableProxy()->var();
2566 EffectContext context(this);
2567 EmitVariableAssignment(var, Token::ASSIGN);
2568 break;
2569 }
2570 case NAMED_PROPERTY: {
2571 __ push(result_register()); // Preserve value.
2572 VisitForAccumulatorValue(prop->obj());
2573 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2574 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2575 __ li(StoreDescriptor::NameRegister(),
2576 Operand(prop->key()->AsLiteral()->value()));
2577 CallStoreIC();
2578 break;
2579 }
2580 case NAMED_SUPER_PROPERTY: {
2581 __ Push(v0);
2582 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2583 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2584 // stack: value, this; v0: home_object
2585 Register scratch = a2;
2586 Register scratch2 = a3;
2587 __ mov(scratch, result_register()); // home_object
2588 __ lw(v0, MemOperand(sp, kPointerSize)); // value
2589 __ lw(scratch2, MemOperand(sp, 0)); // this
2590 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this
2591 __ sw(scratch, MemOperand(sp, 0)); // home_object
2592 // stack: this, home_object; v0: value
2593 EmitNamedSuperPropertyStore(prop);
2594 break;
2595 }
2596 case KEYED_SUPER_PROPERTY: {
2597 __ Push(v0);
2598 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2599 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2600 __ Push(result_register());
2601 VisitForAccumulatorValue(prop->key());
2602 Register scratch = a2;
2603 Register scratch2 = a3;
2604 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2605 // stack: value, this, home_object; v0: key, a3: value
2606 __ lw(scratch, MemOperand(sp, kPointerSize)); // this
2607 __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2608 __ lw(scratch, MemOperand(sp, 0)); // home_object
2609 __ sw(scratch, MemOperand(sp, kPointerSize));
2610 __ sw(v0, MemOperand(sp, 0));
2611 __ Move(v0, scratch2);
2612 // stack: this, home_object, key; v0: value.
2613 EmitKeyedSuperPropertyStore(prop);
2614 break;
2615 }
2616 case KEYED_PROPERTY: {
2617 __ push(result_register()); // Preserve value.
2618 VisitForStackValue(prop->obj());
2619 VisitForAccumulatorValue(prop->key());
2620 __ mov(StoreDescriptor::NameRegister(), result_register());
2621 __ Pop(StoreDescriptor::ValueRegister(),
2622 StoreDescriptor::ReceiverRegister());
2623 Handle<Code> ic =
2624 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2625 CallIC(ic);
2626 break;
2627 }
2628 }
2629 context()->Plug(v0);
2630 }
2633 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2634 Variable* var, MemOperand location) {
2635 __ sw(result_register(), location);
2636 if (var->IsContextSlot()) {
2637 // RecordWrite may destroy all its register arguments.
2638 __ Move(a3, result_register());
2639 int offset = Context::SlotOffset(var->index());
2640 __ RecordWriteContextSlot(
2641 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2642 }
2643 }
2646 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2647 if (var->IsUnallocated()) {
2648 // Global var, const, or let.
2649 __ mov(StoreDescriptor::ValueRegister(), result_register());
2650 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2651 __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2652 CallStoreIC();
2654 } else if (op == Token::INIT_CONST_LEGACY) {
2655 // Const initializers need a write barrier.
2656 DCHECK(!var->IsParameter()); // No const parameters.
2657 if (var->IsLookupSlot()) {
2658 __ li(a0, Operand(var->name()));
2659 __ Push(v0, cp, a0); // Context and name.
2660 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2661 } else {
2662 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2663 Label skip;
2664 MemOperand location = VarOperand(var, a1);
2665 __ lw(a2, location);
2666 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2667 __ Branch(&skip, ne, a2, Operand(at));
2668 EmitStoreToStackLocalOrContextSlot(var, location);
2669 __ bind(&skip);
2670 }
2672 } else if (var->mode() == LET && op != Token::INIT_LET) {
2673 // Non-initializing assignment to let variable needs a write barrier.
2674 DCHECK(!var->IsLookupSlot());
2675 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2676 Label assign;
2677 MemOperand location = VarOperand(var, a1);
2678 __ lw(a3, location);
2679 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2680 __ Branch(&assign, ne, a3, Operand(t0));
2681 __ li(a3, Operand(var->name()));
2682 __ push(a3);
2683 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2684 // Perform the assignment.
2685 __ bind(&assign);
2686 EmitStoreToStackLocalOrContextSlot(var, location);
2688 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2689 if (var->IsLookupSlot()) {
2690 // Assignment to var.
2691 __ li(a1, Operand(var->name()));
2692 __ li(a0, Operand(Smi::FromInt(strict_mode())));
2693 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
2694 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2696 // Assignment to var or initializing assignment to let/const in harmony
2697 // mode.
2698 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2699 MemOperand location = VarOperand(var, a1);
2700 if (generate_debug_code_ && op == Token::INIT_LET) {
2701 // Check for an uninitialized let binding.
2702 __ lw(a2, location);
2703 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2704 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2705 }
2706 EmitStoreToStackLocalOrContextSlot(var, location);
2707 }
2708 }
2709 // Non-initializing assignments to consts are ignored.
2710 }
2713 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2714 // Assignment to a property, using a named store IC.
2715 Property* prop = expr->target()->AsProperty();
2716 DCHECK(prop != NULL);
2717 DCHECK(prop->key()->IsLiteral());
2719 // Record source code position before IC call.
2720 SetSourcePosition(expr->position());
2721 __ mov(StoreDescriptor::ValueRegister(), result_register());
2722 __ li(StoreDescriptor::NameRegister(),
2723 Operand(prop->key()->AsLiteral()->value()));
2724 __ pop(StoreDescriptor::ReceiverRegister());
2725 CallStoreIC(expr->AssignmentFeedbackId());
2727 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2728 context()->Plug(v0);
2732 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2733 // Assignment to named property of super.
2735 // stack : receiver ('this'), home_object
2736 DCHECK(prop != NULL);
2737 Literal* key = prop->key()->AsLiteral();
2738 DCHECK(key != NULL);
2740 __ Push(key->value());
2741 __ Push(v0);
2742 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2743 : Runtime::kStoreToSuper_Sloppy),
2744 4);
2745 }
2748 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2749 // Assignment to keyed property of super.
2751 // stack : receiver ('this'), home_object, key
2752 DCHECK(prop != NULL);
2754 __ Push(v0);
2755 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict
2756 : Runtime::kStoreKeyedToSuper_Sloppy),
2757 4);
2758 }
2761 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2762 // Assignment to a property, using a keyed store IC.
2764 // Record source code position before IC call.
2765 SetSourcePosition(expr->position());
2766 // Call keyed store IC.
2767 // The arguments are:
2768 // - a0 is the value,
2769 // - a1 is the key,
2770 // - a2 is the receiver.
2771 __ mov(StoreDescriptor::ValueRegister(), result_register());
2772 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2773 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2775 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2776 CallIC(ic, expr->AssignmentFeedbackId());
2778 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2779 context()->Plug(v0);
2783 void FullCodeGenerator::VisitProperty(Property* expr) {
2784 Comment cmnt(masm_, "[ Property");
2785 Expression* key = expr->key();
2787 if (key->IsPropertyName()) {
2788 if (!expr->IsSuperAccess()) {
2789 VisitForAccumulatorValue(expr->obj());
2790 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2791 EmitNamedPropertyLoad(expr);
2792 } else {
2793 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2794 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2795 __ Push(result_register());
2796 EmitNamedSuperPropertyLoad(expr);
2797 }
2798 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2799 context()->Plug(v0);
2800 } else {
2801 if (!expr->IsSuperAccess()) {
2802 VisitForStackValue(expr->obj());
2803 VisitForAccumulatorValue(expr->key());
2804 __ Move(LoadDescriptor::NameRegister(), v0);
2805 __ pop(LoadDescriptor::ReceiverRegister());
2806 EmitKeyedPropertyLoad(expr);
2807 } else {
2808 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2809 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2810 __ Push(result_register());
2811 VisitForStackValue(expr->key());
2812 EmitKeyedSuperPropertyLoad(expr);
2813 }
2814 context()->Plug(v0);
2815 }
2816 }
2819 void FullCodeGenerator::CallIC(Handle<Code> code,
2820 TypeFeedbackId id) {
2821 ic_total_count_++;
2822 __ Call(code, RelocInfo::CODE_TARGET, id);
2823 }
2826 // Code common for calls using the IC.
2827 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2828 Expression* callee = expr->expression();
2830 CallICState::CallType call_type =
2831 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2833 // Get the target function.
2834 if (call_type == CallICState::FUNCTION) {
2835 { StackValueContext context(this);
2836 EmitVariableLoad(callee->AsVariableProxy());
2837 PrepareForBailout(callee, NO_REGISTERS);
2838 }
2839 // Push undefined as receiver. This is patched in the method prologue if it
2840 // is a sloppy mode method.
2841 __ Push(isolate()->factory()->undefined_value());
2842 } else {
2843 // Load the function from the receiver.
2844 DCHECK(callee->IsProperty());
2845 DCHECK(!callee->AsProperty()->IsSuperAccess());
2846 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2847 EmitNamedPropertyLoad(callee->AsProperty());
2848 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2849 // Push the target function under the receiver.
2850 __ lw(at, MemOperand(sp, 0));
2851 __ push(at);
2852 __ sw(v0, MemOperand(sp, kPointerSize));
2853 }
2855 EmitCall(expr, call_type);
2856 }
2859 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2860 Expression* callee = expr->expression();
2861 DCHECK(callee->IsProperty());
2862 Property* prop = callee->AsProperty();
2863 DCHECK(prop->IsSuperAccess());
2865 SetSourcePosition(prop->position());
2866 Literal* key = prop->key()->AsLiteral();
2867 DCHECK(!key->value()->IsSmi());
2868 // Load the function from the receiver.
2869 const Register scratch = a1;
2870 SuperReference* super_ref = prop->obj()->AsSuperReference();
2871 EmitLoadHomeObject(super_ref);
2872 __ mov(scratch, v0);
2873 VisitForAccumulatorValue(super_ref->this_var());
2874 __ Push(scratch, v0, v0, scratch);
2875 __ Push(key->value());
2877 // Stack here:
2878 //  - home_object
2879 //  - this (receiver)
2880 //  - this (receiver) <-- LoadFromSuper will pop here and below.
2881 //  - home_object
2882 //  - key
2883 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2885 // Replace home_object with target function.
2886 __ sw(v0, MemOperand(sp, kPointerSize));
2889 // - target function
2890 // - this (receiver)
2891 EmitCall(expr, CallICState::METHOD);
2895 // Code common for calls using the IC.
2896 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2897 Expression* key) {
2898 // Load the key.
2899 VisitForAccumulatorValue(key);
2901 Expression* callee = expr->expression();
2903 // Load the function from the receiver.
2904 DCHECK(callee->IsProperty());
2905 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2906 __ Move(LoadDescriptor::NameRegister(), v0);
2907 EmitKeyedPropertyLoad(callee->AsProperty());
2908 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2910 // Push the target function under the receiver.
2911 __ lw(at, MemOperand(sp, 0));
2912 __ push(at);
2913 __ sw(v0, MemOperand(sp, kPointerSize));
2915 EmitCall(expr, CallICState::METHOD);
2916 }
2919 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2920 Expression* callee = expr->expression();
2921 DCHECK(callee->IsProperty());
2922 Property* prop = callee->AsProperty();
2923 DCHECK(prop->IsSuperAccess());
2925 SetSourcePosition(prop->position());
2926 // Load the function from the receiver.
2927 const Register scratch = a1;
2928 SuperReference* super_ref = prop->obj()->AsSuperReference();
2929 EmitLoadHomeObject(super_ref);
2930 __ Move(scratch, v0);
2931 VisitForAccumulatorValue(super_ref->this_var());
2932 __ Push(scratch, v0, v0, scratch);
2933 VisitForStackValue(prop->key());
2935 // Stack here:
2936 //  - home_object
2937 //  - this (receiver)
2938 //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2939 //  - home_object
2940 //  - key
2941 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2943 // Replace home_object with target function.
2944 __ sw(v0, MemOperand(sp, kPointerSize));
2947 // - target function
2948 // - this (receiver)
2949 EmitCall(expr, CallICState::METHOD);
2953 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2954 // Load the arguments.
2955 ZoneList<Expression*>* args = expr->arguments();
2956 int arg_count = args->length();
2957 { PreservePositionScope scope(masm()->positions_recorder());
2958 for (int i = 0; i < arg_count; i++) {
2959 VisitForStackValue(args->at(i));
2963 // Record source position of the IC call.
2964 SetSourcePosition(expr->position());
2965 Handle<Code> ic = CallIC::initialize_stub(
2966 isolate(), arg_count, call_type);
2967 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
2968 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2969 // Don't assign a type feedback id to the IC, since type feedback is provided
2970 // by the vector above.
2971 CallIC(ic);
2973 RecordJSReturnSite(expr);
2974 // Restore context register.
2975 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2976 context()->DropAndPlug(1, v0);
2980 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2981 // t3: copy of the first argument or undefined if it doesn't exist.
2982 if (arg_count > 0) {
2983 __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
2984 } else {
2985 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
2986 }
2988 // t2: the calling function.
2989 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2991 // t1: the receiver of the enclosing function.
2992 int receiver_offset = 2 + info_->scope()->num_parameters();
2993 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
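// (Added note: the receiver lies above the saved fp and return address,
// hence the constant 2, followed by the formal parameters.)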
2995 // t0: the strict mode.
2996 __ li(t0, Operand(Smi::FromInt(strict_mode())));
2998 // a1: the start position of the scope the call resides in.
2999 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3001 // Do the runtime call.
3002 __ push(t3);
3003 __ Push(t2, t1, t0, a1);
3004 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3005 }
3008 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
3009 DCHECK(super_ref != NULL);
3010 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3011 __ Push(a0);
3012 __ CallRuntime(Runtime::kGetPrototype, 1);
3013 }
3016 void FullCodeGenerator::VisitCall(Call* expr) {
3017 #ifdef DEBUG
3018 // We want to verify that RecordJSReturnSite gets called on all paths
3019 // through this function. Avoid early returns.
3020 expr->return_is_recorded_ = false;
3021 #endif
3023 Comment cmnt(masm_, "[ Call");
3024 Expression* callee = expr->expression();
3025 Call::CallType call_type = expr->GetCallType(isolate());
3027 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3028 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3029 // to resolve the function we need to call and the receiver of the
3030 // call. Then we call the resolved function using the given
3031 // arguments.
3032 ZoneList<Expression*>* args = expr->arguments();
3033 int arg_count = args->length();
3035 { PreservePositionScope pos_scope(masm()->positions_recorder());
3036 VisitForStackValue(callee);
3037 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3038 __ push(a2); // Reserved receiver slot.
3040 // Push the arguments.
3041 for (int i = 0; i < arg_count; i++) {
3042 VisitForStackValue(args->at(i));
3043 }
3045 // Push a copy of the function (found below the arguments) and
3046 // resolve eval.
3047 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3048 __ push(a1);
3049 EmitResolvePossiblyDirectEval(arg_count);
3051 // The runtime call returns a pair of values in v0 (function) and
3052 // v1 (receiver). Touch up the stack with the right values.
3053 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3054 __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
3055 }
3056 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3058 // Record source position for debugger.
3059 SetSourcePosition(expr->position());
3060 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3061 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3062 __ CallStub(&stub);
3063 RecordJSReturnSite(expr);
3064 // Restore context register.
3065 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3066 context()->DropAndPlug(1, v0);
3067 } else if (call_type == Call::GLOBAL_CALL) {
3068 EmitCallWithLoadIC(expr);
3069 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3070 // Call to a lookup slot (dynamically introduced variable).
3071 VariableProxy* proxy = callee->AsVariableProxy();
3072 Label slow, done;
3074 { PreservePositionScope scope(masm()->positions_recorder());
3075 // Generate code for loading from variables potentially shadowed
3076 // by eval-introduced variables.
3077 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3078 }
3080 __ bind(&slow);
3081 // Call the runtime to find the function to call (returned in v0)
3082 // and the object holding it (returned in v1).
3083 DCHECK(!context_register().is(a2));
3084 __ li(a2, Operand(proxy->name()));
3085 __ Push(context_register(), a2);
3086 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3087 __ Push(v0, v1); // Function, receiver.
3088 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3090 // If fast case code has been generated, emit code to push the
3091 // function and receiver and have the slow path jump around this
3093 if (done.is_linked()) {
3094 Label call;
3095 __ Branch(&call);
3096 __ bind(&done);
3097 // Push function.
3098 __ push(v0);
3099 // The receiver is implicitly the global receiver. Indicate this
3100 // by passing undefined to the call function stub.
3101 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3102 __ push(a1);
3103 __ bind(&call);
3104 }
3106 // The receiver is either the global receiver or an object found
3107 // by LoadContextSlot.
3108 EmitCall(expr);
3109 } else if (call_type == Call::PROPERTY_CALL) {
3110 Property* property = callee->AsProperty();
3111 bool is_named_call = property->key()->IsPropertyName();
3112 if (property->IsSuperAccess()) {
3113 if (is_named_call) {
3114 EmitSuperCallWithLoadIC(expr);
3115 } else {
3116 EmitKeyedSuperCallWithLoadIC(expr);
3117 }
3118 } else {
3119 {
3120 PreservePositionScope scope(masm()->positions_recorder());
3121 VisitForStackValue(property->obj());
3122 }
3123 if (is_named_call) {
3124 EmitCallWithLoadIC(expr);
3125 } else {
3126 EmitKeyedCallWithLoadIC(expr, property->key());
3127 }
3128 }
3129 } else if (call_type == Call::SUPER_CALL) {
3130 SuperReference* super_ref = callee->AsSuperReference();
3131 EmitLoadSuperConstructor(super_ref);
3132 __ Push(result_register());
3133 VisitForStackValue(super_ref->this_var());
3134 EmitCall(expr, CallICState::METHOD);
3135 } else {
3136 DCHECK(call_type == Call::OTHER_CALL);
3137 // Call to an arbitrary expression not handled specially above.
3138 { PreservePositionScope scope(masm()->positions_recorder());
3139 VisitForStackValue(callee);
3140 }
3141 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3142 __ push(a1);
3143 // Emit function call.
3144 EmitCall(expr);
3145 }
3147 #ifdef DEBUG
3148 // RecordJSReturnSite should have been called.
3149 DCHECK(expr->return_is_recorded_);
3150 #endif
3151 }
3154 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3155 Comment cmnt(masm_, "[ CallNew");
3156 // According to ECMA-262, section 11.2.2, page 44, the function
3157 // expression in new calls must be evaluated before the
3158 // arguments.
3160 // Push constructor on the stack. If it's not a function it's used as
3161 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3162 // ignored.
3163 if (expr->expression()->IsSuperReference()) {
3164 EmitLoadSuperConstructor(expr->expression()->AsSuperReference());
3165 __ Push(result_register());
3166 } else {
3167 VisitForStackValue(expr->expression());
3168 }
3170 // Push the arguments ("left-to-right") on the stack.
3171 ZoneList<Expression*>* args = expr->arguments();
3172 int arg_count = args->length();
3173 for (int i = 0; i < arg_count; i++) {
3174 VisitForStackValue(args->at(i));
3175 }
3177 // Call the construct call builtin that handles allocation and
3178 // constructor invocation.
3179 SetSourcePosition(expr->position());
3181 // Load function and argument count into a1 and a0.
3182 __ li(a0, Operand(arg_count));
3183 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
3185 // Record call targets in unoptimized code.
3186 if (FLAG_pretenuring_call_new) {
3187 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3188 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3189 expr->CallNewFeedbackSlot().ToInt() + 1);
3190 }
3192 __ li(a2, FeedbackVector());
3193 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3195 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3196 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3197 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3198 context()->Plug(v0);
3199 }
3202 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3203 ZoneList<Expression*>* args = expr->arguments();
3204 DCHECK(args->length() == 1);
3206 VisitForAccumulatorValue(args->at(0));
3208 Label materialize_true, materialize_false;
3209 Label* if_true = NULL;
3210 Label* if_false = NULL;
3211 Label* fall_through = NULL;
3212 context()->PrepareTest(&materialize_true, &materialize_false,
3213 &if_true, &if_false, &fall_through);
3215 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
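// (Added note: smis are tagged with a zero low bit (kSmiTag == 0), so masking
// v0 with kSmiTagMask yields zero exactly for smis; the split below tests
// that.)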
3216 __ SmiTst(v0, t0);
3217 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
3219 context()->Plug(if_true, if_false);
3220 }
3223 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3224 ZoneList<Expression*>* args = expr->arguments();
3225 DCHECK(args->length() == 1);
3227 VisitForAccumulatorValue(args->at(0));
3229 Label materialize_true, materialize_false;
3230 Label* if_true = NULL;
3231 Label* if_false = NULL;
3232 Label* fall_through = NULL;
3233 context()->PrepareTest(&materialize_true, &materialize_false,
3234 &if_true, &if_false, &fall_through);
3236 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3237 __ NonNegativeSmiTst(v0, at);
3238 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3240 context()->Plug(if_true, if_false);
3244 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3245 ZoneList<Expression*>* args = expr->arguments();
3246 DCHECK(args->length() == 1);
3248 VisitForAccumulatorValue(args->at(0));
3250 Label materialize_true, materialize_false;
3251 Label* if_true = NULL;
3252 Label* if_false = NULL;
3253 Label* fall_through = NULL;
3254 context()->PrepareTest(&materialize_true, &materialize_false,
3255 &if_true, &if_false, &fall_through);
3257 __ JumpIfSmi(v0, if_false);
3258 __ LoadRoot(at, Heap::kNullValueRootIndex);
3259 __ Branch(if_true, eq, v0, Operand(at));
3260 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3261 // Undetectable objects behave like undefined when tested with typeof.
3262 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3263 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3264 __ Branch(if_false, ne, at, Operand(zero_reg));
3265 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3266 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3267 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3268 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3269 if_true, if_false, fall_through);
3271 context()->Plug(if_true, if_false);
3275 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 DCHECK(args->length() == 1);
3279 VisitForAccumulatorValue(args->at(0));
3281 Label materialize_true, materialize_false;
3282 Label* if_true = NULL;
3283 Label* if_false = NULL;
3284 Label* fall_through = NULL;
3285 context()->PrepareTest(&materialize_true, &materialize_false,
3286 &if_true, &if_false, &fall_through);
3288 __ JumpIfSmi(v0, if_false);
3289 __ GetObjectType(v0, a1, a1);
3290 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3291 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3292 if_true, if_false, fall_through);
3294 context()->Plug(if_true, if_false);
3298 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3299 ZoneList<Expression*>* args = expr->arguments();
3300 DCHECK(args->length() == 1);
3302 VisitForAccumulatorValue(args->at(0));
3304 Label materialize_true, materialize_false;
3305 Label* if_true = NULL;
3306 Label* if_false = NULL;
3307 Label* fall_through = NULL;
3308 context()->PrepareTest(&materialize_true, &materialize_false,
3309 &if_true, &if_false, &fall_through);
3311 __ JumpIfSmi(v0, if_false);
3312 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3313 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3314 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3315 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3316 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3318 context()->Plug(if_true, if_false);
3322 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3323 CallRuntime* expr) {
3324 ZoneList<Expression*>* args = expr->arguments();
3325 DCHECK(args->length() == 1);
3327 VisitForAccumulatorValue(args->at(0));
3329 Label materialize_true, materialize_false, skip_lookup;
3330 Label* if_true = NULL;
3331 Label* if_false = NULL;
3332 Label* fall_through = NULL;
3333 context()->PrepareTest(&materialize_true, &materialize_false,
3334 &if_true, &if_false, &fall_through);
3336 __ AssertNotSmi(v0);
3338 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3339 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
3340 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3341 __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3343 // Check for fast case object. Generate false result for slow case object.
3344 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3345 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3346 __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3347 __ Branch(if_false, eq, a2, Operand(t0));
3349 // Look for valueOf name in the descriptor array, and indicate false if
3350 // found. Since we omit an enumeration index check, if it is added via a
3351 // transition that shares its descriptor array, this is a false positive.
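// (Added illustration, not in the original source: any own 'valueOf'
// property, e.g. one installed with Object.defineProperty, makes the scan
// below find the name, and the result is then false.)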
3352 Label entry, loop, done;
3354 // Skip loop if no descriptors are valid.
3355 __ NumberOfOwnDescriptors(a3, a1);
3356 __ Branch(&done, eq, a3, Operand(zero_reg));
3358 __ LoadInstanceDescriptors(a1, t0);
3359 // t0: descriptor array.
3360 // a3: valid entries in the descriptor array.
3361 STATIC_ASSERT(kSmiTag == 0);
3362 STATIC_ASSERT(kSmiTagSize == 1);
3363 STATIC_ASSERT(kPointerSize == 4);
3364 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3365 __ Mul(a3, a3, at);
3366 // Calculate location of the first key name.
3367 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3368 // Calculate the end of the descriptor array.
3369 __ mov(a2, t0);
3370 __ sll(t1, a3, kPointerSizeLog2);
3371 __ Addu(a2, a2, t1);
3373 // Loop through all the keys in the descriptor array. If one of these is the
3374 // string "valueOf" the result is false.
3375 // The use of t2 to store the valueOf string assumes that it is not otherwise
3376 // used in the loop below.
3377 __ li(t2, Operand(isolate()->factory()->value_of_string()));
3378 __ jmp(&entry);
3379 __ bind(&loop);
3380 __ lw(a3, MemOperand(t0, 0));
3381 __ Branch(if_false, eq, a3, Operand(t2));
3382 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3383 __ bind(&entry);
3384 __ Branch(&loop, ne, t0, Operand(a2));
3386 __ bind(&done);
3388 // Set the bit in the map to indicate that there is no local valueOf field.
3389 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3390 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3391 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3393 __ bind(&skip_lookup);
3395 // If a valueOf property is not found on the object check that its
3396 // prototype is the un-modified String prototype. If not result is false.
3397 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3398 __ JumpIfSmi(a2, if_false);
3399 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3400 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3401 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3402 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3403 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3404 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3406 context()->Plug(if_true, if_false);
3410 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3411 ZoneList<Expression*>* args = expr->arguments();
3412 DCHECK(args->length() == 1);
3414 VisitForAccumulatorValue(args->at(0));
3416 Label materialize_true, materialize_false;
3417 Label* if_true = NULL;
3418 Label* if_false = NULL;
3419 Label* fall_through = NULL;
3420 context()->PrepareTest(&materialize_true, &materialize_false,
3421 &if_true, &if_false, &fall_through);
3423 __ JumpIfSmi(v0, if_false);
3424 __ GetObjectType(v0, a1, a2);
3425 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3426 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3427 __ Branch(if_false);
3429 context()->Plug(if_true, if_false);
3433 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3434 ZoneList<Expression*>* args = expr->arguments();
3435 DCHECK(args->length() == 1);
3437 VisitForAccumulatorValue(args->at(0));
3439 Label materialize_true, materialize_false;
3440 Label* if_true = NULL;
3441 Label* if_false = NULL;
3442 Label* fall_through = NULL;
3443 context()->PrepareTest(&materialize_true, &materialize_false,
3444 &if_true, &if_false, &fall_through);
3446 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3447 __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3448 __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
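// (Added note: -0 is the heap number whose exponent word is 0x80000000 and
// whose mantissa word is zero; the code below compares both words against
// that pattern.)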
3449 __ li(t0, 0x80000000);
3450 Label not_nan;
3451 __ Branch(&not_nan, ne, a2, Operand(t0));
3452 __ mov(t0, zero_reg);
3453 __ mov(a2, a1);
3454 __ bind(&not_nan);
3456 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3457 Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
3459 context()->Plug(if_true, if_false);
3463 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3464 ZoneList<Expression*>* args = expr->arguments();
3465 DCHECK(args->length() == 1);
3467 VisitForAccumulatorValue(args->at(0));
3469 Label materialize_true, materialize_false;
3470 Label* if_true = NULL;
3471 Label* if_false = NULL;
3472 Label* fall_through = NULL;
3473 context()->PrepareTest(&materialize_true, &materialize_false,
3474 &if_true, &if_false, &fall_through);
3476 __ JumpIfSmi(v0, if_false);
3477 __ GetObjectType(v0, a1, a1);
3478 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3479 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3480 if_true, if_false, fall_through);
3482 context()->Plug(if_true, if_false);
3486 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3487 ZoneList<Expression*>* args = expr->arguments();
3488 DCHECK(args->length() == 1);
3490 VisitForAccumulatorValue(args->at(0));
3492 Label materialize_true, materialize_false;
3493 Label* if_true = NULL;
3494 Label* if_false = NULL;
3495 Label* fall_through = NULL;
3496 context()->PrepareTest(&materialize_true, &materialize_false,
3497 &if_true, &if_false, &fall_through);
3499 __ JumpIfSmi(v0, if_false);
3500 __ GetObjectType(v0, a1, a1);
3501 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3502 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3504 context()->Plug(if_true, if_false);
3508 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3509 ZoneList<Expression*>* args = expr->arguments();
3510 DCHECK(args->length() == 1);
3512 VisitForAccumulatorValue(args->at(0));
3514 Label materialize_true, materialize_false;
3515 Label* if_true = NULL;
3516 Label* if_false = NULL;
3517 Label* fall_through = NULL;
3518 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3519 &if_false, &fall_through);
3521 __ JumpIfSmi(v0, if_false);
3522 Register map = a1;
3523 Register type_reg = a2;
3524 __ GetObjectType(v0, map, type_reg);
3525 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3526 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3527 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3528 if_true, if_false, fall_through);
3530 context()->Plug(if_true, if_false);
3534 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3535 DCHECK(expr->arguments()->length() == 0);
3537 Label materialize_true, materialize_false;
3538 Label* if_true = NULL;
3539 Label* if_false = NULL;
3540 Label* fall_through = NULL;
3541 context()->PrepareTest(&materialize_true, &materialize_false,
3542 &if_true, &if_false, &fall_through);
3544 // Get the frame pointer for the calling frame.
3545 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3547 // Skip the arguments adaptor frame if it exists.
3548 Label check_frame_marker;
3549 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3550 __ Branch(&check_frame_marker, ne,
3551 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3552 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3554 // Check the marker in the calling frame.
3555 __ bind(&check_frame_marker);
3556 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3557 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3558 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3559 if_true, if_false, fall_through);
3561 context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}
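
// For reference, the fast path above computes approximately the following
// (pseudocode for the class-of computation; the authoritative semantics
// are the type checks above):
//
//   if (IsSmi(obj)) return null;
//   if (type(obj) < FIRST_SPEC_OBJECT_TYPE) return null;  // not a JS object
//   if (IsCallable(obj)) return "Function";
//   ctor = obj.map.constructor;
//   if (!IsJSFunction(ctor)) return "Object";
//   return ctor.shared.instance_class_name;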


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(v0);
}
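
// The stamp comparison above guards the per-object date cache: the isolate
// bumps its date_cache_stamp whenever cached date fields (year, month,
// day, ...) become stale, e.g. after a timezone change. Roughly (a sketch):
//
//   if (index == 0) return date->value();           // never cached
//   if (date->cache_stamp() == isolate->date_cache_stamp())
//     return date->cached_field(index);             // fast path
//   return get_date_field_function(date, index);    // C call refills cache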


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}
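
// Indexing note for the two helpers above (a sketch of the arithmetic):
// the incoming index is a smi, i.e. the raw word holds (index << 1) with
// kSmiTagSize == 1. The one-byte variant needs a plain byte offset, so it
// untags first:
//
//   addr = string + (kHeaderSize - kHeapObjectTag) + (smi_word >> 1);
//
// The two-byte variant needs index * 2 bytes, which is exactly the smi
// word itself, hence the STATIC_ASSERT above and no untagging:
//
//   addr = string + (kHeaderSize - kHeapObjectTag) + smi_word;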


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(
            cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}
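
// The cache probed above is a JSFunctionResultCache: a FixedArray whose
// header slots include a "finger", a smi element offset pointing at the
// most recently hit key, followed by key/value pairs. The inline fast
// path only ever probes the finger entry; every miss goes to the runtime,
// which searches the whole cache and advances the finger. A sketch:
//
//   finger = cache->get(kFingerIndex);                 // smi offset
//   if (cache->get(finger) == key) return cache->get(finger + 1);
//   return Runtime_GetFromCache(cache_id, key);        // slow path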


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);
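
  // Overflow reasoning for the length computation above (a sketch of the
  // arithmetic): the separator length is a smi (value << 1) and
  // array_length is untagged, so the 32x32->64 multiply leaves
  //
  //   hi:lo = separator_length_smi * array_length
  //
  // in scratch3:scratch2. The product is a valid non-negative smi only if
  // all upper 33 bits are clear, i.e. hi == 0 and the sign bit of lo is 0;
  // either failing bit pattern bails out above.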

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}
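
// The whole fast path above is a specialization of Array.prototype.join
// for flat one-byte strings; in pseudocode it is roughly:
//
//   if (!IsJSArray(a) || !HasFastElements(a)) return bailout();
//   var r = "";
//   for (var i = 0; i < a.length; i++) {
//     if (i > 0) r += sep;
//     r += a[i];            // must already be a flat one-byte string
//   }
//   return r;
//
// except that the result is allocated once up front and filled in place
// with CopyBytes instead of being grown by repeated concatenation.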


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    Register receiver = LoadDescriptor::ReceiverRegister();
    __ lw(receiver, GlobalObjectOperand());
    __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
    __ push(receiver);

    // Load the function from the receiver.
    __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
    if (FLAG_vector_ics) {
      __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(v0);
  }
}
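
// For the is_jsruntime() branch above, the stack is arranged to match
// CallFunctionStub's convention. Just before the stub call it looks like
// this (a sketch; argc is arg_count):
//
//   sp + (argc + 1) * kPointerSize : target function  (also loaded into a1)
//   sp + argc * kPointerSize       : receiver (the builtins object)
//   sp + (argc - 1) * kPointerSize : arg 0
//   ...
//   sp + 0                         : arg argc-1
//
// DropAndPlug(1, v0) afterwards removes the function slot that was pushed
// under the receiver.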


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(strict_mode())));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
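
// Note on Token::NOT above: in a test context nothing is materialized;
// logical negation is purely a relabeling of the branch targets.
// Conceptually:
//
//   VisitForControl(e, t, f, ft)   emits   if (e) goto t; else goto f;
//   VisitForControl(e, f, t, ft)   emits   if (e) goto f; else goto t;
//
// and the second form is exactly "if (!e) goto t; else goto f;", hence
// the swapped labels in the recursive call.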


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        __ Push(result_register());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        const Register scratch = a1;
        const Register scratch1 = t0;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(scratch, result_register());
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;

  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Subu(v0, v0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
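
  // The inlined increment above leans on smi encoding (raw word ==
  // value << 1): adding the tagged Smi::FromInt(count_value) adds
  // 2 * count_value to the raw word, which is value +/- 1 re-tagged.
  // A sketch:
  //
  //   raw(v0) == 2 * n
  //   raw(v0) + raw(Smi(1)) == 2 * n + 2 == raw(Smi(n + 1))
  //
  // AdduAndCheckForOverflow catches the case where the result leaves the
  // 31-bit smi range; the Subu undoes the addition before falling back to
  // the BinaryOpIC, which expects the original operand.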
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
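
// The chain above is the inlined form of `typeof x == "literal"` for the
// literals the compiler recognizes; schematically:
//
//   "number"    -> smi, or map == heap-number map
//   "string"    -> instance type < FIRST_NONSTRING_TYPE, not undetectable
//   "symbol"    -> instance type == SYMBOL_TYPE
//   "boolean"   -> true or false
//   "undefined" -> undefined, or an undetectable object
//   "function"  -> JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//   "object"    -> null, or a non-callable JS object, not undetectable
//
// Any other literal can never match, hence the unconditional jump to
// if_false in the final else clause.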


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}
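
// "Cooking" sketch: the raw return address in ra would go stale if the GC
// moved this Code object, so what gets saved is a relocation-safe,
// smi-encoded offset instead:
//
//   cooked = (ra - CodeObject()) << 1;  // Addu(a1, a1, a1) doubles == tags
//
// ExitFinallyBlock below reverses this: sra by 1 to untag, then add the
// (possibly relocated) CodeObject back in before jumping.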


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register, then uncook the return address and return.
  __ pop(result_register());
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
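
// Summary of the patched states (only the first instruction of the
// back-edge sequence is rewritten; the following beq consumes its result):
//
//   INTERRUPT:             slt   at, a3, zero_reg   // at = (a3 < 0)
//   ON_STACK_REPLACEMENT:  addiu at, zero_reg, 1    // at = 1 always
//
// With `beq at, zero_reg, ok` next, the interrupt form only calls the stub
// once the profiler counter in a3 drops below zero, while the OSR form
// makes the branch never taken, so t9 is called unconditionally.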


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
          reinterpret_cast<uint32_t>(
              isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS