// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
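// Illustrative encoding example (an assumption for exposition, not code from
// the original file): if EmitPatchInfo() runs two instructions after the
// patch site, delta_to_patch_site == 2, so reg = Register::from_code(2 /
// kImm16Mask) is register 0 (zero_reg) and the marker emitted is
// 'andi zero_reg, zero_reg, 2'. A patcher can then recover the delta as
// reg.code() * kImm16Mask + imm16 and walk back to the patchable branch.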
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }
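  // Worked example (illustrative): with locals_count == 70 and the default
  // kMaxPushes of 32, the loop above runs 70 / 32 == 2 iterations (64 slots)
  // and the unrolled tail stores the remaining 70 % 32 == 6 slots, so all 70
  // stack locals end up initialized to undefined.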
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                   (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ Addu(a3, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a2, Operand(Smi::FromInt(num_parameters)));
    __ li(a1, Operand(Smi::FromInt(rest_index)));
    __ Push(a3, a2, a1);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, v0, a1, a2);
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}
void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
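  // Illustrative numbers (assuming kCodeSizeMultiplier is on the order of
  // 100-150): a back edge spanning roughly 1200 bytes of generated code
  // yields a weight near 10, capped at kMaxBackEdgeWeight, so larger loop
  // bodies drain the interrupt budget proportionally faster.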
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      int32_t sp_delta = arg_count * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
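  // As consumed below, the ToBoolean IC leaves zero in v0 for a falsy value
  // and a non-zero value otherwise; the explicit zero in 'at' makes the
  // Split comparison read as 'v0 != 0'.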
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}
void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
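// Usage sketch for Split above (illustrative): when the false block
// immediately follows the comparison, callers pass fall_through == if_false,
// so a single 'Branch(if_true, cc, lhs, rhs)' is emitted and control simply
// falls into the false block; only when neither label is the fall-through
// does Split pay for two branches.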
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
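// Worked example, derived from the arithmetic above (illustrative): stack
// local i resolves to MemOperand(fp, kLocal0Offset - i * kPointerSize), so
// successive locals live at successively lower addresses below fp, while
// parameter i of an n-parameter function resolves to
// MemOperand(fp, (n + 1 - i) * kPointerSize), above fp in the caller's frame.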
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // lexical context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ModuleDescriptor* descriptor = declaration->module()->descriptor();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(descriptor->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->ScriptScope()));
  __ lw(a1, ContextOperand(a1, descriptor->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();
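    // Once the CompareIC learns that both operands are smis, the IC patching
    // machinery uses the marker emitted by EmitPatchInfo() above to locate
    // the patch site and enable the inlined smi comparison, so later
    // executions can skip the stub call (see the JumpPatchSite comment at
    // the top of this file).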
    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());

    __ bind(&skip);
    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);
  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
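  // At this point the loop state occupies five stack slots, which is what
  // the loads below (and the final Drop(5)) rely on:
  //   sp[0] : current index (smi)
  //   sp[1] : array length (smi)
  //   sp[2] : fixed array of keys (or enum cache)
  //   sp[3] : expected map (fast case) or Smi 1/0 (slow/proxy case)
  //   sp[4] : the enumerable object itself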
  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ lw(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}
void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    CallStoreIC();
  }
}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
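        // MIPS movz writes v0 := a0 only when 'at' is zero, i.e. exactly when
        // the loaded value compared equal to the hole above, so the hole is
        // replaced by undefined without taking a branch.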
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
      CallGlobalLoadIC(var->name());
      context()->Plug(v0);
      break;
    }
    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }
    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
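  // Worked example (illustrative): on 32-bit MIPS FixedArray::kHeaderSize is
  // 8 bytes (map + length), so literal_index() == 3 gives a literal_offset of
  // 8 + 3 * 4 == 20; FieldMemOperand below additionally subtracts the
  // heap-object tag.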
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), v0);
              __ li(StoreDescriptor::NameRegister(),
                    Operand(isolate()->factory()->home_object_symbol()));
              __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp));
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(value, 2);
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }
  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(it->second->getter, 2);
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(it->second->setter, 3);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }
1802 // Object literals have two parts. The "static" part on the left contains no
1803 // computed property names, and so we can compute its map ahead of time; see
1804 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1805 // starts with the first computed property name, and continues with all
1806 // properties to its right. All the code from above initializes the static
1807 // component of the object literal, and arranges for the map of the result to
1808 // reflect the static order in which the keys appear. For the dynamic
1809 // properties, we compile them into a series of "SetOwnProperty" runtime
1810 // calls. This will preserve insertion order.
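  // For example, in { a: 1, [b]: 2, c: 3 } the boilerplate map covers only
  // 'a'; both '[b]' and 'c' are defined one at a time by the loop below, so
  // that insertion order is preserved.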
1811 for (; property_index < expr->properties()->length(); property_index++) {
1812 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1814 Expression* value = property->value();
1815 if (!result_saved) {
1816 __ push(v0); // Save result on the stack
1817 result_saved = true;
1820 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1823 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1824 DCHECK(!property->is_computed_name());
1825 VisitForStackValue(value);
1826 DCHECK(property->emit_store());
1827 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1829 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1830 VisitForStackValue(value);
1831 EmitSetHomeObjectIfNeeded(value, 2);
1833 switch (property->kind()) {
1834 case ObjectLiteral::Property::CONSTANT:
1835 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1836 case ObjectLiteral::Property::COMPUTED:
1837 if (property->emit_store()) {
1838 __ li(a0, Operand(Smi::FromInt(NONE)));
1840 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1846 case ObjectLiteral::Property::PROTOTYPE:
1850 case ObjectLiteral::Property::GETTER:
1851 __ li(a0, Operand(Smi::FromInt(NONE)));
1853 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1856 case ObjectLiteral::Property::SETTER:
1857 __ li(a0, Operand(Smi::FromInt(NONE)));
1859 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1865 if (expr->has_function()) {
1866 DCHECK(result_saved);
1867 __ lw(a0, MemOperand(sp));
1869 __ CallRuntime(Runtime::kToFastProperties, 1);
1873 context()->PlugTOS();
1875 context()->Plug(v0);
1880 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1881 Comment cmnt(masm_, "[ ArrayLiteral");
1883 expr->BuildConstantElements(isolate());
1885 Handle<FixedArray> constant_elements = expr->constant_elements();
1886 bool has_fast_elements =
1887 IsFastObjectElementsKind(expr->constant_elements_kind());
1889 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1890 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // Element transitions are the only remaining consumer of allocation
    // sites when pretenuring is off; with fast object elements there is
    // nothing left to transition to, so tracking can be turned off.
1893 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1896 __ mov(a0, result_register());
1897 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1898 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1899 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1900 __ li(a1, Operand(constant_elements));
1901 if (MustCreateArrayLiteralWithRuntime(expr)) {
1902 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1903 __ Push(a3, a2, a1, a0);
1904 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1906 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1909 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1911 bool result_saved = false; // Is the result saved to the stack?
1912 ZoneList<Expression*>* subexprs = expr->values();
1913 int length = subexprs->length();
1915 // Emit code to evaluate all the non-constant subexpressions and to store
1916 // them into the newly cloned array.
1917 for (int i = 0; i < length; i++) {
1918 Expression* subexpr = subexprs->at(i);
1919 // If the subexpression is a literal or a simple materialized literal it
1920 // is already set in the cloned array.
1921 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
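    // For example, in [1, foo(), 3] only foo() is visited and stored here;
    // 1 and 3 are already present in the boilerplate elements.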
1923 if (!result_saved) {
1924 __ push(v0); // array literal
1925 __ Push(Smi::FromInt(expr->literal_index()));
1926 result_saved = true;
1929 VisitForAccumulatorValue(subexpr);
1931 if (has_fast_elements) {
1932 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
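      // Element i of the FixedArray backing store lives at the header size
      // plus i words; FieldMemOperand folds in the -kHeapObjectTag.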
1933 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1934 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1935 __ sw(result_register(), FieldMemOperand(a1, offset));
1936 // Update the write barrier for the array store.
1937 __ RecordWriteField(a1, offset, result_register(), a2,
1938 kRAHasBeenSaved, kDontSaveFPRegs,
1939 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1941 __ li(a3, Operand(Smi::FromInt(i)));
1942 __ mov(a0, result_register());
1943 StoreArrayLiteralElementStub stub(isolate());
1947 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1950 __ Pop(); // literal index
1951 context()->PlugTOS();
1953 context()->Plug(v0);
1958 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1959 DCHECK(expr->target()->IsValidReferenceExpression());
1961 Comment cmnt(masm_, "[ Assignment");
1963 Property* property = expr->target()->AsProperty();
1964 LhsKind assign_type = GetAssignType(property);
1966 // Evaluate LHS expression.
1967 switch (assign_type) {
1969 // Nothing to do here.
1971 case NAMED_PROPERTY:
1972 if (expr->is_compound()) {
1973 // We need the receiver both on the stack and in the register.
1974 VisitForStackValue(property->obj());
1975 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1977 VisitForStackValue(property->obj());
1980 case NAMED_SUPER_PROPERTY:
1981 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1982 EmitLoadHomeObject(property->obj()->AsSuperReference());
1983 __ Push(result_register());
1984 if (expr->is_compound()) {
1985 const Register scratch = a1;
1986 __ lw(scratch, MemOperand(sp, kPointerSize));
1987 __ Push(scratch, result_register());
1990 case KEYED_SUPER_PROPERTY: {
1991 const Register scratch = a1;
1992 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1993 EmitLoadHomeObject(property->obj()->AsSuperReference());
1994 __ Move(scratch, result_register());
1995 VisitForAccumulatorValue(property->key());
1996 __ Push(scratch, result_register());
1997 if (expr->is_compound()) {
1998 const Register scratch1 = t0;
1999 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
2000 __ Push(scratch1, scratch, result_register());
2004 case KEYED_PROPERTY:
2005 // We need the key and receiver on both the stack and in v0 and a1.
2006 if (expr->is_compound()) {
2007 VisitForStackValue(property->obj());
2008 VisitForStackValue(property->key());
2009 __ lw(LoadDescriptor::ReceiverRegister(),
2010 MemOperand(sp, 1 * kPointerSize));
2011 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2013 VisitForStackValue(property->obj());
2014 VisitForStackValue(property->key());
2019 // For compound assignments we need another deoptimization point after the
2020 // variable/property load.
2021 if (expr->is_compound()) {
2022 { AccumulatorValueContext context(this);
2023 switch (assign_type) {
2025 EmitVariableLoad(expr->target()->AsVariableProxy());
2026 PrepareForBailout(expr->target(), TOS_REG);
2028 case NAMED_PROPERTY:
2029 EmitNamedPropertyLoad(property);
2030 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2032 case NAMED_SUPER_PROPERTY:
2033 EmitNamedSuperPropertyLoad(property);
2034 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2036 case KEYED_SUPER_PROPERTY:
2037 EmitKeyedSuperPropertyLoad(property);
2038 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2040 case KEYED_PROPERTY:
2041 EmitKeyedPropertyLoad(property);
2042 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2047 Token::Value op = expr->binary_op();
2048 __ push(v0); // Left operand goes on the stack.
2049 VisitForAccumulatorValue(expr->value());
2051 SetSourcePosition(expr->position() + 1);
2052 AccumulatorValueContext context(this);
2053 if (ShouldInlineSmiCase(op)) {
2054 EmitInlineSmiBinaryOp(expr->binary_operation(),
2059 EmitBinaryOp(expr->binary_operation(), op);
2062 // Deoptimization point in case the binary operation may have side effects.
2063 PrepareForBailout(expr->binary_operation(), TOS_REG);
2065 VisitForAccumulatorValue(expr->value());
2068 // Record source position before possible IC call.
2069 SetSourcePosition(expr->position());
2072 switch (assign_type) {
2074 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2076 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2077 context()->Plug(v0);
2079 case NAMED_PROPERTY:
2080 EmitNamedPropertyAssignment(expr);
2082 case NAMED_SUPER_PROPERTY:
2083 EmitNamedSuperPropertyStore(property);
2084 context()->Plug(v0);
2086 case KEYED_SUPER_PROPERTY:
2087 EmitKeyedSuperPropertyStore(property);
2088 context()->Plug(v0);
2090 case KEYED_PROPERTY:
2091 EmitKeyedPropertyAssignment(expr);
2097 void FullCodeGenerator::VisitYield(Yield* expr) {
2098 Comment cmnt(masm_, "[ Yield");
2099 // Evaluate yielded value first; the initial iterator definition depends on
2100 // this. It stays on the stack while we update the iterator.
2101 VisitForStackValue(expr->expression());
2103 switch (expr->yield_kind()) {
2104 case Yield::kSuspend:
2105 // Pop value from top-of-stack slot; box result into result register.
2106 EmitCreateIteratorResult(false);
2107 __ push(result_register());
2109 case Yield::kInitial: {
2110 Label suspend, continuation, post_runtime, resume;
2114 __ bind(&continuation);
2118 VisitForAccumulatorValue(expr->generator_object());
2119 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2120 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2121 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2122 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2124 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2125 kRAHasBeenSaved, kDontSaveFPRegs);
2126 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2127 __ Branch(&post_runtime, eq, sp, Operand(a1));
2128 __ push(v0); // generator object
2129 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2130 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2131 __ bind(&post_runtime);
2132 __ pop(result_register());
2133 EmitReturnSequence();
2136 context()->Plug(result_register());
2140 case Yield::kFinal: {
2141 VisitForAccumulatorValue(expr->generator_object());
2142 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2143 __ sw(a1, FieldMemOperand(result_register(),
2144 JSGeneratorObject::kContinuationOffset));
2145 // Pop value from top-of-stack slot, box result into result register.
2146 EmitCreateIteratorResult(true);
2147 EmitUnwindBeforeReturn();
2148 EmitReturnSequence();
2152 case Yield::kDelegating: {
2153 VisitForStackValue(expr->generator_object());
2155 // Initial stack layout is as follows:
2156 // [sp + 1 * kPointerSize] iter
2157 // [sp + 0 * kPointerSize] g
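      // A sketch (not the exact desugaring) of the loop emitted below:
      //   let received = undefined, f = 'next';
      //   for (;;) {
      //     let result = iter[f](received);            // l_call
      //     if (result.done) break;                    // else back to l_try
      //     try { received = yield result.value; f = 'next'; }  // l_suspend
      //     catch (e) { received = e; f = 'throw'; }   // l_catch
      //   }
      //   // result.value becomes the value of the whole yield* expression.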
2159 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2160 Label l_next, l_call;
2161 Register load_receiver = LoadDescriptor::ReceiverRegister();
2162 Register load_name = LoadDescriptor::NameRegister();
2164 // Initial send value is undefined.
2165 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2168 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2171 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2172 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2173 __ Push(load_name, a3, a0); // "throw", iter, except
2176 // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it
      // without re-boxing.
2180 __ pop(a0); // result
2181 EnterTryBlock(expr->index(), &l_catch);
2182 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2183 __ push(a0); // result
2185 __ bind(&l_continuation);
2188 __ bind(&l_suspend);
2189 const int generator_object_depth = kPointerSize + try_block_size;
2190 __ lw(a0, MemOperand(sp, generator_object_depth));
2192 __ Push(Smi::FromInt(expr->index())); // handler-index
2193 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2194 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2195 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2196 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2198 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2199 kRAHasBeenSaved, kDontSaveFPRegs);
2200 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2201 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2202 __ pop(v0); // result
2203 EmitReturnSequence();
2205 __ bind(&l_resume); // received in a0
2206 ExitTryBlock(expr->index());
2208 // receiver = iter; f = 'next'; arg = received;
2211 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2212 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2213 __ Push(load_name, a3, a0); // "next", iter, received
2215 // result = receiver[f](arg);
2217 __ lw(load_receiver, MemOperand(sp, kPointerSize));
2218 __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
2219 if (FLAG_vector_ics) {
2220 __ li(VectorLoadICDescriptor::SlotRegister(),
2221 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2223 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2224 CallIC(ic, TypeFeedbackId::None());
2227 __ sw(a1, MemOperand(sp, 2 * kPointerSize));
2228 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2231 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2232 __ Drop(1); // The function is still on the stack; drop it.
2234 // if (!result.done) goto l_try;
2235 __ Move(load_receiver, v0);
2237 __ push(load_receiver); // save result
2238 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2239 if (FLAG_vector_ics) {
2240 __ li(VectorLoadICDescriptor::SlotRegister(),
2241 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2243 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2245 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2247 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2250 __ pop(load_receiver); // result
2251 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2252 if (FLAG_vector_ics) {
2253 __ li(VectorLoadICDescriptor::SlotRegister(),
2254 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2256 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2257 context()->DropAndPlug(2, v0); // drop iter and g
2264 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2266 JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. If the
  // resumed generator is already closed, the value is instead read in order
  // to throw it. a1 will hold the generator object until the activation has
  // been resumed.
2271 VisitForStackValue(generator);
2272 VisitForAccumulatorValue(value);
2275 // Load suspended function and context.
2276 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2277 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2279 // Load receiver and store as the first argument.
2280 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2283 // Push holes for the rest of the arguments to the generator function.
2284 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
2286 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2287 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2288 Label push_argument_holes, push_frame;
2289 __ bind(&push_argument_holes);
2290 __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2291 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2293 __ jmp(&push_argument_holes);
2295 // Enter a new JavaScript frame, and initialize its slots as they were when
2296 // the generator was suspended.
2297 Label resume_frame, done;
2298 __ bind(&push_frame);
2299 __ Call(&resume_frame);
2301 __ bind(&resume_frame);
2302 // ra = return address.
2303 // fp = caller's frame pointer.
2304 // cp = callee's context,
2305 // t0 = callee's JS function.
2306 __ Push(ra, fp, cp, t0);
2307 // Adjust FP to point to saved FP.
2308 __ Addu(fp, sp, 2 * kPointerSize);
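  // The stack now holds, from higher to lower addresses: ra, caller fp, cp,
  // function -- the layout of a standard JS frame -- so fp must point at the
  // saved fp slot, two words above sp.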
2310 // Load the operand stack size.
2311 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2312 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  // If we are sending a value and there is no operand stack, we can jump
  // back in directly.
2317 if (resume_mode == JSGeneratorObject::NEXT) {
2319 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2320 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2321 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2323 __ Addu(a3, a3, Operand(a2));
2324 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2325 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2327 __ bind(&slow_resume);
2330 // Otherwise, we push holes for the operand stack and call the runtime to fix
2331 // up the stack and the handlers.
2332 Label push_operand_holes, call_resume;
2333 __ bind(&push_operand_holes);
2334 __ Subu(a3, a3, Operand(1));
2335 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2337 __ Branch(&push_operand_holes);
2338 __ bind(&call_resume);
2339 DCHECK(!result_register().is(a1));
2340 __ Push(a1, result_register());
2341 __ Push(Smi::FromInt(resume_mode));
2342 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2343 // Not reached: the runtime call returns elsewhere.
2344 __ stop("not-reached");
2347 context()->Plug(result_register());
2351 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2355 const int instance_size = 5 * kPointerSize;
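  // Five words: the JSObject header (map, properties, elements) plus the
  // two in-object fields of the iterator result, 'value' and 'done'.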
2356 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2359 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2362 __ bind(&gc_required);
2363 __ Push(Smi::FromInt(instance_size));
2364 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2365 __ lw(context_register(),
2366 MemOperand(fp, StandardFrameConstants::kContextOffset));
2368 __ bind(&allocated);
2369 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2370 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2371 __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2373 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2374 __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2375 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2376 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2377 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2379 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2381 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
  // Only the value field needs a write barrier, as the other values are in
  // the root set.
2385 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2386 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2390 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2391 SetSourcePosition(prop->position());
2392 Literal* key = prop->key()->AsLiteral();
2393 DCHECK(!prop->IsSuperAccess());
2395 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2396 if (FLAG_vector_ics) {
2397 __ li(VectorLoadICDescriptor::SlotRegister(),
2398 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2399 CallLoadIC(NOT_CONTEXTUAL);
2401 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2406 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2407 // Stack: receiver, home_object.
2408 SetSourcePosition(prop->position());
2409 Literal* key = prop->key()->AsLiteral();
2410 DCHECK(!key->value()->IsSmi());
2411 DCHECK(prop->IsSuperAccess());
2413 __ Push(key->value());
2414 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2418 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2419 SetSourcePosition(prop->position());
2420 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2421 if (FLAG_vector_ics) {
2422 __ li(VectorLoadICDescriptor::SlotRegister(),
2423 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2426 CallIC(ic, prop->PropertyFeedbackId());
2431 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2432 // Stack: receiver, home_object, key.
2433 SetSourcePosition(prop->position());
2435 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2439 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2441 Expression* left_expr,
2442 Expression* right_expr) {
2443 Label done, smi_case, stub_call;
2445 Register scratch1 = a2;
2446 Register scratch2 = a3;
2448 // Get the arguments.
2450 Register right = a0;
2452 __ mov(a0, result_register());
2454 // Perform combined smi check on both operands.
2455 __ Or(scratch1, left, Operand(right));
2456 STATIC_ASSERT(kSmiTag == 0);
2457 JumpPatchSite patch_site(masm_);
2458 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2460 __ bind(&stub_call);
2461 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2462 CallIC(code, expr->BinaryOperationFeedbackId());
2463 patch_site.EmitPatchInfo();
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub.
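  // The shift cases below must keep the result in smi range: SAR of a smi
  // cannot overflow, SHL checks for overflow into the sign bit, and SHR
  // requires the two topmost result bits to be clear.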
2471 __ GetLeastBitsFromSmi(scratch1, right, 5);
2472 __ srav(right, left, scratch1);
2473 __ And(v0, right, Operand(~kSmiTagMask));
2476 __ SmiUntag(scratch1, left);
2477 __ GetLeastBitsFromSmi(scratch2, right, 5);
2478 __ sllv(scratch1, scratch1, scratch2);
2479 __ Addu(scratch2, scratch1, Operand(0x40000000));
2480 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2481 __ SmiTag(v0, scratch1);
2485 __ SmiUntag(scratch1, left);
2486 __ GetLeastBitsFromSmi(scratch2, right, 5);
2487 __ srlv(scratch1, scratch1, scratch2);
2488 __ And(scratch2, scratch1, 0xc0000000);
2489 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2490 __ SmiTag(v0, scratch1);
2494 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2495 __ BranchOnOverflow(&stub_call, scratch1);
2498 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2499 __ BranchOnOverflow(&stub_call, scratch1);
2502 __ SmiUntag(scratch1, right);
2503 __ Mul(scratch2, v0, left, scratch1);
2504 __ sra(scratch1, v0, 31);
2505 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
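        // A zero product needs care: if left + right is negative the exact
        // result is -0, which cannot be represented as a smi, so fall back
        // to the stub; otherwise the result really is +0.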
2506 __ Branch(&done, ne, v0, Operand(zero_reg));
2507 __ Addu(scratch2, right, left);
2508 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2509 DCHECK(Smi::FromInt(0) == 0);
2510 __ mov(v0, zero_reg);
2514 __ Or(v0, left, Operand(right));
2516 case Token::BIT_AND:
2517 __ And(v0, left, Operand(right));
2519 case Token::BIT_XOR:
2520 __ Xor(v0, left, Operand(right));
2527 context()->Plug(v0);
2531 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2532 // Constructor is in v0.
2533 DCHECK(lit != NULL);
  // No access check is needed here since the constructor is created by the
  // class literal.
2538 Register scratch = a1;
2540 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2543 for (int i = 0; i < lit->properties()->length(); i++) {
2544 ObjectLiteral::Property* property = lit->properties()->at(i);
2545 Expression* value = property->value();
2547 if (property->is_static()) {
2548 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor
2550 __ lw(scratch, MemOperand(sp, 0)); // prototype
2553 EmitPropertyKey(property, lit->GetIdForProperty(i));
    // The static prototype property is read only. The non-computed property
    // name case is handled in the parser. Since this is the only case where
    // we need to check for an own read-only property, we special-case it here
    // instead of performing the check for every property.
2559 if (property->is_static() && property->is_computed_name()) {
2560 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2564 VisitForStackValue(value);
2565 EmitSetHomeObjectIfNeeded(value, 2);
2567 switch (property->kind()) {
2568 case ObjectLiteral::Property::CONSTANT:
2569 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2570 case ObjectLiteral::Property::PROTOTYPE:
2572 case ObjectLiteral::Property::COMPUTED:
2573 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2576 case ObjectLiteral::Property::GETTER:
2577 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2579 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2582 case ObjectLiteral::Property::SETTER:
2583 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2585 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2594 __ CallRuntime(Runtime::kToFastProperties, 1);
2597 __ CallRuntime(Runtime::kToFastProperties, 1);
2601 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2602 __ mov(a0, result_register());
2604 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2605 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2606 CallIC(code, expr->BinaryOperationFeedbackId());
2607 patch_site.EmitPatchInfo();
2608 context()->Plug(v0);
2612 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2613 DCHECK(expr->IsValidReferenceExpression());
2615 Property* prop = expr->AsProperty();
2616 LhsKind assign_type = GetAssignType(prop);
2618 switch (assign_type) {
2620 Variable* var = expr->AsVariableProxy()->var();
2621 EffectContext context(this);
2622 EmitVariableAssignment(var, Token::ASSIGN);
2625 case NAMED_PROPERTY: {
2626 __ push(result_register()); // Preserve value.
2627 VisitForAccumulatorValue(prop->obj());
2628 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2629 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2630 __ li(StoreDescriptor::NameRegister(),
2631 Operand(prop->key()->AsLiteral()->value()));
2635 case NAMED_SUPER_PROPERTY: {
2637 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2638 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2639 // stack: value, this; v0: home_object
2640 Register scratch = a2;
2641 Register scratch2 = a3;
2642 __ mov(scratch, result_register()); // home_object
2643 __ lw(v0, MemOperand(sp, kPointerSize)); // value
2644 __ lw(scratch2, MemOperand(sp, 0)); // this
2645 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this
2646 __ sw(scratch, MemOperand(sp, 0)); // home_object
2647 // stack: this, home_object; v0: value
2648 EmitNamedSuperPropertyStore(prop);
2651 case KEYED_SUPER_PROPERTY: {
2653 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2654 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2655 __ Push(result_register());
2656 VisitForAccumulatorValue(prop->key());
2657 Register scratch = a2;
2658 Register scratch2 = a3;
2659 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2660 // stack: value, this, home_object; v0: key, a3: value
2661 __ lw(scratch, MemOperand(sp, kPointerSize)); // this
2662 __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2663 __ lw(scratch, MemOperand(sp, 0)); // home_object
2664 __ sw(scratch, MemOperand(sp, kPointerSize));
2665 __ sw(v0, MemOperand(sp, 0));
2666 __ Move(v0, scratch2);
2667 // stack: this, home_object, key; v0: value.
2668 EmitKeyedSuperPropertyStore(prop);
2671 case KEYED_PROPERTY: {
2672 __ push(result_register()); // Preserve value.
2673 VisitForStackValue(prop->obj());
2674 VisitForAccumulatorValue(prop->key());
2675 __ mov(StoreDescriptor::NameRegister(), result_register());
2676 __ Pop(StoreDescriptor::ValueRegister(),
2677 StoreDescriptor::ReceiverRegister());
2679 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2684 context()->Plug(v0);
2688 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2689 Variable* var, MemOperand location) {
2690 __ sw(result_register(), location);
2691 if (var->IsContextSlot()) {
2692 // RecordWrite may destroy all its register arguments.
2693 __ Move(a3, result_register());
2694 int offset = Context::SlotOffset(var->index());
2695 __ RecordWriteContextSlot(
2696 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2701 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2702 if (var->IsUnallocated()) {
2703 // Global var, const, or let.
2704 __ mov(StoreDescriptor::ValueRegister(), result_register());
2705 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2706 __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2709 } else if (var->mode() == LET && op != Token::INIT_LET) {
    // A non-initializing assignment to a let variable must first check that
    // the binding has been initialized (hole check), and the store may need
    // a write barrier.
2711 DCHECK(!var->IsLookupSlot());
2712 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2714 MemOperand location = VarOperand(var, a1);
2715 __ lw(a3, location);
2716 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2717 __ Branch(&assign, ne, a3, Operand(t0));
2718 __ li(a3, Operand(var->name()));
2720 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2721 // Perform the assignment.
2723 EmitStoreToStackLocalOrContextSlot(var, location);
2725 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
    // An assignment to a const variable is always an error: throw a
    // ReferenceError if the binding is still uninitialized (the hole),
    // otherwise throw a const assignment error.
2727 DCHECK(!var->IsLookupSlot());
2728 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2730 MemOperand location = VarOperand(var, a1);
2731 __ lw(a3, location);
2732 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2733 __ Branch(&const_error, ne, a3, Operand(at));
2734 __ li(a3, Operand(var->name()));
2736 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2737 __ bind(&const_error);
2738 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2740 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2741 if (var->IsLookupSlot()) {
2742 // Assignment to var.
2743 __ li(a1, Operand(var->name()));
2744 __ li(a0, Operand(Smi::FromInt(language_mode())));
2745 __ Push(v0, cp, a1, a0); // Value, context, name, language mode.
2746 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
2750 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2751 MemOperand location = VarOperand(var, a1);
2752 if (generate_debug_code_ && op == Token::INIT_LET) {
2753 // Check for an uninitialized let binding.
2754 __ lw(a2, location);
2755 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2756 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2758 EmitStoreToStackLocalOrContextSlot(var, location);
2761 } else if (op == Token::INIT_CONST_LEGACY) {
2762 // Const initializers need a write barrier.
2763 DCHECK(!var->IsParameter()); // No const parameters.
2764 if (var->IsLookupSlot()) {
2765 __ li(a0, Operand(var->name()));
2766 __ Push(v0, cp, a0); // Context and name.
2767 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2769 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2771 MemOperand location = VarOperand(var, a1);
2772 __ lw(a2, location);
2773 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2774 __ Branch(&skip, ne, a2, Operand(at));
2775 EmitStoreToStackLocalOrContextSlot(var, location);
2780 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2781 if (is_strict(language_mode())) {
2782 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2784 // Silently ignore store in sloppy mode.
2789 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2790 // Assignment to a property, using a named store IC.
2791 Property* prop = expr->target()->AsProperty();
2792 DCHECK(prop != NULL);
2793 DCHECK(prop->key()->IsLiteral());
2795 // Record source code position before IC call.
2796 SetSourcePosition(expr->position());
2797 __ mov(StoreDescriptor::ValueRegister(), result_register());
2798 __ li(StoreDescriptor::NameRegister(),
2799 Operand(prop->key()->AsLiteral()->value()));
2800 __ pop(StoreDescriptor::ReceiverRegister());
2801 CallStoreIC(expr->AssignmentFeedbackId());
2803 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2804 context()->Plug(v0);
2808 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2809 // Assignment to named property of super.
2811 // stack : receiver ('this'), home_object
2812 DCHECK(prop != NULL);
2813 Literal* key = prop->key()->AsLiteral();
2814 DCHECK(key != NULL);
2816 __ Push(key->value());
2818 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2819 : Runtime::kStoreToSuper_Sloppy),
2824 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
2827 // stack : receiver ('this'), home_object, key
2828 DCHECK(prop != NULL);
2832 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2833 : Runtime::kStoreKeyedToSuper_Sloppy),
2838 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2839 // Assignment to a property, using a keyed store IC.
2841 // Record source code position before IC call.
2842 SetSourcePosition(expr->position());
2843 // Call keyed store IC.
2844 // The arguments are:
2845 // - a0 is the value,
2847 // - a2 is the receiver.
2848 __ mov(StoreDescriptor::ValueRegister(), result_register());
2849 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2850 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2853 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2854 CallIC(ic, expr->AssignmentFeedbackId());
2856 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2857 context()->Plug(v0);
2861 void FullCodeGenerator::VisitProperty(Property* expr) {
2862 Comment cmnt(masm_, "[ Property");
2863 Expression* key = expr->key();
2865 if (key->IsPropertyName()) {
2866 if (!expr->IsSuperAccess()) {
2867 VisitForAccumulatorValue(expr->obj());
2868 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2869 EmitNamedPropertyLoad(expr);
2871 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2872 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2873 __ Push(result_register());
2874 EmitNamedSuperPropertyLoad(expr);
2877 if (!expr->IsSuperAccess()) {
2878 VisitForStackValue(expr->obj());
2879 VisitForAccumulatorValue(expr->key());
2880 __ Move(LoadDescriptor::NameRegister(), v0);
2881 __ pop(LoadDescriptor::ReceiverRegister());
2882 EmitKeyedPropertyLoad(expr);
2884 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2885 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2886 __ Push(result_register());
2887 VisitForStackValue(expr->key());
2888 EmitKeyedSuperPropertyLoad(expr);
2891 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2892 context()->Plug(v0);
2896 void FullCodeGenerator::CallIC(Handle<Code> code,
2897 TypeFeedbackId id) {
2899 __ Call(code, RelocInfo::CODE_TARGET, id);
2903 // Code common for calls using the IC.
2904 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2905 Expression* callee = expr->expression();
2907 CallICState::CallType call_type =
2908 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2910 // Get the target function.
2911 if (call_type == CallICState::FUNCTION) {
2912 { StackValueContext context(this);
2913 EmitVariableLoad(callee->AsVariableProxy());
2914 PrepareForBailout(callee, NO_REGISTERS);
2916 // Push undefined as receiver. This is patched in the method prologue if it
2917 // is a sloppy mode method.
2918 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2921 // Load the function from the receiver.
2922 DCHECK(callee->IsProperty());
2923 DCHECK(!callee->AsProperty()->IsSuperAccess());
2924 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2925 EmitNamedPropertyLoad(callee->AsProperty());
2926 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2927 // Push the target function under the receiver.
2928 __ lw(at, MemOperand(sp, 0));
2930 __ sw(v0, MemOperand(sp, kPointerSize));
2933 EmitCall(expr, call_type);
2937 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2938 Expression* callee = expr->expression();
2939 DCHECK(callee->IsProperty());
2940 Property* prop = callee->AsProperty();
2941 DCHECK(prop->IsSuperAccess());
2943 SetSourcePosition(prop->position());
2944 Literal* key = prop->key()->AsLiteral();
2945 DCHECK(!key->value()->IsSmi());
2946 // Load the function from the receiver.
2947 const Register scratch = a1;
2948 SuperReference* super_ref = prop->obj()->AsSuperReference();
2949 EmitLoadHomeObject(super_ref);
2950 __ mov(scratch, v0);
2951 VisitForAccumulatorValue(super_ref->this_var());
2952 __ Push(scratch, v0, v0, scratch);
2953 __ Push(key->value());
2957 // - this (receiver)
2958 // - this (receiver) <-- LoadFromSuper will pop here and below.
2961 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2963 // Replace home_object with target function.
2964 __ sw(v0, MemOperand(sp, kPointerSize));
2967 // - target function
2968 // - this (receiver)
2969 EmitCall(expr, CallICState::METHOD);
2973 // Code common for calls using the IC.
2974 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2977 VisitForAccumulatorValue(key);
2979 Expression* callee = expr->expression();
2981 // Load the function from the receiver.
2982 DCHECK(callee->IsProperty());
2983 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2984 __ Move(LoadDescriptor::NameRegister(), v0);
2985 EmitKeyedPropertyLoad(callee->AsProperty());
2986 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2988 // Push the target function under the receiver.
2989 __ lw(at, MemOperand(sp, 0));
2991 __ sw(v0, MemOperand(sp, kPointerSize));
2993 EmitCall(expr, CallICState::METHOD);
2997 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2998 Expression* callee = expr->expression();
2999 DCHECK(callee->IsProperty());
3000 Property* prop = callee->AsProperty();
3001 DCHECK(prop->IsSuperAccess());
3003 SetSourcePosition(prop->position());
3004 // Load the function from the receiver.
3005 const Register scratch = a1;
3006 SuperReference* super_ref = prop->obj()->AsSuperReference();
3007 EmitLoadHomeObject(super_ref);
3008 __ Move(scratch, v0);
3009 VisitForAccumulatorValue(super_ref->this_var());
3010 __ Push(scratch, v0, v0, scratch);
3011 VisitForStackValue(prop->key());
3015 // - this (receiver)
3016 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3019 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3021 // Replace home_object with target function.
3022 __ sw(v0, MemOperand(sp, kPointerSize));
3025 // - target function
3026 // - this (receiver)
3027 EmitCall(expr, CallICState::METHOD);
3031 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3032 // Load the arguments.
3033 ZoneList<Expression*>* args = expr->arguments();
3034 int arg_count = args->length();
3035 { PreservePositionScope scope(masm()->positions_recorder());
3036 for (int i = 0; i < arg_count; i++) {
3037 VisitForStackValue(args->at(i));
3041 // Record source position of the IC call.
3042 SetSourcePosition(expr->position());
3043 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3044 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3045 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3046 // Don't assign a type feedback id to the IC, since type feedback is provided
3047 // by the vector above.
3050 RecordJSReturnSite(expr);
3051 // Restore context register.
3052 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3053 context()->DropAndPlug(1, v0);
3057 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3058 // t3: copy of the first argument or undefined if it doesn't exist.
3059 if (arg_count > 0) {
3060 __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
3062 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
3065 // t2: the receiver of the enclosing function.
3066 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3068 // t1: the receiver of the enclosing function.
3069 int receiver_offset = 2 + info_->scope()->num_parameters();
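  // The receiver sits above all parameters, which in turn sit above the
  // caller fp and return address slots, hence 2 + num_parameters.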
3070 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
3072 // t0: the language mode.
3073 __ li(t0, Operand(Smi::FromInt(language_mode())));
  // a1: the start position of the scope the call resides in.
3076 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3078 // Do the runtime call.
3080 __ Push(t2, t1, t0, a1);
3081 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3085 void FullCodeGenerator::EmitLoadSuperConstructor() {
3086 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3088 __ CallRuntime(Runtime::kGetPrototype, 1);
3092 void FullCodeGenerator::VisitCall(Call* expr) {
3094 // We want to verify that RecordJSReturnSite gets called on all paths
3095 // through this function. Avoid early returns.
3096 expr->return_is_recorded_ = false;
3099 Comment cmnt(masm_, "[ Call");
3100 Expression* callee = expr->expression();
3101 Call::CallType call_type = expr->GetCallType(isolate());
3103 if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call the runtime to resolve the function
    // we need to call and the receiver of the call (see
    // Runtime::kResolvePossiblyDirectEval below). Then we call the resolved
    // function using the given arguments.
3108 ZoneList<Expression*>* args = expr->arguments();
3109 int arg_count = args->length();
3111 { PreservePositionScope pos_scope(masm()->positions_recorder());
3112 VisitForStackValue(callee);
3113 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3114 __ push(a2); // Reserved receiver slot.
3116 // Push the arguments.
3117 for (int i = 0; i < arg_count; i++) {
3118 VisitForStackValue(args->at(i));
      // Push a copy of the function (found below the arguments) and
      // resolve eval.
3123 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3125 EmitResolvePossiblyDirectEval(arg_count);
3127 // The runtime call returns a pair of values in v0 (function) and
3128 // v1 (receiver). Touch up the stack with the right values.
3129 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3130 __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
3132 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3134 // Record source position for debugger.
3135 SetSourcePosition(expr->position());
3136 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3137 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3139 RecordJSReturnSite(expr);
3140 // Restore context register.
3141 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3142 context()->DropAndPlug(1, v0);
3143 } else if (call_type == Call::GLOBAL_CALL) {
3144 EmitCallWithLoadIC(expr);
3145 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3146 // Call to a lookup slot (dynamically introduced variable).
3147 VariableProxy* proxy = callee->AsVariableProxy();
3150 { PreservePositionScope scope(masm()->positions_recorder());
3151 // Generate code for loading from variables potentially shadowed
3152 // by eval-introduced variables.
3153 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3157 // Call the runtime to find the function to call (returned in v0)
3158 // and the object holding it (returned in v1).
3159 DCHECK(!context_register().is(a2));
3160 __ li(a2, Operand(proxy->name()));
3161 __ Push(context_register(), a2);
3162 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3163 __ Push(v0, v1); // Function, receiver.
3164 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
3169 if (done.is_linked()) {
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
3177 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3182 // The receiver is either the global receiver or an object found
3183 // by LoadContextSlot.
3185 } else if (call_type == Call::PROPERTY_CALL) {
3186 Property* property = callee->AsProperty();
3187 bool is_named_call = property->key()->IsPropertyName();
3188 if (property->IsSuperAccess()) {
3189 if (is_named_call) {
3190 EmitSuperCallWithLoadIC(expr);
3192 EmitKeyedSuperCallWithLoadIC(expr);
3196 PreservePositionScope scope(masm()->positions_recorder());
3197 VisitForStackValue(property->obj());
3199 if (is_named_call) {
3200 EmitCallWithLoadIC(expr);
3202 EmitKeyedCallWithLoadIC(expr, property->key());
3205 } else if (call_type == Call::SUPER_CALL) {
3206 EmitSuperConstructorCall(expr);
3208 DCHECK(call_type == Call::OTHER_CALL);
3209 // Call to an arbitrary expression not handled specially above.
3210 { PreservePositionScope scope(masm()->positions_recorder());
3211 VisitForStackValue(callee);
3213 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3215 // Emit function call.
3220 // RecordJSReturnSite should have been called.
3221 DCHECK(expr->return_is_recorded_);
3226 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3227 Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the arguments.
  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
3235 DCHECK(!expr->expression()->IsSuperReference());
3236 VisitForStackValue(expr->expression());
3238 // Push the arguments ("left-to-right") on the stack.
3239 ZoneList<Expression*>* args = expr->arguments();
3240 int arg_count = args->length();
3241 for (int i = 0; i < arg_count; i++) {
3242 VisitForStackValue(args->at(i));
3245 // Call the construct call builtin that handles allocation and
3246 // constructor invocation.
3247 SetSourcePosition(expr->position());
3249 // Load function and argument count into a1 and a0.
3250 __ li(a0, Operand(arg_count));
3251 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
3253 // Record call targets in unoptimized code.
3254 if (FLAG_pretenuring_call_new) {
3255 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3256 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3257 expr->CallNewFeedbackSlot().ToInt() + 1);
3260 __ li(a2, FeedbackVector());
3261 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3263 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3264 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3265 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3266 context()->Plug(v0);
3270 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3271 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3272 GetVar(result_register(), new_target_var);
3273 __ Push(result_register());
3275 EmitLoadSuperConstructor();
3276 __ push(result_register());
3278 // Push the arguments ("left-to-right") on the stack.
3279 ZoneList<Expression*>* args = expr->arguments();
3280 int arg_count = args->length();
3281 for (int i = 0; i < arg_count; i++) {
3282 VisitForStackValue(args->at(i));
3285 // Call the construct call builtin that handles allocation and
3286 // constructor invocation.
3287 SetSourcePosition(expr->position());
3289 // Load function and argument count into a1 and a0.
3290 __ li(a0, Operand(arg_count));
3291 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
3293 // Record call targets in unoptimized code.
3294 if (FLAG_pretenuring_call_new) {
3296 /* TODO(dslomov): support pretenuring.
3297 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3298 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3299 expr->CallNewFeedbackSlot().ToInt() + 1);
3303 __ li(a2, FeedbackVector());
3304 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3306 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3307 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3311 RecordJSReturnSite(expr);
3313 SuperReference* super_ref = expr->expression()->AsSuperReference();
3314 Variable* this_var = super_ref->this_var()->var();
3315 GetVar(a1, this_var);
3316 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3317 Label uninitialized_this;
3318 __ Branch(&uninitialized_this, eq, a1, Operand(at));
3319 __ li(a0, Operand(this_var->name()));
3321 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3322 __ bind(&uninitialized_this);
3324 EmitVariableAssignment(this_var, Token::INIT_CONST);
3325 context()->Plug(v0);
3329 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3330 ZoneList<Expression*>* args = expr->arguments();
3331 DCHECK(args->length() == 1);
3333 VisitForAccumulatorValue(args->at(0));
3335 Label materialize_true, materialize_false;
3336 Label* if_true = NULL;
3337 Label* if_false = NULL;
3338 Label* fall_through = NULL;
3339 context()->PrepareTest(&materialize_true, &materialize_false,
3340 &if_true, &if_false, &fall_through);
3342 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3344 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
3346 context()->Plug(if_true, if_false);
3350 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3351 ZoneList<Expression*>* args = expr->arguments();
3352 DCHECK(args->length() == 1);
3354 VisitForAccumulatorValue(args->at(0));
3356 Label materialize_true, materialize_false;
3357 Label* if_true = NULL;
3358 Label* if_false = NULL;
3359 Label* fall_through = NULL;
3360 context()->PrepareTest(&materialize_true, &materialize_false,
3361 &if_true, &if_false, &fall_through);
3363 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3364 __ NonNegativeSmiTst(v0, at);
3365 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3367 context()->Plug(if_true, if_false);
3371 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3372 ZoneList<Expression*>* args = expr->arguments();
3373 DCHECK(args->length() == 1);
3375 VisitForAccumulatorValue(args->at(0));
3377 Label materialize_true, materialize_false;
3378 Label* if_true = NULL;
3379 Label* if_false = NULL;
3380 Label* fall_through = NULL;
3381 context()->PrepareTest(&materialize_true, &materialize_false,
3382 &if_true, &if_false, &fall_through);
3384 __ JumpIfSmi(v0, if_false);
3385 __ LoadRoot(at, Heap::kNullValueRootIndex);
3386 __ Branch(if_true, eq, v0, Operand(at));
3387 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3388 // Undetectable objects behave like undefined when tested with typeof.
3389 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3390 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3391 __ Branch(if_false, ne, at, Operand(zero_reg));
3392 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3393 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3394 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3395 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3396 if_true, if_false, fall_through);
3398 context()->Plug(if_true, if_false);
3402 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3403 ZoneList<Expression*>* args = expr->arguments();
3404 DCHECK(args->length() == 1);
3406 VisitForAccumulatorValue(args->at(0));
3408 Label materialize_true, materialize_false;
3409 Label* if_true = NULL;
3410 Label* if_false = NULL;
3411 Label* fall_through = NULL;
3412 context()->PrepareTest(&materialize_true, &materialize_false,
3413 &if_true, &if_false, &fall_through);
3415 __ JumpIfSmi(v0, if_false);
3416 __ GetObjectType(v0, a1, a1);
3417 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3418 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3419 if_true, if_false, fall_through);
3421 context()->Plug(if_true, if_false);
3425 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3426 ZoneList<Expression*>* args = expr->arguments();
3427 DCHECK(args->length() == 1);
3429 VisitForAccumulatorValue(args->at(0));
3431 Label materialize_true, materialize_false;
3432 Label* if_true = NULL;
3433 Label* if_false = NULL;
3434 Label* fall_through = NULL;
3435 context()->PrepareTest(&materialize_true, &materialize_false,
3436 &if_true, &if_false, &fall_through);
3438 __ JumpIfSmi(v0, if_false);
3439 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3440 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3441 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3442 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3443 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3445 context()->Plug(if_true, if_false);
3449 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3450 CallRuntime* expr) {
3451 ZoneList<Expression*>* args = expr->arguments();
3452 DCHECK(args->length() == 1);
3454 VisitForAccumulatorValue(args->at(0));
3456 Label materialize_true, materialize_false, skip_lookup;
3457 Label* if_true = NULL;
3458 Label* if_false = NULL;
3459 Label* fall_through = NULL;
3460 context()->PrepareTest(&materialize_true, &materialize_false,
3461 &if_true, &if_false, &fall_through);
3463 __ AssertNotSmi(v0);
3465 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3466 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
3467 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3468 __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3470 // Check for fast case object. Generate false result for slow case object.
3471 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3472 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3473 __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3474 __ Branch(if_false, eq, a2, Operand(t0));
  // Look for the valueOf name in the descriptor array, and indicate false if
  // it is found. Since we omit an enumeration index check, a valueOf property
  // added via a transition that shares this descriptor array yields a false
  // positive.
3479 Label entry, loop, done;
3481 // Skip loop if no descriptors are valid.
3482 __ NumberOfOwnDescriptors(a3, a1);
3483 __ Branch(&done, eq, a3, Operand(zero_reg));
3485 __ LoadInstanceDescriptors(a1, t0);
3486 // t0: descriptor array.
3487 // a3: valid entries in the descriptor array.
3488 STATIC_ASSERT(kSmiTag == 0);
3489 STATIC_ASSERT(kSmiTagSize == 1);
3490 STATIC_ASSERT(kPointerSize == 4);
3491 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3493 // Calculate location of the first key name.
3494 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3495 // Calculate the end of the descriptor array.
3497 __ sll(t1, a3, kPointerSizeLog2);
3498 __ Addu(a2, a2, t1);
3500 // Loop through all the keys in the descriptor array. If one of these is the
3501 // string "valueOf" the result is false.
3502 // The use of t2 to store the valueOf string assumes that it is not otherwise
3503 // used in the loop below.
3504 __ li(t2, Operand(isolate()->factory()->value_of_string()));
3507 __ lw(a3, MemOperand(t0, 0));
3508 __ Branch(if_false, eq, a3, Operand(t2));
3509 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3511 __ Branch(&loop, ne, t0, Operand(a2));
3515 // Set the bit in the map to indicate that there is no local valueOf field.
3516 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3517 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3518 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3520 __ bind(&skip_lookup);
  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If it is not, the result
  // is false.
3524 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3525 __ JumpIfSmi(a2, if_false);
3526 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3527 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3528 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3529 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3530 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3531 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3533 context()->Plug(if_true, if_false);
3537 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3538 ZoneList<Expression*>* args = expr->arguments();
3539 DCHECK(args->length() == 1);
3541 VisitForAccumulatorValue(args->at(0));
3543 Label materialize_true, materialize_false;
3544 Label* if_true = NULL;
3545 Label* if_false = NULL;
3546 Label* fall_through = NULL;
3547 context()->PrepareTest(&materialize_true, &materialize_false,
3548 &if_true, &if_false, &fall_through);
3550 __ JumpIfSmi(v0, if_false);
3551 __ GetObjectType(v0, a1, a2);
3552 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3553 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3554 __ Branch(if_false);
3556 context()->Plug(if_true, if_false);
3560 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3561 ZoneList<Expression*>* args = expr->arguments();
3562 DCHECK(args->length() == 1);
3564 VisitForAccumulatorValue(args->at(0));
3566 Label materialize_true, materialize_false;
3567 Label* if_true = NULL;
3568 Label* if_false = NULL;
3569 Label* fall_through = NULL;
3570 context()->PrepareTest(&materialize_true, &materialize_false,
3571 &if_true, &if_false, &fall_through);
3573 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3574 __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3575 __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
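  // A heap number stores its IEEE-754 double as two 32-bit words; -0.0 is the
  // pattern with only the sign bit set: exponent word 0x80000000 and mantissa
  // word 0x00000000.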
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);
3583 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3584 Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
3586 context()->Plug(if_true, if_false);
3590 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3591 ZoneList<Expression*>* args = expr->arguments();
3592 DCHECK(args->length() == 1);
3594 VisitForAccumulatorValue(args->at(0));
3596 Label materialize_true, materialize_false;
3597 Label* if_true = NULL;
3598 Label* if_false = NULL;
3599 Label* fall_through = NULL;
3600 context()->PrepareTest(&materialize_true, &materialize_false,
3601 &if_true, &if_false, &fall_through);
3603 __ JumpIfSmi(v0, if_false);
3604 __ GetObjectType(v0, a1, a1);
3605 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3606 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3607 if_true, if_false, fall_through);
3609 context()->Plug(if_true, if_false);
3613 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3614 ZoneList<Expression*>* args = expr->arguments();
3615 DCHECK(args->length() == 1);
3617 VisitForAccumulatorValue(args->at(0));
3619 Label materialize_true, materialize_false;
3620 Label* if_true = NULL;
3621 Label* if_false = NULL;
3622 Label* fall_through = NULL;
3623 context()->PrepareTest(&materialize_true, &materialize_false,
3624 &if_true, &if_false, &fall_through);
3626 __ JumpIfSmi(v0, if_false);
3627 __ GetObjectType(v0, a1, a1);
3628 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3629 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3631 context()->Plug(if_true, if_false);
3635 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3636 ZoneList<Expression*>* args = expr->arguments();
3637 DCHECK(args->length() == 1);
3639 VisitForAccumulatorValue(args->at(0));
3641 Label materialize_true, materialize_false;
3642 Label* if_true = NULL;
3643 Label* if_false = NULL;
3644 Label* fall_through = NULL;
3645 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3646 &if_false, &fall_through);
3648 __ JumpIfSmi(v0, if_false);
  Register map = a1;
  Register type_reg = a2;
3651 __ GetObjectType(v0, map, type_reg);
3652 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3653 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3654 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3655 if_true, if_false, fall_through);
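  // Subtracting FIRST_JS_PROXY_TYPE first lets the single unsigned 'ls'
  // comparison above test membership in the whole range
  // [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].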
3657 context()->Plug(if_true, if_false);
3661 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3662 DCHECK(expr->arguments()->length() == 0);
3664 Label materialize_true, materialize_false;
3665 Label* if_true = NULL;
3666 Label* if_false = NULL;
3667 Label* fall_through = NULL;
3668 context()->PrepareTest(&materialize_true, &materialize_false,
3669 &if_true, &if_false, &fall_through);
3671 // Get the frame pointer for the calling frame.
3672 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3674 // Skip the arguments adaptor frame if it exists.
3675 Label check_frame_marker;
3676 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3677 __ Branch(&check_frame_marker, ne,
3678 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3679 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3681 // Check the marker in the calling frame.
3682 __ bind(&check_frame_marker);
3683 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3684 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3685 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3686 if_true, if_false, fall_through);
3688 context()->Plug(if_true, if_false);
3692 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3693 ZoneList<Expression*>* args = expr->arguments();
3694 DCHECK(args->length() == 2);
3696 // Load the two objects into registers and perform the comparison.
3697 VisitForStackValue(args->at(0));
3698 VisitForAccumulatorValue(args->at(1));
3700 Label materialize_true, materialize_false;
3701 Label* if_true = NULL;
3702 Label* if_false = NULL;
3703 Label* fall_through = NULL;
3704 context()->PrepareTest(&materialize_true, &materialize_false,
3705 &if_true, &if_false, &fall_through);
  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3709 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3711 context()->Plug(if_true, if_false);
3715 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3716 ZoneList<Expression*>* args = expr->arguments();
3717 DCHECK(args->length() == 1);
3719 // ArgumentsAccessStub expects the key in a1 and the formal
3720 // parameter count in a0.
3721 VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
3730 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3731 DCHECK(expr->arguments()->length() == 0);
3733 // Get the number of formal parameters.
3734 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  // Check if the calling frame is an arguments adaptor frame.
  Label exit;
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
3751 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3752 ZoneList<Expression*>* args = expr->arguments();
3753 DCHECK(args->length() == 1);
3754 Label done, null, function, non_function_constructor;
3756 VisitForAccumulatorValue(args->at(0));
3758 // If the object is a smi, we return null.
3759 __ JumpIfSmi(v0, &null);
3761 // Check that the object is a JS object but take special care of JS
3762 // functions to make sure they have 'Function' as their class.
3763 // Assume that there are only two callable types, and one of them is at
3764 // either end of the type range for JS object types. Saves extra comparisons.
3765 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3766 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3767 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3769 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3770 FIRST_SPEC_OBJECT_TYPE + 1);
3771 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3773 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3774 LAST_SPEC_OBJECT_TYPE - 1);
3775 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3776 // Assume that there is no larger type.
3777 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
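  // With the two callable types sitting at the ends of the JS object type
  // range, everything strictly between them is a non-callable object, so the
  // two equality checks above suffice to recognize functions.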
3779 // Check if the constructor in the map is a JS function.
3780 Register instance_type = a2;
3781 __ GetMapConstructor(v0, v0, a1, instance_type);
3782 __ Branch(&non_function_constructor, ne, instance_type,
3783 Operand(JS_FUNCTION_TYPE));
3785 // v0 now contains the constructor function. Grab the
3786 // instance class name from there.
3787 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);
  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);
  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);
  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);
  // All done.
  __ bind(&done);
  context()->Plug(v0);
3812 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3813 // Load the arguments on the stack and call the stub.
3814 SubStringStub stub(isolate());
3815 ZoneList<Expression*>* args = expr->arguments();
3816 DCHECK(args->length() == 3);
3817 VisitForStackValue(args->at(0));
3818 VisitForStackValue(args->at(1));
3819 VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
3825 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3826 // Load the arguments on the stack and call the stub.
3827 RegExpExecStub stub(isolate());
3828 ZoneList<Expression*>* args = expr->arguments();
3829 DCHECK(args->length() == 4);
3830 VisitForStackValue(args->at(0));
3831 VisitForStackValue(args->at(1));
3832 VisitForStackValue(args->at(2));
3833 VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
3839 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3840 ZoneList<Expression*>* args = expr->arguments();
3841 DCHECK(args->length() == 1);
3843 VisitForAccumulatorValue(args->at(0)); // Load the object.
  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
3848 // If the object is not a value type, return the object.
3849 __ GetObjectType(v0, a1, a1);
3850 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3852 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
  __ bind(&done);
  context()->Plug(v0);
3859 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3860 ZoneList<Expression*>* args = expr->arguments();
3861 DCHECK(args->length() == 2);
3862 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3863 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3865 VisitForAccumulatorValue(args->at(0)); // Load the object.
3867 Label runtime, done, not_date_object;
3868 Register object = v0;
3869 Register result = v0;
3870 Register scratch0 = t5;
3871 Register scratch1 = a1;
  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
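  // Date fields below JSDate::kFirstUncachedField are cached in the JSDate
  // object and are valid only while the object's cache stamp matches the
  // isolate's date_cache_stamp; otherwise the slow C function recomputes them.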
  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
3902 context()->Plug(v0);
3906 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3907 ZoneList<Expression*>* args = expr->arguments();
3908 DCHECK_EQ(3, args->length());
3910 Register string = v0;
3911 Register index = a1;
3912 Register value = a2;
3914 VisitForStackValue(args->at(0)); // index
3915 VisitForStackValue(args->at(1)); // value
3916 VisitForAccumulatorValue(args->at(2)); // string
3917 __ Pop(index, value);
3919 if (FLAG_debug_code) {
3920 __ SmiTst(value, at);
3921 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3922 __ SmiTst(index, at);
3923 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3924 __ SmiUntag(index, index);
3925 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3926 Register scratch = t5;
3927 __ EmitSeqStringSetCharCheck(
3928 string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
3939 context()->Plug(string);
3943 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3944 ZoneList<Expression*>* args = expr->arguments();
3945 DCHECK_EQ(3, args->length());
3947 Register string = v0;
3948 Register index = a1;
3949 Register value = a2;
3951 VisitForStackValue(args->at(0)); // index
3952 VisitForStackValue(args->at(1)); // value
3953 VisitForAccumulatorValue(args->at(2)); // string
3954 __ Pop(index, value);
3956 if (FLAG_debug_code) {
3957 __ SmiTst(value, at);
3958 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3959 __ SmiTst(index, at);
3960 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3961 __ SmiUntag(index, index);
3962 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3963 Register scratch = t5;
3964 __ EmitSeqStringSetCharCheck(
3965 string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3973 __ Addu(at, at, index);
3974 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
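  // The index is still smi-tagged here: with a 1-bit zero tag a smi holds
  // value * 2, which is exactly the byte offset of a two-byte character, so
  // no untagging is needed (hence the STATIC_ASSERT above).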
3975 __ sh(value, MemOperand(at));
3976 context()->Plug(string);
3980 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3981 // Load the arguments on the stack and call the runtime function.
3982 ZoneList<Expression*>* args = expr->arguments();
3983 DCHECK(args->length() == 2);
3984 VisitForStackValue(args->at(0));
3985 VisitForStackValue(args->at(1));
3986 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
3992 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3993 ZoneList<Expression*>* args = expr->arguments();
3994 DCHECK(args->length() == 2);
3996 VisitForStackValue(args->at(0)); // Load the object.
3997 VisitForAccumulatorValue(args->at(1)); // Load the value.
3998 __ pop(a1); // v0 = value. a1 = object.
  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);
4004 // If the object is not a value type, return the value.
4005 __ GetObjectType(a1, a2, a2);
4006 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
4009 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
4010 // Update the write barrier. Save the value as it will be
4011 // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
  __ bind(&done);
  context()->Plug(v0);
4021 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4022 ZoneList<Expression*>* args = expr->arguments();
4023 DCHECK_EQ(args->length(), 1);
4025 // Load the argument into a0 and call the stub.
4026 VisitForAccumulatorValue(args->at(0));
4027 __ mov(a0, result_register());
4029 NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
4035 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4036 ZoneList<Expression*>* args = expr->arguments();
4037 DCHECK(args->length() == 1);
4039 VisitForAccumulatorValue(args->at(0));
  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
4054 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4055 ZoneList<Expression*>* args = expr->arguments();
4056 DCHECK(args->length() == 2);
4058 VisitForStackValue(args->at(0));
4059 VisitForAccumulatorValue(args->at(1));
4060 __ mov(a0, result_register());
4062 Register object = a1;
4063 Register index = a0;
4064 Register result = v0;
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);
4081 __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);
4087 __ bind(&need_conversion);
4088 // Load the undefined value into the result register, which will
4089 // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);
4093 NopRuntimeCallHelper call_helper;
4094 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
  __ bind(&done);
  context()->Plug(result);
4101 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4102 ZoneList<Expression*>* args = expr->arguments();
4103 DCHECK(args->length() == 2);
4105 VisitForStackValue(args->at(0));
4106 VisitForAccumulatorValue(args->at(1));
4107 __ mov(a0, result_register());
4109 Register object = a1;
4110 Register index = a0;
4111 Register scratch = a3;
4112 Register result = v0;
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object, index, scratch, result,
                                  &need_conversion, &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);
4130 __ bind(&index_out_of_range);
4131 // When the index is out of range, the spec requires us to return
4132 // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);
4136 __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);
4142 NopRuntimeCallHelper call_helper;
4143 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
  __ bind(&done);
  context()->Plug(result);
4150 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4151 ZoneList<Expression*>* args = expr->arguments();
4152 DCHECK_EQ(2, args->length());
4153 VisitForStackValue(args->at(0));
4154 VisitForAccumulatorValue(args->at(1));
4157 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4158 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
4164 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4165 ZoneList<Expression*>* args = expr->arguments();
4166 DCHECK_EQ(2, args->length());
4168 VisitForStackValue(args->at(0));
4169 VisitForStackValue(args->at(1));
4171 StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
4177 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4178 ZoneList<Expression*>* args = expr->arguments();
4179 DCHECK(args->length() >= 2);
4181 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4182 for (int i = 0; i < arg_count + 1; i++) {
4183 VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.
4187 Label runtime, done;
4188 // Check for non-function argument (including proxy).
4189 __ JumpIfSmi(v0, &runtime);
4190 __ GetObjectType(v0, a1, a1);
4191 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4193 // InvokeFunction requires the function in a1. Move it in there.
4194 __ mov(a1, result_register());
4195 ParameterCount count(arg_count);
4196 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4197 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
4209 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4210 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4211 GetVar(result_register(), new_target_var);
4212 __ Push(result_register());
4214 EmitLoadSuperConstructor();
4215 __ Push(result_register());
4217 // Check if the calling frame is an arguments adaptor frame.
4218 Label adaptor_frame, args_set_up, runtime;
4219 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4220 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4221 __ Branch(&adaptor_frame, eq, a3,
4222 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // The default constructor has no arguments, so no adaptor frame means no
  // arguments.
4224 __ mov(a0, zero_reg);
4225 __ Branch(&args_set_up);
4227 // Copy arguments from adaptor frame.
4229 __ bind(&adaptor_frame);
4230 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4231 __ SmiUntag(a1, a1);
4233 // Subtract 1 from arguments count, for new.target.
  __ Addu(a1, a1, Operand(-1));
  __ mov(a0, a1);
4237 // Get arguments pointer in a2.
4238 __ sll(at, a1, kPointerSizeLog2);
4239 __ addu(a2, a2, at);
4240 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
  Label loop;
  __ bind(&loop);
  // Pre-decrement a2 with kPointerSize on each iteration.
4244 // Pre-decrement in order to skip receiver.
4245 __ Addu(a2, a2, Operand(-kPointerSize));
  __ lw(a3, MemOperand(a2));
  __ push(a3);
  __ Addu(a1, a1, Operand(-1));
  __ Branch(&loop, ne, a1, Operand(zero_reg));
4252 __ bind(&args_set_up);
4253 __ sll(at, a0, kPointerSizeLog2);
4254 __ Addu(at, at, Operand(sp));
4255 __ lw(a1, MemOperand(at, 0));
4256 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4258 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4259 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  __ Drop(1);

  context()->Plug(result_register());
4267 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4268 RegExpConstructResultStub stub(isolate());
4269 ZoneList<Expression*>* args = expr->arguments();
4270 DCHECK(args->length() == 3);
4271 VisitForStackValue(args->at(0));
4272 VisitForStackValue(args->at(1));
4273 VisitForAccumulatorValue(args->at(2));
4274 __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
4282 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4283 ZoneList<Expression*>* args = expr->arguments();
4284 DCHECK_EQ(2, args->length());
4286 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4287 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4289 Handle<FixedArray> jsfunction_result_caches(
4290 isolate()->native_context()->jsfunction_result_caches());
4291 if (jsfunction_result_caches->length() <= cache_id) {
4292 __ Abort(kAttemptToUseUndefinedCache);
4293 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }
4298 VisitForAccumulatorValue(args->at(1));
  Register key = v0;
  Register cache = a1;
4302 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4303 __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(
            cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4311 Label done, not_found;
4312 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4313 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4314 // a2 now holds finger offset as a smi.
4315 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4316 // a3 now points to the start of fixed array elements.
4317 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
4318 __ addu(a3, a3, at);
4319 // a3 now points to key of indexed element of cache.
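  // Cache entries are stored as (key, value) pairs, so the matching value, if
  // any, sits one pointer past the probed key.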
4320 __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));
  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
4327 // Call runtime to perform the lookup.
4328 __ Push(cache, key);
4329 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
  __ bind(&done);
  context()->Plug(v0);
4336 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4337 ZoneList<Expression*>* args = expr->arguments();
4338 VisitForAccumulatorValue(args->at(0));
4340 Label materialize_true, materialize_false;
4341 Label* if_true = NULL;
4342 Label* if_false = NULL;
4343 Label* fall_through = NULL;
4344 context()->PrepareTest(&materialize_true, &materialize_false,
4345 &if_true, &if_false, &fall_through);
4347 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4348 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4350 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4351 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4353 context()->Plug(if_true, if_false);
4357 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4358 ZoneList<Expression*>* args = expr->arguments();
4359 DCHECK(args->length() == 1);
4360 VisitForAccumulatorValue(args->at(0));
4362 __ AssertString(v0);
4364 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4365 __ IndexFromHash(v0, v0);
4367 context()->Plug(v0);
4371 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4372 Label bailout, done, one_char_separator, long_separator,
4373 non_trivial_array, not_size_one_array, loop,
4374 empty_separator_loop, one_char_separator_loop,
4375 one_char_separator_loop_entry, long_separator_loop;
4376 ZoneList<Expression*>* args = expr->arguments();
4377 DCHECK(args->length() == 2);
4378 VisitForStackValue(args->at(1));
4379 VisitForAccumulatorValue(args->at(0));
4381 // All aliases of the same register have disjoint lifetimes.
4382 Register array = v0;
4383 Register elements = no_reg; // Will be v0.
4384 Register result = no_reg; // Will be v0.
4385 Register separator = a1;
4386 Register array_length = a2;
4387 Register result_pos = no_reg; // Will be a2.
4388 Register string_length = a3;
4389 Register string = t0;
4390 Register element = t1;
4391 Register elements_end = t2;
4392 Register scratch1 = t3;
4393 Register scratch2 = t5;
4394 Register scratch3 = t4;
  // Separator operand is on the stack.
  __ lw(separator, MemOperand(sp));
4399 // Check that the array is a JSArray.
4400 __ JumpIfSmi(array, &bailout);
4401 __ GetObjectType(array, scratch1, scratch2);
4402 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4404 // Check that the array has fast elements.
4405 __ CheckFastElements(scratch1, scratch2, &bailout);
4407 // If the array has length zero, return the empty string.
4408 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4409 __ SmiUntag(array_length);
4410 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);
4414 __ bind(&non_trivial_array);
4416 // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.
4421 // Check that all array elements are sequential one-byte strings, and
4422 // accumulate the sum of their lengths, as a smi-encoded value.
4423 __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4426 __ sll(elements_end, array_length, kPointerSizeLog2);
4427 __ Addu(elements_end, element, elements_end);
4428 // Loop condition: while (element < elements_end).
4429 // Live values in registers:
4430 // elements: Fixed array of strings.
4431 // array_length: Length of the fixed array of strings (not smi)
4432 // separator: Separator string
4433 // string_length: Accumulated sum of string lengths (smi).
4434 // element: Current array element.
4435 // elements_end: Array end.
4436 if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
4442 __ Addu(element, element, kPointerSize);
4443 __ JumpIfSmi(string, &bailout);
4444 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4445 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4446 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4447 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4448 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4449 __ BranchOnOverflow(&bailout, scratch3);
4450 __ Branch(&loop, lt, element, Operand(elements_end));
4452 // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);
4459 // Live values in registers:
4460 // separator: Separator string
4461 // array_length: Length of the array.
4462 // string_length: Sum of string lengths (smi).
4463 // elements: FixedArray of strings.
4465 // Check that the separator is a flat one-byte string.
4466 __ JumpIfSmi(separator, &bailout);
4467 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4468 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4469 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4471 // Add (separator length times array_length) - separator length to the
4472 // string_length to get the length of the result string. array_length is not
4473 // smi but the other values are, so the result is a smi.
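  // E.g. joining 4 one-byte strings with a 2-character separator inserts the
  // separator 3 times: 2 * 4 - 2 = 6 extra characters.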
4474 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4475 __ Subu(string_length, string_length, Operand(scratch1));
4476 __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4480 __ And(scratch3, scratch2, Operand(0x80000000));
4481 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4482 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4483 __ BranchOnOverflow(&bailout, scratch3);
4484 __ SmiUntag(string_length);
  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4490 result = elements; // End of live range for elements.
4492 // Live values in registers:
4493 // element: First array element
4494 // separator: Separator string
4495 // string_length: Length of result string (not smi)
4496 // array_length: Length of the array.
4497 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4498 elements_end, &bailout);
4499 // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position in the result where the first
  // character will be written.
4502 __ sll(elements_end, array_length, kPointerSizeLog2);
4503 __ Addu(elements_end, element, elements_end);
4504 result_pos = array_length; // End of live range for array_length.
4505 array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4510 // Check the length of the separator.
4511 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4512 __ li(at, Operand(Smi::FromInt(1)));
4513 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4514 __ Branch(&long_separator, gt, scratch1, Operand(at));
4516 // Empty separator case.
4517 __ bind(&empty_separator_loop);
4518 // Live values in registers:
4519 // result_pos: the position to which we are currently copying characters.
4520 // element: Current array element.
4521 // elements_end: Array end.
4523 // Copy next array element to the result.
4524 __ lw(string, MemOperand(element));
4525 __ Addu(element, element, kPointerSize);
4526 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4527 __ SmiUntag(string_length);
4528 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4529 __ CopyBytes(string, result_pos, string_length, scratch1);
4530 // End while (element < elements_end).
4531 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);
4535 // One-character separator case.
4536 __ bind(&one_char_separator);
4537 // Replace separator with its one-byte character value.
4538 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4539 // Jump into the loop after the code that copies the separator, so the first
4540 // element is not preceded by a separator.
4541 __ jmp(&one_char_separator_loop_entry);
4543 __ bind(&one_char_separator_loop);
4544 // Live values in registers:
4545 // result_pos: the position to which we are currently copying characters.
4546 // element: Current array element.
4547 // elements_end: Array end.
4548 // separator: Single separator one-byte char (in lower byte).
4550 // Copy the separator character to the result.
4551 __ sb(separator, MemOperand(result_pos));
4552 __ Addu(result_pos, result_pos, 1);
4554 // Copy next array element to the result.
4555 __ bind(&one_char_separator_loop_entry);
4556 __ lw(string, MemOperand(element));
4557 __ Addu(element, element, kPointerSize);
4558 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4559 __ SmiUntag(string_length);
4560 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4561 __ CopyBytes(string, result_pos, string_length, scratch1);
4562 // End while (element < elements_end).
4563 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);
4567 // Long separator case (separator is more than one character). Entry is at the
4568 // label long_separator below.
4569 __ bind(&long_separator_loop);
4570 // Live values in registers:
4571 // result_pos: the position to which we are currently copying characters.
4572 // element: Current array element.
4573 // elements_end: Array end.
4574 // separator: Separator string.
4576 // Copy the separator to the result.
4577 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
4578 __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4582 __ CopyBytes(string, result_pos, string_length, scratch1);
4584 __ bind(&long_separator);
4585 __ lw(string, MemOperand(element));
4586 __ Addu(element, element, kPointerSize);
4587 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4588 __ SmiUntag(string_length);
4589 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4590 __ CopyBytes(string, result_pos, string_length, scratch1);
4591 // End while (element < elements_end).
4592 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
4603 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4604 DCHECK(expr->arguments()->length() == 0);
4605 ExternalReference debug_is_active =
4606 ExternalReference::debug_is_active_address(isolate());
4607 __ li(at, Operand(debug_is_active));
4608 __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
4614 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4615 ZoneList<Expression*>* args = expr->arguments();
4616 int arg_count = args->length();
4618 if (expr->is_jsruntime()) {
4619 Comment cmnt(masm_, "[ CallRuntime");
4620 // Push the builtins object as the receiver.
4621 Register receiver = LoadDescriptor::ReceiverRegister();
4622 __ lw(receiver, GlobalObjectOperand());
    __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
    __ push(receiver);
4626 // Load the function from the receiver.
4627 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4628 if (FLAG_vector_ics) {
4629 __ li(VectorLoadICDescriptor::SlotRegister(),
4630 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }
4636 // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
4639 __ sw(v0, MemOperand(sp, kPointerSize));
4641 // Push the arguments ("left-to-right").
4642 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
4646 // Record source position of the IC call.
4647 SetSourcePosition(expr->position());
4648 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
4652 // Restore context register.
4653 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4655 context()->DropAndPlug(1, v0);
  } else {
    const Runtime::Function* function = expr->function();
4659 switch (function->function_id) {
4660 #define CALL_INTRINSIC_GENERATOR(Name) \
4661 case Runtime::kInline##Name: { \
4662 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr); \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4666 #undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4669 // Push the arguments ("left-to-right").
4670 for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }
4674 // Call the C runtime function.
4675 __ CallRuntime(expr->function(), arg_count);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(v0);
      }
    }
  }
}
4683 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4684 switch (expr->op()) {
4685 case Token::DELETE: {
4686 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4687 Property* property = expr->expression()->AsProperty();
4688 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4690 if (property != NULL) {
4691 VisitForStackValue(property->obj());
4692 VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(language_mode())));
        __ push(a1);
4695 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4696 context()->Plug(v0);
4697 } else if (proxy != NULL) {
4698 Variable* var = proxy->var();
4699 // Delete of an unqualified identifier is disallowed in strict mode
4700 // but "delete this" is allowed.
4701 DCHECK(is_sloppy(language_mode()) || var->is_this());
4702 if (var->IsUnallocated()) {
4703 __ lw(a2, GlobalObjectOperand());
4704 __ li(a1, Operand(var->name()));
4705 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4706 __ Push(a2, a1, a0);
4707 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4708 context()->Plug(v0);
4709 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4710 // Result of deleting non-global, non-dynamic variables is false.
4711 // The subexpression does not have side effects.
4712 context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
4715 // context where the variable was introduced.
4716 DCHECK(!context_register().is(a2));
4717 __ li(a2, Operand(var->name()));
4718 __ Push(context_register(), a2);
4719 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4720 context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
4724 // The subexpression may have side effects.
4725 VisitForEffect(expr->expression());
4726 context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4733 VisitForEffect(expr->expression());
4734 context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4740 if (context()->IsEffect()) {
4741 // Unary NOT has no side effects so it's only necessary to visit the
4742 // subexpression. Match the optimizing compiler by not branching.
4743 VisitForEffect(expr->expression());
4744 } else if (context()->IsTest()) {
4745 const TestContext* test = TestContext::cast(context());
4746 // The labels are swapped for the recursive call.
4747 VisitForControl(expr->expression(),
4748 test->false_label(),
                        test->true_label(),
                        test->fall_through());
4751 context()->Plug(test->true_label(), test->false_label());
4753 // We handle value contexts explicitly rather than simply visiting
4754 // for control and plugging the control flow into the context,
4755 // because we need to prepare a pair of extra administrative AST ids
4756 // for the optimizing compiler.
4757 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4758 Label materialize_true, materialize_false, done;
4759 VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
4764 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4765 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
4768 __ bind(&materialize_false);
4769 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4770 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }
4777 case Token::TYPEOF: {
4778 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4779 { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
4782 __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
4793 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4794 DCHECK(expr->expression()->IsValidReferenceExpression());
4796 Comment cmnt(masm_, "[ CountOperation");
4797 SetSourcePosition(expr->position());
4799 Property* prop = expr->expression()->AsProperty();
4800 LhsKind assign_type = GetAssignType(prop);
4802 // Evaluate expression and get value.
4803 if (assign_type == VARIABLE) {
4804 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4805 AccumulatorValueContext context(this);
4806 EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
4809 if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
4813 switch (assign_type) {
4814 case NAMED_PROPERTY: {
4815 // Put the object both on the stack and in the register.
4816 VisitForStackValue(prop->obj());
4817 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }
4822 case NAMED_SUPER_PROPERTY: {
4823 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4824 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4825 __ Push(result_register());
4826 const Register scratch = a1;
4827 __ lw(scratch, MemOperand(sp, kPointerSize));
4828 __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }
4833 case KEYED_SUPER_PROPERTY: {
4834 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4835 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4836 const Register scratch = a1;
4837 const Register scratch1 = t0;
4838 __ Move(scratch, result_register());
4839 VisitForAccumulatorValue(prop->key());
4840 __ Push(scratch, result_register());
4841 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
4842 __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }
4847 case KEYED_PROPERTY: {
4848 VisitForStackValue(prop->obj());
4849 VisitForStackValue(prop->key());
4850 __ lw(LoadDescriptor::ReceiverRegister(),
4851 MemOperand(sp, 1 * kPointerSize));
4852 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }
  // We need a second deoptimization point after loading the value
  // because evaluating the property load may have a side effect.
4864 if (assign_type == VARIABLE) {
4865 PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }
4870 // Inline smi case if we are in a loop.
4871 Label stub_call, done;
4872 JumpPatchSite patch_site(masm_);
4874 int count_value = expr->op() == Token::INC ? 1 : -1;
4876 if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);
4880 // Save result for postfix expressions.
4881 if (expr->is_postfix()) {
4882 if (!context()->IsEffect()) {
4883 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }
4906 Register scratch1 = a1;
4907 Register scratch2 = t0;
4908 __ li(scratch1, Operand(Smi::FromInt(count_value)));
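    // With a zero smi tag, adding the smi-encoded count_value to the smi in
    // v0 is a plain integer add; leaving the smi range sets the overflow
    // condition checked below.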
4909 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4910 __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Subu(v0, v0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  ToNumberStub convert_stub(isolate());
4917 __ CallStub(&convert_stub);
4918 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4920 // Save result for postfix expressions.
4921 if (expr->is_postfix()) {
4922 if (!context()->IsEffect()) {
4923 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }
4946 __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));
4950 // Record position before stub call.
4951 SetSourcePosition(expr->position());
4953 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4954 CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);
4958 // Store the value returned in v0.
4959 switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
4962 { EffectContext context(this);
4963 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
4970 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
4974 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
4980 case NAMED_PROPERTY: {
4981 __ mov(StoreDescriptor::ValueRegister(), result_register());
4982 __ li(StoreDescriptor::NameRegister(),
4983 Operand(prop->key()->AsLiteral()->value()));
4984 __ pop(StoreDescriptor::ReceiverRegister());
4985 CallStoreIC(expr->CountStoreFeedbackId());
4986 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4987 if (expr->is_postfix()) {
4988 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
4996 case NAMED_SUPER_PROPERTY: {
4997 EmitNamedSuperPropertyStore(prop);
4998 if (expr->is_postfix()) {
4999 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
5007 case KEYED_SUPER_PROPERTY: {
5008 EmitKeyedSuperPropertyStore(prop);
5009 if (expr->is_postfix()) {
5010 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
5018 case KEYED_PROPERTY: {
5019 __ mov(StoreDescriptor::ValueRegister(), result_register());
5020 __ Pop(StoreDescriptor::ReceiverRegister(),
5021 StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5024 CallIC(ic, expr->CountStoreFeedbackId());
5025 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5026 if (expr->is_postfix()) {
5027 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}
5039 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5040 DCHECK(!context()->IsEffect());
5041 DCHECK(!context()->IsTest());
5042 VariableProxy* proxy = expr->AsVariableProxy();
5043 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5044 Comment cmnt(masm_, "[ Global variable");
5045 __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5046 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5047 if (FLAG_vector_ics) {
5048 __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
5054 PrepareForBailout(expr, TOS_REG);
5055 context()->Plug(v0);
5056 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
5061 // by eval-introduced variables.
5062 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
5078 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5079 Expression* sub_expr,
5080 Handle<String> check) {
5081 Label materialize_true, materialize_false;
5082 Label* if_true = NULL;
5083 Label* if_false = NULL;
5084 Label* fall_through = NULL;
5085 context()->PrepareTest(&materialize_true, &materialize_false,
5086 &if_true, &if_false, &fall_through);
5088 { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
5091 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5093 Factory* factory = isolate()->factory();
5094 if (String::Equals(check, factory->number_string())) {
5095 __ JumpIfSmi(v0, if_true);
5096 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5097 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5098 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5099 } else if (String::Equals(check, factory->string_string())) {
5100 __ JumpIfSmi(v0, if_false);
5101 // Check for undetectable objects => false.
5102 __ GetObjectType(v0, v0, a1);
5103 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5104 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5105 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5106 Split(eq, a1, Operand(zero_reg),
5107 if_true, if_false, fall_through);
5108 } else if (String::Equals(check, factory->symbol_string())) {
5109 __ JumpIfSmi(v0, if_false);
5110 __ GetObjectType(v0, v0, a1);
5111 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5112 } else if (String::Equals(check, factory->boolean_string())) {
5113 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5114 __ Branch(if_true, eq, v0, Operand(at));
5115 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5116 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5117 } else if (String::Equals(check, factory->undefined_string())) {
5118 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5119 __ Branch(if_true, eq, v0, Operand(at));
5120 __ JumpIfSmi(v0, if_false);
5121 // Check for undetectable objects => true.
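    // (Undetectable objects such as document.all deliberately answer
    // "undefined" to typeof.)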
5122 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5123 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5124 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5125 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5126 } else if (String::Equals(check, factory->function_string())) {
5127 __ JumpIfSmi(v0, if_false);
5128 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5129 __ GetObjectType(v0, v0, a1);
5130 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5131 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5132 if_true, if_false, fall_through);
5133 } else if (String::Equals(check, factory->object_string())) {
5134 __ JumpIfSmi(v0, if_false);
5135 __ LoadRoot(at, Heap::kNullValueRootIndex);
5136 __ Branch(if_true, eq, v0, Operand(at));
5137 // Check for JS objects => true.
5138 __ GetObjectType(v0, v0, a1);
5139 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5140 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5141 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5142 // Check for undetectable objects => false.
5143 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5144 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5145 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
5153 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5154 Comment cmnt(masm_, "[ CompareOperation");
5155 SetSourcePosition(expr->position());
5157 // First we try a fast inlined version of the compare when one of
5158 // the operands is a literal.
5159 if (TryLiteralCompare(expr)) return;
5161 // Always perform the comparison for its control flow. Pack the result
5162 // into the expression's context after the comparison is performed.
5163 Label materialize_true, materialize_false;
5164 Label* if_true = NULL;
5165 Label* if_false = NULL;
5166 Label* fall_through = NULL;
5167 context()->PrepareTest(&materialize_true, &materialize_false,
5168 &if_true, &if_false, &fall_through);
5170 Token::Value op = expr->op();
5171 VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
5175 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5176 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5177 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;
5181 case Token::INSTANCEOF: {
5182 VisitForStackValue(expr->right());
5183 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5186 // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
5192 VisitForAccumulatorValue(expr->right());
5193 Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);
5197 bool inline_smi_code = ShouldInlineSmiCase(op);
5198 JumpPatchSite patch_site(masm_);
5199 if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
5202 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5203 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
5206 // Record position and call the compare IC.
5207 SetSourcePosition(expr->position());
5208 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5209 CallIC(ic, expr->CompareOperationFeedbackId());
5210 patch_site.EmitPatchInfo();
5211 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }
5216 // Convert the result of the comparison into one expected for this
5217 // expression's context.
  context()->Plug(if_true, if_false);
}
5222 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
5226 Label* if_true = NULL;
5227 Label* if_false = NULL;
5228 Label* fall_through = NULL;
5229 context()->PrepareTest(&materialize_true, &materialize_false,
5230 &if_true, &if_false, &fall_through);
5232 VisitForAccumulatorValue(sub_expr);
5233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5234 __ mov(a0, result_register());
5235 if (expr->op() == Token::EQ_STRICT) {
5236 Heap::RootListIndex nil_value = nil == kNullValue ?
5237 Heap::kNullValueRootIndex :
5238 Heap::kUndefinedValueRootIndex;
5239 __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5243 CallIC(ic, expr->CompareOperationFeedbackId());
5244 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
5250 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5251 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5252 context()->Plug(v0);
Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}
5266 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5267 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5268 __ sw(value, MemOperand(fp, frame_offset));
5272 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5273 __ lw(dst, ContextOperand(cp, context_index));
5277 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5278 Scope* declaration_scope = scope()->DeclarationScope();
5279 if (declaration_scope->is_script_scope() ||
5280 declaration_scope->is_module_scope()) {
5281 // Contexts nested in the native context have a canonical empty function
5282 // as their closure, not the anonymous closure containing the global
5283 // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
5286 } else if (declaration_scope->is_eval_scope()) {
5287 // Contexts created by a call to eval have the same closure as the
5288 // context calling eval, not the anonymous closure containing the eval
5289 // code. Fetch it from the context.
5290 __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}
5299 // ----------------------------------------------------------------------------
5300 // Non-local control flow support.
5302 void FullCodeGenerator::EnterFinallyBlock() {
5303 DCHECK(!result_register().is(a1));
5304 // Store result register while executing finally block.
5305 __ push(result_register());
5306 // Cook return address in link register to stack (smi encoded Code* delta).
5307 __ Subu(a1, ra, Operand(masm_->CodeObject()));
5308 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5309 STATIC_ASSERT(0 == kSmiTag);
5310 __ Addu(a1, a1, Operand(a1)); // Convert to smi.
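  // With a 1-bit smi tag of 0, doubling the delta is exactly smi-tagging it;
  // ExitFinallyBlock undoes this with an arithmetic shift right by one.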
  // Store the cooked return address while executing the finally block.
  __ push(a1);

  // Store pending message while executing finally block.
5316 ExternalReference pending_message_obj =
5317 ExternalReference::address_of_pending_message_obj(isolate());
5318 __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}
5324 void FullCodeGenerator::ExitFinallyBlock() {
5325 DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
5328 ExternalReference pending_message_obj =
5329 ExternalReference::address_of_pending_message_obj(isolate());
5330 __ li(at, Operand(pending_message_obj));
5331 __ sw(a1, MemOperand(at));
  // Restore the cooked return address from the stack.
  __ pop(a1);
5336 // Uncook return address and return.
5337 __ pop(result_register());
5338 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5339 __ sra(a1, a1, 1); // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}
5348 void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
5351 Code* replacement_code) {
5352 static const int kInstrSize = Assembler::kInstrSize;
5353 Address branch_address = pc - 6 * kInstrSize;
5354 CodePatcher patcher(branch_address, 1);
5356 switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
5359 // beq at, zero_reg, ok
5360 // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
5367 case ON_STACK_REPLACEMENT:
5368 case OSR_AFTER_STACK_CHECK:
5369 // addiu at, zero_reg, 1
5370 // beq at, zero_reg, ok ;; Not changed
5371 // lui t9, <on-stack replacement address> upper
5372 // ori t9, <on-stack replacement address> lower
5373 // jalr t9 ;; Not changed
5374 // nop ;; Not changed
5375 // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
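  // The first instruction of the back-edge sequence now encodes its state:
  // 'slt' leaves at == 0 while the interrupt budget in a3 is non-negative, so
  // the following beq usually skips the call; 'addiu at, zero_reg, 1' forces
  // the beq to fall through and always call the replacement code.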
5379 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5380 // Replace the stack check address in the load-immediate (lui/ori pair)
5381 // with the entry address of the replacement code.
5382 Assembler::set_target_address_at(pc_immediate_load_address,
5383 replacement_code->entry());
5385 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
5390 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
5395 Address branch_address = pc - 6 * kInstrSize;
5396 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5398 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
5399 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5400 DCHECK(reinterpret_cast<uint32_t>(
5401 Assembler::target_address_at(pc_immediate_load_address)) ==
5402 reinterpret_cast<uint32_t>(
                   isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }
5407 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5409 if (reinterpret_cast<uint32_t>(
5410 Assembler::target_address_at(pc_immediate_load_address)) ==
5411 reinterpret_cast<uint32_t>(
5412 isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }
5416 DCHECK(reinterpret_cast<uint32_t>(
5417 Assembler::target_address_at(pc_immediate_load_address)) ==
5418 reinterpret_cast<uint32_t>(
5419 isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}
5424 } } // namespace v8::internal
5426 #endif // V8_TARGET_ARCH_MIPS