1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_MIPS64
9 // Note on Mips implementation:
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/parser.h"
25 #include "src/scopes.h"
27 #include "src/mips64/code-stubs-mips64.h"
28 #include "src/mips64/macro-assembler-mips64.h"
33 #define __ ACCESS_MASM(masm_)
36 // A patch site is a location in the code that can be patched. This
37 // class has a number of methods to emit the code which is patchable and the
38 // method EmitPatchInfo to record a marker back to the patchable code. This
39 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
40 // (raw 16 bit immediate value is used) is the delta from the pc to the first
41 // instruction of the patchable code.
42 // The marker instruction is effectively a NOP (dest is zero_reg) and will
43 // never be emitted by normal code.
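// For illustration (example numbers, not taken from a real compile): if the
// patchable code starts 42 instructions before EmitPatchInfo() runs, then
// delta_to_patch_site == 42, and since 42 / kImm16Mask == 0 and
// 42 % kImm16Mask == 42, the emitted marker is
//   andi zero_reg, zero_reg, 42
// The patcher recovers the delta as rx.code() * kImm16Mask + imm16,
// i.e. 0 * 0xffff + 42 == 42.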
44 class JumpPatchSite BASE_EMBEDDED {
46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
48 info_emitted_ = false;
53 DCHECK(patch_site_.is_bound() == info_emitted_);
56 // When initially emitting this ensure that a jump is always generated to skip
57 // the inlined smi code.
58 void EmitJumpIfNotSmi(Register reg, Label* target) {
59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
61 __ bind(&patch_site_);
63 // Always taken before patched.
64 __ BranchShort(target, eq, at, Operand(zero_reg));
67 // When initially emitting this ensure that a jump is never generated to skip
68 // the inlined smi code.
69 void EmitJumpIfSmi(Register reg, Label* target) {
70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
71 DCHECK(!patch_site_.is_bound() && !info_emitted_);
72 __ bind(&patch_site_);
74 // Never taken before patched.
75 __ BranchShort(target, ne, at, Operand(zero_reg));
78 void EmitPatchInfo() {
79 if (patch_site_.is_bound()) {
80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
82 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
87 __ nop(); // Signals no inlined code.
92 MacroAssembler* masm_;
100 // Generate code for a JS function. On entry to the function the receiver
101 // and arguments have been pushed on the stack left to right. The actual
102 // argument count matches the formal parameter count expected by the function.
105 // The live registers are:
106 // o a1: the JS function object being called (i.e. ourselves)
108 // o fp: our caller's frame pointer
109 // o sp: stack pointer
110 // o ra: return address
112 // The function builds a JS frame. Please see JavaScriptFrameConstants in
113 // frames-mips.h for its layout.
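// As a rough sketch of the stack at entry (before the prologue below), for a
// function declared with two parameters, assuming kPointerSize == 8:
//   sp + 0  : last argument
//   sp + 8  : first argument
//   sp + 16 : receiver   <-- num_parameters * kPointerSize, as used below
// a1 holds the function itself and ra the return address.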
114 void FullCodeGenerator::Generate() {
115 CompilationInfo* info = info_;
117 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
118 HandlerTable::LengthForRange(function()->handler_count()), TENURED));
120 profiling_counter_ = isolate()->factory()->NewCell(
121 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
122 SetFunctionPosition(function());
123 Comment cmnt(masm_, "[ function compiled by full code generator");
125 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
128 if (strlen(FLAG_stop_at) > 0 &&
129 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
134 // Sloppy mode functions and builtins need to replace the receiver with the
135 // global proxy when called as functions (without an explicit receiver object).
137 if (is_sloppy(info->language_mode()) && !info->is_native() &&
138 info->MayUseThis() && info->scope()->has_this_declaration()) {
140 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
141 __ ld(at, MemOperand(sp, receiver_offset));
142 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
143 __ Branch(&ok, ne, a2, Operand(at));
145 __ ld(a2, GlobalObjectOperand());
146 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
148 __ sd(a2, MemOperand(sp, receiver_offset));
151 // Open a frame scope to indicate that there is a frame on the stack. The
152 // MANUAL indicates that the scope shouldn't actually generate code to set up
153 // the frame (that is done below).
154 FrameScope frame_scope(masm_, StackFrame::MANUAL);
155 info->set_prologue_offset(masm_->pc_offset());
156 __ Prologue(info->IsCodePreAgingActive());
157 info->AddNoFrameRange(0, masm_->pc_offset());
159 { Comment cmnt(masm_, "[ Allocate locals");
160 int locals_count = info->scope()->num_stack_slots();
161 // Generators allocate locals, if any, in context slots.
162 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
163 if (locals_count > 0) {
164 if (locals_count >= 128) {
166 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
167 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
168 __ Branch(&ok, hs, t1, Operand(a2));
169 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
172 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
173 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
174 if (locals_count >= kMaxPushes) {
175 int loop_iterations = locals_count / kMaxPushes;
176 __ li(a2, Operand(loop_iterations));
178 __ bind(&loop_header);
180 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
181 for (int i = 0; i < kMaxPushes; i++) {
182 __ sd(t1, MemOperand(sp, i * kPointerSize));
184 // Continue loop if not done.
185 __ Dsubu(a2, a2, Operand(1));
186 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
188 int remaining = locals_count % kMaxPushes;
189 // Emit the remaining pushes.
190 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
191 for (int i = 0; i < remaining; i++) {
192 __ sd(t1, MemOperand(sp, i * kPointerSize));
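// A worked example (illustrative numbers): with locals_count == 70 and
// --optimize-for-size off, kMaxPushes == 32, so loop_iterations == 2. Each
// iteration drops sp by 32 * kPointerSize and fills those 32 slots with
// undefined; the trailing loop then handles remaining == 70 % 32 == 6 slots.
// The explicit stack-limit check above only runs when locals_count >= 128.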
197 bool function_in_register = true;
199 // Possibly allocate a local context.
200 if (info->scope()->num_heap_slots() > 0) {
201 Comment cmnt(masm_, "[ Allocate context");
202 // Argument to NewContext is the function, which is still in a1.
203 bool need_write_barrier = true;
204 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
205 if (info->scope()->is_script_scope()) {
207 __ Push(info->scope()->GetScopeInfo(info->isolate()));
208 __ CallRuntime(Runtime::kNewScriptContext, 2);
209 } else if (slots <= FastNewContextStub::kMaximumSlots) {
210 FastNewContextStub stub(isolate(), slots);
212 // Result of FastNewContextStub is always in new space.
213 need_write_barrier = false;
216 __ CallRuntime(Runtime::kNewFunctionContext, 1);
218 function_in_register = false;
219 // Context is returned in v0. It replaces the context passed to us.
220 // It's saved in the stack and kept live in cp.
222 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
223 // Copy any necessary parameters into the context.
224 int num_parameters = info->scope()->num_parameters();
225 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
226 for (int i = first_parameter; i < num_parameters; i++) {
227 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
228 if (var->IsContextSlot()) {
229 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
230 (num_parameters - 1 - i) * kPointerSize;
231 // Load parameter from stack.
232 __ ld(a0, MemOperand(fp, parameter_offset));
233 // Store it in the context.
234 MemOperand target = ContextOperand(cp, var->index());
237 // Update the write barrier.
238 if (need_write_barrier) {
239 __ RecordWriteContextSlot(
240 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
241 } else if (FLAG_debug_code) {
243 __ JumpIfInNewSpace(cp, a0, &done);
244 __ Abort(kExpectedNewSpaceObject);
251 // Possibly set up a local binding to the 'this function', which is used in
252 // derived constructors with super calls.
253 Variable* this_function_var = scope()->this_function_var();
254 if (this_function_var != nullptr) {
255 Comment cmnt(masm_, "[ This function");
256 SetVar(this_function_var, a1, a2, a3);
259 Variable* new_target_var = scope()->new_target_var();
260 if (new_target_var != nullptr) {
261 Comment cmnt(masm_, "[ new.target");
262 // new.target is parameter -2.
263 int offset = 2 * kPointerSize +
264 (info_->scope()->num_parameters() + 1) * kPointerSize;
265 __ ld(v0, MemOperand(fp, offset));
266 SetVar(new_target_var, v0, a2, a3);
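// For example (assuming the standard frame layout where fp + 2 * kPointerSize
// reaches the caller's sp): with 3 declared parameters the offset above is
// 2 * 8 + (3 + 1) * 8 == 48, so new.target is read from MemOperand(fp, 48),
// one slot above the receiver in the caller's frame.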
269 ArgumentsAccessStub::HasNewTarget has_new_target =
270 IsSubclassConstructor(info->function()->kind())
271 ? ArgumentsAccessStub::HAS_NEW_TARGET
272 : ArgumentsAccessStub::NO_NEW_TARGET;
274 // Possibly allocate RestParameters
276 Variable* rest_param = scope()->rest_parameter(&rest_index);
278 Comment cmnt(masm_, "[ Allocate rest parameter array");
280 int num_parameters = info->scope()->num_parameters();
281 int offset = num_parameters * kPointerSize;
282 if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
288 Operand(StandardFrameConstants::kCallerSPOffset + offset));
289 __ li(a2, Operand(Smi::FromInt(num_parameters)));
290 __ li(a1, Operand(Smi::FromInt(rest_index)));
291 __ li(a0, Operand(Smi::FromInt(language_mode())));
292 __ Push(a3, a2, a1, a0);
294 RestParamAccessStub stub(isolate());
297 SetVar(rest_param, v0, a1, a2);
300 Variable* arguments = scope()->arguments();
301 if (arguments != NULL) {
302 // Function uses arguments object.
303 Comment cmnt(masm_, "[ Allocate arguments object");
304 if (!function_in_register) {
305 // Load this again, if it's used by the local context below.
306 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
310 // Receiver is just before the parameters on the caller's stack.
311 int num_parameters = info->scope()->num_parameters();
312 int offset = num_parameters * kPointerSize;
314 Operand(StandardFrameConstants::kCallerSPOffset + offset));
315 __ li(a1, Operand(Smi::FromInt(num_parameters)));
318 // Arguments to ArgumentsAccessStub:
319 // function, receiver address, parameter count.
320 // The stub will rewrite receiver and parameter count if the previous
321 // stack frame was an arguments adapter frame.
322 ArgumentsAccessStub::Type type;
323 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
324 type = ArgumentsAccessStub::NEW_STRICT;
325 } else if (function()->has_duplicate_parameters()) {
326 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
328 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
330 ArgumentsAccessStub stub(isolate(), type, has_new_target);
333 SetVar(arguments, v0, a1, a2);
337 __ CallRuntime(Runtime::kTraceEnter, 0);
339 // Visit the declarations and body unless there is an illegal redeclaration.
341 if (scope()->HasIllegalRedeclaration()) {
342 Comment cmnt(masm_, "[ Declarations");
343 scope()->VisitIllegalRedeclaration(this);
346 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
347 { Comment cmnt(masm_, "[ Declarations");
348 // For named function expressions, declare the function name as a constant.
350 if (scope()->is_function_scope() && scope()->function() != NULL) {
351 VariableDeclaration* function = scope()->function();
352 DCHECK(function->proxy()->var()->mode() == CONST ||
353 function->proxy()->var()->mode() == CONST_LEGACY);
354 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
355 VisitVariableDeclaration(function);
357 VisitDeclarations(scope()->declarations());
359 { Comment cmnt(masm_, "[ Stack check");
360 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
362 __ LoadRoot(at, Heap::kStackLimitRootIndex);
363 __ Branch(&ok, hs, sp, Operand(at));
364 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
365 PredictableCodeSizeScope predictable(masm_,
366 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
367 __ Call(stack_check, RelocInfo::CODE_TARGET);
371 { Comment cmnt(masm_, "[ Body");
372 DCHECK(loop_depth() == 0);
374 VisitStatements(function()->body());
376 DCHECK(loop_depth() == 0);
380 // Always emit a 'return undefined' in case control fell off the end of the body.
382 { Comment cmnt(masm_, "[ return <undefined>;");
383 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
385 EmitReturnSequence();
389 void FullCodeGenerator::ClearAccumulator() {
390 DCHECK(Smi::FromInt(0) == 0);
391 __ mov(v0, zero_reg);
395 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
396 __ li(a2, Operand(profiling_counter_));
397 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
398 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
399 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
403 void FullCodeGenerator::EmitProfilingCounterReset() {
404 int reset_value = FLAG_interrupt_budget;
405 if (info_->is_debug()) {
406 // Detect debug break requests as soon as possible.
407 reset_value = FLAG_interrupt_budget >> 4;
409 __ li(a2, Operand(profiling_counter_));
410 __ li(a3, Operand(Smi::FromInt(reset_value)));
411 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
415 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
416 Label* back_edge_target) {
417 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
418 // to make sure it is constant. Branch may emit a skip-or-jump sequence
419 // instead of the normal Branch. It seems that the "skip" part of that
420 // sequence is about as long as this Branch would be so it is safe to ignore that.
422 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
423 Comment cmnt(masm_, "[ Back edge bookkeeping");
425 DCHECK(back_edge_target->is_bound());
426 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
427 int weight = Min(kMaxBackEdgeWeight,
428 Max(1, distance / kCodeSizeMultiplier));
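// For instance (symbolic, since the constants differ per build): a back edge
// spanning 10 * kCodeSizeMultiplier bytes of generated code yields
// weight == Min(kMaxBackEdgeWeight, 10), so larger loop bodies drain the
// profiling counter faster and reach the interrupt check sooner.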
429 EmitProfilingCounterDecrement(weight);
430 __ slt(at, a3, zero_reg);
431 __ beq(at, zero_reg, &ok);
432 // Call will emit a li t9 first, so it is safe to use the delay slot.
433 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
434 // Record a mapping of this PC offset to the OSR id. This is used to find
435 // the AST id from the unoptimized code in order to use it as a key into
436 // the deoptimization input data found in the optimized code.
437 RecordBackEdge(stmt->OsrEntryId());
438 EmitProfilingCounterReset();
441 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
442 // Record a mapping of the OSR id to this PC. This is used if the OSR
443 // entry becomes the target of a bailout. We don't expect it to be, but
444 // we want it to work if it is.
445 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
449 void FullCodeGenerator::EmitReturnSequence() {
450 Comment cmnt(masm_, "[ Return sequence");
451 if (return_label_.is_bound()) {
452 __ Branch(&return_label_);
454 __ bind(&return_label_);
456 // Push the return value on the stack as the parameter.
457 // Runtime::TraceExit returns its parameter in v0.
459 __ CallRuntime(Runtime::kTraceExit, 1);
461 // Pretend that the exit is a backwards jump to the entry.
463 if (info_->ShouldSelfOptimize()) {
464 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
466 int distance = masm_->pc_offset();
467 weight = Min(kMaxBackEdgeWeight,
468 Max(1, distance / kCodeSizeMultiplier));
470 EmitProfilingCounterDecrement(weight);
472 __ Branch(&ok, ge, a3, Operand(zero_reg));
474 __ Call(isolate()->builtins()->InterruptCheck(),
475 RelocInfo::CODE_TARGET);
477 EmitProfilingCounterReset();
481 // Add a label for checking the size of the code used for returning.
482 Label check_exit_codesize;
483 masm_->bind(&check_exit_codesize);
485 // Make sure that the constant pool is not emitted inside of the return sequence.
487 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
488 // Here we use masm_-> instead of the __ macro to keep the code coverage
489 // tool from instrumenting this sequence, as we rely on the code size here.
490 int32_t arg_count = info_->scope()->num_parameters() + 1;
491 if (IsSubclassConstructor(info_->function()->kind())) {
494 int32_t sp_delta = arg_count * kPointerSize;
495 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
498 int no_frame_start = masm_->pc_offset();
499 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
500 masm_->Daddu(sp, sp, Operand(sp_delta));
502 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
506 // Check that the size of the code used for returning is large enough
507 // for the debugger's requirements.
508 DCHECK(Assembler::kJSReturnSequenceInstructions <=
509 masm_->InstructionsGeneratedSince(&check_exit_codesize));
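// As a concrete example (illustrative only): an ordinary function with two
// declared parameters has arg_count == 3 (parameters plus the receiver), so
// sp_delta == 3 * kPointerSize == 24 bytes are popped after fp and ra have
// been restored above.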
515 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
516 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
520 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
521 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
522 codegen()->GetVar(result_register(), var);
526 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
527 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
528 codegen()->GetVar(result_register(), var);
529 __ push(result_register());
533 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
534 // For simplicity we always test the accumulator register.
535 codegen()->GetVar(result_register(), var);
536 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
537 codegen()->DoTest(this);
541 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
545 void FullCodeGenerator::AccumulatorValueContext::Plug(
546 Heap::RootListIndex index) const {
547 __ LoadRoot(result_register(), index);
551 void FullCodeGenerator::StackValueContext::Plug(
552 Heap::RootListIndex index) const {
553 __ LoadRoot(result_register(), index);
554 __ push(result_register());
558 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
559 codegen()->PrepareForBailoutBeforeSplit(condition(),
563 if (index == Heap::kUndefinedValueRootIndex ||
564 index == Heap::kNullValueRootIndex ||
565 index == Heap::kFalseValueRootIndex) {
566 if (false_label_ != fall_through_) __ Branch(false_label_);
567 } else if (index == Heap::kTrueValueRootIndex) {
568 if (true_label_ != fall_through_) __ Branch(true_label_);
570 __ LoadRoot(result_register(), index);
571 codegen()->DoTest(this);
576 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
580 void FullCodeGenerator::AccumulatorValueContext::Plug(
581 Handle<Object> lit) const {
582 __ li(result_register(), Operand(lit));
586 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
587 // Immediates cannot be pushed directly.
588 __ li(result_register(), Operand(lit));
589 __ push(result_register());
593 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
594 codegen()->PrepareForBailoutBeforeSplit(condition(),
598 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
599 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
600 if (false_label_ != fall_through_) __ Branch(false_label_);
601 } else if (lit->IsTrue() || lit->IsJSObject()) {
602 if (true_label_ != fall_through_) __ Branch(true_label_);
603 } else if (lit->IsString()) {
604 if (String::cast(*lit)->length() == 0) {
605 if (false_label_ != fall_through_) __ Branch(false_label_);
607 if (true_label_ != fall_through_) __ Branch(true_label_);
609 } else if (lit->IsSmi()) {
610 if (Smi::cast(*lit)->value() == 0) {
611 if (false_label_ != fall_through_) __ Branch(false_label_);
613 if (true_label_ != fall_through_) __ Branch(true_label_);
616 // For simplicity we always test the accumulator register.
617 __ li(result_register(), Operand(lit));
618 codegen()->DoTest(this);
623 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
624 Register reg) const {
630 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
632 Register reg) const {
635 __ Move(result_register(), reg);
639 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
640 Register reg) const {
642 if (count > 1) __ Drop(count - 1);
643 __ sd(reg, MemOperand(sp, 0));
647 void FullCodeGenerator::TestContext::DropAndPlug(int count,
648 Register reg) const {
650 // For simplicity we always test the accumulator register.
652 __ Move(result_register(), reg);
653 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
654 codegen()->DoTest(this);
658 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
659 Label* materialize_false) const {
660 DCHECK(materialize_true == materialize_false);
661 __ bind(materialize_true);
665 void FullCodeGenerator::AccumulatorValueContext::Plug(
666 Label* materialize_true,
667 Label* materialize_false) const {
669 __ bind(materialize_true);
670 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
672 __ bind(materialize_false);
673 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
678 void FullCodeGenerator::StackValueContext::Plug(
679 Label* materialize_true,
680 Label* materialize_false) const {
682 __ bind(materialize_true);
683 __ LoadRoot(at, Heap::kTrueValueRootIndex);
684 // Push the value as the following branch can clobber 'at' in long branch mode.
687 __ bind(materialize_false);
688 __ LoadRoot(at, Heap::kFalseValueRootIndex);
694 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
695 Label* materialize_false) const {
696 DCHECK(materialize_true == true_label_);
697 DCHECK(materialize_false == false_label_);
701 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
705 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
706 Heap::RootListIndex value_root_index =
707 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
708 __ LoadRoot(result_register(), value_root_index);
712 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
713 Heap::RootListIndex value_root_index =
714 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
715 __ LoadRoot(at, value_root_index);
720 void FullCodeGenerator::TestContext::Plug(bool flag) const {
721 codegen()->PrepareForBailoutBeforeSplit(condition(),
726 if (true_label_ != fall_through_) __ Branch(true_label_);
728 if (false_label_ != fall_through_) __ Branch(false_label_);
733 void FullCodeGenerator::DoTest(Expression* condition,
736 Label* fall_through) {
737 __ mov(a0, result_register());
738 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
739 CallIC(ic, condition->test_id());
740 __ mov(at, zero_reg);
741 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
745 void FullCodeGenerator::Split(Condition cc,
750 Label* fall_through) {
751 if (if_false == fall_through) {
752 __ Branch(if_true, cc, lhs, rhs);
753 } else if (if_true == fall_through) {
754 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
756 __ Branch(if_true, cc, lhs, rhs);
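// E.g. Split(eq, v0, Operand(at), if_true, if_false, if_false) emits a single
// conditional branch to if_true and simply falls through into the if_false
// code, whereas the fully general case (neither label is the fall-through)
// additionally has to branch to if_false.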
762 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
763 DCHECK(var->IsStackAllocated());
764 // Offset is negative because higher indexes are at lower addresses.
765 int offset = -var->index() * kPointerSize;
766 // Adjust by a (parameter or local) base offset.
767 if (var->IsParameter()) {
768 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
770 offset += JavaScriptFrameConstants::kLocal0Offset;
772 return MemOperand(fp, offset);
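// A small worked example (assuming parameters get 0-based indices): with
// num_parameters == 3, the parameter with index 1 gets
// offset == -1 * 8 + (3 + 1) * 8 == 24, i.e. MemOperand(fp, 24) in the
// caller's portion of the stack; locals instead land below fp via
// kLocal0Offset.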
776 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
777 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
778 if (var->IsContextSlot()) {
779 int context_chain_length = scope()->ContextChainLength(var->scope());
780 __ LoadContext(scratch, context_chain_length);
781 return ContextOperand(scratch, var->index());
783 return StackOperand(var);
788 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
789 // Use destination as scratch.
790 MemOperand location = VarOperand(var, dest);
791 __ ld(dest, location);
795 void FullCodeGenerator::SetVar(Variable* var,
799 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
800 DCHECK(!scratch0.is(src));
801 DCHECK(!scratch0.is(scratch1));
802 DCHECK(!scratch1.is(src));
803 MemOperand location = VarOperand(var, scratch0);
804 __ sd(src, location);
805 // Emit the write barrier code if the location is in the heap.
806 if (var->IsContextSlot()) {
807 __ RecordWriteContextSlot(scratch0,
817 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
818 bool should_normalize,
821 // Only prepare for bailouts before splits if we're in a test
822 // context. Otherwise, we let the Visit function deal with the
823 // preparation to avoid preparing with the same AST id twice.
824 if (!context()->IsTest() || !info_->IsOptimizable()) return;
827 if (should_normalize) __ Branch(&skip);
828 PrepareForBailout(expr, TOS_REG);
829 if (should_normalize) {
830 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
831 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
837 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
838 // The variable in the declaration always resides in the current function context.
840 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
841 if (generate_debug_code_) {
842 // Check that we're not inside a with or catch context.
843 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
844 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
845 __ Check(ne, kDeclarationInWithContext,
847 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
848 __ Check(ne, kDeclarationInCatchContext,
854 void FullCodeGenerator::VisitVariableDeclaration(
855 VariableDeclaration* declaration) {
856 // If it was not possible to allocate the variable at compile time, we
857 // need to "declare" it at runtime to make sure it actually exists in the global object.
859 VariableProxy* proxy = declaration->proxy();
860 VariableMode mode = declaration->mode();
861 Variable* variable = proxy->var();
862 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
863 switch (variable->location()) {
864 case Variable::UNALLOCATED:
865 globals_->Add(variable->name(), zone());
866 globals_->Add(variable->binding_needs_init()
867 ? isolate()->factory()->the_hole_value()
868 : isolate()->factory()->undefined_value(),
872 case Variable::PARAMETER:
873 case Variable::LOCAL:
875 Comment cmnt(masm_, "[ VariableDeclaration");
876 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
877 __ sd(a4, StackOperand(variable));
881 case Variable::CONTEXT:
883 Comment cmnt(masm_, "[ VariableDeclaration");
884 EmitDebugCheckDeclarationContext(variable);
885 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
886 __ sd(at, ContextOperand(cp, variable->index()));
887 // No write barrier since the_hole_value is in old space.
888 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
892 case Variable::LOOKUP: {
893 Comment cmnt(masm_, "[ VariableDeclaration");
894 __ li(a2, Operand(variable->name()));
895 // Declaration nodes are always introduced in one of four modes.
896 DCHECK(IsDeclaredVariableMode(mode));
897 PropertyAttributes attr =
898 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
899 __ li(a1, Operand(Smi::FromInt(attr)));
900 // Push initial value, if any.
901 // Note: For variables we must not push an initial value (such as
902 // 'undefined') because we may have a (legal) redeclaration and we
903 // must not destroy the current value.
905 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
906 __ Push(cp, a2, a1, a0);
908 DCHECK(Smi::FromInt(0) == 0);
909 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
910 __ Push(cp, a2, a1, a0);
912 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
919 void FullCodeGenerator::VisitFunctionDeclaration(
920 FunctionDeclaration* declaration) {
921 VariableProxy* proxy = declaration->proxy();
922 Variable* variable = proxy->var();
923 switch (variable->location()) {
924 case Variable::UNALLOCATED: {
925 globals_->Add(variable->name(), zone());
926 Handle<SharedFunctionInfo> function =
927 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
928 // Check for stack-overflow exception.
929 if (function.is_null()) return SetStackOverflow();
930 globals_->Add(function, zone());
934 case Variable::PARAMETER:
935 case Variable::LOCAL: {
936 Comment cmnt(masm_, "[ FunctionDeclaration");
937 VisitForAccumulatorValue(declaration->fun());
938 __ sd(result_register(), StackOperand(variable));
942 case Variable::CONTEXT: {
943 Comment cmnt(masm_, "[ FunctionDeclaration");
944 EmitDebugCheckDeclarationContext(variable);
945 VisitForAccumulatorValue(declaration->fun());
946 __ sd(result_register(), ContextOperand(cp, variable->index()));
947 int offset = Context::SlotOffset(variable->index());
948 // We know that we have written a function, which is not a smi.
949 __ RecordWriteContextSlot(cp,
957 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
961 case Variable::LOOKUP: {
962 Comment cmnt(masm_, "[ FunctionDeclaration");
963 __ li(a2, Operand(variable->name()));
964 __ li(a1, Operand(Smi::FromInt(NONE)));
966 // Push initial value for function declaration.
967 VisitForStackValue(declaration->fun());
968 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
975 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
976 VariableProxy* proxy = declaration->proxy();
977 Variable* variable = proxy->var();
978 switch (variable->location()) {
979 case Variable::UNALLOCATED:
983 case Variable::CONTEXT: {
984 Comment cmnt(masm_, "[ ImportDeclaration");
985 EmitDebugCheckDeclarationContext(variable);
990 case Variable::PARAMETER:
991 case Variable::LOCAL:
992 case Variable::LOOKUP:
998 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1003 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1004 // Call the runtime to declare the globals.
1005 // The context is the first argument.
1006 __ li(a1, Operand(pairs));
1007 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
1008 __ Push(cp, a1, a0);
1009 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1010 // Return value is ignored.
1014 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1015 // Call the runtime to declare the modules.
1016 __ Push(descriptions);
1017 __ CallRuntime(Runtime::kDeclareModules, 1);
1018 // Return value is ignored.
1022 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1023 Comment cmnt(masm_, "[ SwitchStatement");
1024 Breakable nested_statement(this, stmt);
1025 SetStatementPosition(stmt);
1027 // Keep the switch value on the stack until a case matches.
1028 VisitForStackValue(stmt->tag());
1029 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1031 ZoneList<CaseClause*>* clauses = stmt->cases();
1032 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1034 Label next_test; // Recycled for each test.
1035 // Compile all the tests with branches to their bodies.
1036 for (int i = 0; i < clauses->length(); i++) {
1037 CaseClause* clause = clauses->at(i);
1038 clause->body_target()->Unuse();
1040 // The default is not a test, but remember it as final fall through.
1041 if (clause->is_default()) {
1042 default_clause = clause;
1046 Comment cmnt(masm_, "[ Case comparison");
1047 __ bind(&next_test);
1050 // Compile the label expression.
1051 VisitForAccumulatorValue(clause->label());
1052 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1054 // Perform the comparison as if via '==='.
1055 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1056 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1057 JumpPatchSite patch_site(masm_);
1058 if (inline_smi_code) {
1061 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1063 __ Branch(&next_test, ne, a1, Operand(a0));
1064 __ Drop(1); // Switch value is no longer needed.
1065 __ Branch(clause->body_target());
1067 __ bind(&slow_case);
1070 // Record position before stub call for type feedback.
1071 SetSourcePosition(clause->position());
1072 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1073 strength(language_mode())).code();
1074 CallIC(ic, clause->CompareId());
1075 patch_site.EmitPatchInfo();
1079 PrepareForBailout(clause, TOS_REG);
1080 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1081 __ Branch(&next_test, ne, v0, Operand(at));
1083 __ Branch(clause->body_target());
1086 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1087 __ Drop(1); // Switch value is no longer needed.
1088 __ Branch(clause->body_target());
1091 // Discard the test value and jump to the default if present, otherwise to
1092 // the end of the statement.
1093 __ bind(&next_test);
1094 __ Drop(1); // Switch value is no longer needed.
1095 if (default_clause == NULL) {
1096 __ Branch(nested_statement.break_label());
1098 __ Branch(default_clause->body_target());
1101 // Compile all the case bodies.
1102 for (int i = 0; i < clauses->length(); i++) {
1103 Comment cmnt(masm_, "[ Case body");
1104 CaseClause* clause = clauses->at(i);
1105 __ bind(clause->body_target());
1106 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1107 VisitStatements(clause->statements());
1110 __ bind(nested_statement.break_label());
1111 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1115 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1116 Comment cmnt(masm_, "[ ForInStatement");
1117 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1118 SetStatementPosition(stmt);
1121 ForIn loop_statement(this, stmt);
1122 increment_loop_depth();
1124 // Get the object to enumerate over. If the object is null or undefined, skip
1125 // over the loop. See ECMA-262 version 5, section 12.6.4.
1126 SetExpressionPosition(stmt->enumerable());
1127 VisitForAccumulatorValue(stmt->enumerable());
1128 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1129 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1130 __ Branch(&exit, eq, a0, Operand(at));
1131 Register null_value = a5;
1132 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1133 __ Branch(&exit, eq, a0, Operand(null_value));
1134 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1136 // Convert the object to a JS object.
1137 Label convert, done_convert;
1138 __ JumpIfSmi(a0, &convert);
1139 __ GetObjectType(a0, a1, a1);
1140 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1143 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1145 __ bind(&done_convert);
1146 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1149 // Check for proxies.
1151 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1152 __ GetObjectType(a0, a1, a1);
1153 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1155 // Check cache validity in generated code. This is a fast case for
1156 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1157 // guarantee cache validity, call the runtime system to check cache
1158 // validity or get the property names in a fixed array.
1159 __ CheckEnumCache(null_value, &call_runtime);
1161 // The enum cache is valid. Load the map of the object being
1162 // iterated over and use the cache for the iteration.
1164 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1165 __ Branch(&use_cache);
1167 // Get the set of properties to enumerate.
1168 __ bind(&call_runtime);
1169 __ push(a0); // Duplicate the enumerable object on the stack.
1170 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1171 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1173 // If we got a map from the runtime call, we can do a fast
1174 // modification check. Otherwise, we got a fixed array, and we have
1175 // to do a slow check.
1177 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1178 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1179 __ Branch(&fixed_array, ne, a2, Operand(at));
1181 // We got a map in register v0. Get the enumeration cache from it.
1182 Label no_descriptors;
1183 __ bind(&use_cache);
1185 __ EnumLength(a1, v0);
1186 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1188 __ LoadInstanceDescriptors(v0, a2);
1189 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1190 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1192 // Set up the four remaining stack slots.
1193 __ li(a0, Operand(Smi::FromInt(0)));
1194 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1195 __ Push(v0, a2, a1, a0);
1198 __ bind(&no_descriptors);
1202 // We got a fixed array in register v0. Iterate through that.
1204 __ bind(&fixed_array);
1206 __ li(a1, FeedbackVector());
1207 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1208 int vector_index = FeedbackVector()->GetIndex(slot);
1209 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1211 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1212 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1213 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1214 __ GetObjectType(a2, a3, a3);
1215 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1216 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1217 __ bind(&non_proxy);
1218 __ Push(a1, v0); // Smi and array
1219 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1220 __ li(a0, Operand(Smi::FromInt(0)));
1221 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
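// At this point the loop operates on five stack slots (matching the loads in
// the loop body below):
//   sp + 0  : current index (smi)
//   sp + 8  : array length (smi)
//   sp + 16 : FixedArray of keys (or the enum cache)
//   sp + 24 : expected map, or a smi flag for the slow/proxy case
//   sp + 32 : the enumerable object itself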
1223 // Generate code for doing the condition check.
1224 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1226 SetExpressionPosition(stmt->each());
1228 // Load the current count to a0, load the length to a1.
1229 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1230 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1231 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1233 // Get the current entry of the array into register a3.
1234 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1235 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1236 __ SmiScale(a4, a0, kPointerSizeLog2);
1237 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1238 __ ld(a3, MemOperand(a4)); // Current entry.
1240 // Get the expected map from the stack or a smi in the
1241 // permanent slow case into register a2.
1242 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1244 // Check if the expected map still matches that of the enumerable.
1245 // If not, we may have to filter the key.
1247 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1248 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1249 __ Branch(&update_each, eq, a4, Operand(a2));
1251 // For proxies, no filtering is done.
1252 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1253 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1254 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1256 // Convert the entry to a string or (smi) 0 if it isn't a property
1257 // any more. If the property has been removed while iterating, we just skip it.
1259 __ Push(a1, a3); // Enumerable and current entry.
1260 __ CallRuntime(Runtime::kForInFilter, 2);
1261 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1262 __ mov(a3, result_register());
1263 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1264 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1266 // Update the 'each' property or variable from the possibly filtered
1267 // entry in register a3.
1268 __ bind(&update_each);
1269 __ mov(result_register(), a3);
1270 // Perform the assignment as if via '='.
1271 { EffectContext context(this);
1272 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1273 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1276 // Generate code for the body of the loop.
1277 Visit(stmt->body());
1279 // Generate code for going to the next element by incrementing
1280 // the index (smi) stored on top of the stack.
1281 __ bind(loop_statement.continue_label());
1283 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1286 EmitBackEdgeBookkeeping(stmt, &loop);
1289 // Remove the pointers stored on the stack.
1290 __ bind(loop_statement.break_label());
1293 // Exit and decrement the loop depth.
1294 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1296 decrement_loop_depth();
1300 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1302 // Use the fast case closure allocation code that allocates in new
1303 // space for nested functions that don't need literals cloning. If
1304 // we're running with the --always-opt or the --prepare-always-opt
1305 // flag, we need to use the runtime function so that the new function
1306 // we are creating here gets a chance to have its code optimized and
1307 // doesn't just get a copy of the existing unoptimized code.
1308 if (!FLAG_always_opt &&
1309 !FLAG_prepare_always_opt &&
1311 scope()->is_function_scope() &&
1312 info->num_literals() == 0) {
1313 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1314 __ li(a2, Operand(info));
1317 __ li(a0, Operand(info));
1318 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1319 : Heap::kFalseValueRootIndex);
1320 __ Push(cp, a0, a1);
1321 __ CallRuntime(Runtime::kNewClosure, 3);
1323 context()->Plug(v0);
1327 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1328 Comment cmnt(masm_, "[ VariableProxy");
1329 EmitVariableLoad(expr);
1333 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1335 FeedbackVectorICSlot slot) {
1336 if (NeedsHomeObject(initializer)) {
1337 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1338 __ li(StoreDescriptor::NameRegister(),
1339 Operand(isolate()->factory()->home_object_symbol()));
1340 __ ld(StoreDescriptor::ValueRegister(),
1341 MemOperand(sp, offset * kPointerSize));
1342 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1348 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1349 TypeofState typeof_state,
1351 Register current = cp;
1357 if (s->num_heap_slots() > 0) {
1358 if (s->calls_sloppy_eval()) {
1359 // Check that extension is NULL.
1360 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1361 __ Branch(slow, ne, temp, Operand(zero_reg));
1363 // Load next context in chain.
1364 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1365 // Walk the rest of the chain without clobbering cp.
1368 // If no outer scope calls eval, we do not need to check more
1369 // context extensions.
1370 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1371 s = s->outer_scope();
1374 if (s->is_eval_scope()) {
1376 if (!current.is(next)) {
1377 __ Move(next, current);
1380 // Terminate at native context.
1381 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1382 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1383 __ Branch(&fast, eq, temp, Operand(a4));
1384 // Check that extension is NULL.
1385 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1386 __ Branch(slow, ne, temp, Operand(zero_reg));
1387 // Load next context in chain.
1388 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1393 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1394 __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1395 __ li(LoadDescriptor::SlotRegister(),
1396 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1398 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1405 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1407 DCHECK(var->IsContextSlot());
1408 Register context = cp;
1412 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1413 if (s->num_heap_slots() > 0) {
1414 if (s->calls_sloppy_eval()) {
1415 // Check that extension is NULL.
1416 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1417 __ Branch(slow, ne, temp, Operand(zero_reg));
1419 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1420 // Walk the rest of the chain without clobbering cp.
1424 // Check that last extension is NULL.
1425 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1426 __ Branch(slow, ne, temp, Operand(zero_reg));
1428 // This function is used only for loads, not stores, so it's safe to
1429 // return a cp-based operand (the write barrier cannot be allowed to
1430 // destroy the cp register).
1431 return ContextOperand(context, var->index());
1435 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1436 TypeofState typeof_state,
1439 // Generate fast-case code for variables that might be shadowed by
1440 // eval-introduced variables. Eval is used a lot without
1441 // introducing variables. In those cases, we do not want to
1442 // perform a runtime call for all variables in the scope
1443 // containing the eval.
1444 Variable* var = proxy->var();
1445 if (var->mode() == DYNAMIC_GLOBAL) {
1446 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1448 } else if (var->mode() == DYNAMIC_LOCAL) {
1449 Variable* local = var->local_if_not_shadowed();
1450 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1451 if (local->mode() == LET || local->mode() == CONST ||
1452 local->mode() == CONST_LEGACY) {
1453 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1454 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1455 if (local->mode() == CONST_LEGACY) {
1456 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1457 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1458 } else { // LET || CONST
1459 __ Branch(done, ne, at, Operand(zero_reg));
1460 __ li(a0, Operand(var->name()));
1462 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1470 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1471 // Record position before possible IC call.
1472 SetSourcePosition(proxy->position());
1473 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1474 Variable* var = proxy->var();
1476 // Three cases: global variables, lookup variables, and all other types of variables.
1478 switch (var->location()) {
1479 case Variable::UNALLOCATED: {
1480 Comment cmnt(masm_, "[ Global variable");
1481 // Use inline caching. Variable name is passed in a2 and the global
1482 // object (receiver) in a0.
1483 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1484 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1485 __ li(LoadDescriptor::SlotRegister(),
1486 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1487 CallGlobalLoadIC(var->name());
1488 context()->Plug(v0);
1492 case Variable::PARAMETER:
1493 case Variable::LOCAL:
1494 case Variable::CONTEXT: {
1495 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1496 : "[ Stack variable");
1497 if (var->binding_needs_init()) {
1498 // var->scope() may be NULL when the proxy is located in eval code and
1499 // refers to a potential outside binding. Currently those bindings are
1500 // always looked up dynamically, i.e. in that case
1501 // var->location() == LOOKUP.
1503 DCHECK(var->scope() != NULL);
1505 // Check if the binding really needs an initialization check. The check
1506 // can be skipped in the following situation: we have a LET or CONST
1507 // binding in harmony mode, both the Variable and the VariableProxy have
1508 // the same declaration scope (i.e. they are both in global code, in the
1509 // same function or in the same eval code) and the VariableProxy is in
1510 // the source physically located after the initializer of the variable.
1512 // We cannot skip any initialization checks for CONST in non-harmony
1513 // mode because const variables may be declared but never initialized:
1514 // if (false) { const x; }; var y = x;
1516 // The condition on the declaration scopes is a conservative check for
1517 // nested functions that access a binding and are called before the
1518 // binding is initialized:
1519 // function() { f(); let x = 1; function f() { x = 2; } }
1521 bool skip_init_check;
1522 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1523 skip_init_check = false;
1524 } else if (var->is_this()) {
1525 CHECK(info_->function() != nullptr &&
1526 (info_->function()->kind() & kSubclassConstructor) != 0);
1527 // TODO(dslomov): implement 'this' hole check elimination.
1528 skip_init_check = false;
1530 // Check that we always have valid source position.
1531 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1532 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1533 skip_init_check = var->mode() != CONST_LEGACY &&
1534 var->initializer_position() < proxy->position();
1537 if (!skip_init_check) {
1538 // Let and const need a read barrier.
1540 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1541 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1542 if (var->mode() == LET || var->mode() == CONST) {
1543 // Throw a reference error when using an uninitialized let/const
1544 // binding in harmony mode.
1546 __ Branch(&done, ne, at, Operand(zero_reg));
1547 __ li(a0, Operand(var->name()));
1549 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1552 // Uninitialized const bindings outside of harmony mode are unholed.
1553 DCHECK(var->mode() == CONST_LEGACY);
1554 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1555 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1557 context()->Plug(v0);
1561 context()->Plug(var);
1565 case Variable::LOOKUP: {
1566 Comment cmnt(masm_, "[ Lookup variable");
1568 // Generate code for loading from variables potentially shadowed
1569 // by eval-introduced variables.
1570 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1572 __ li(a1, Operand(var->name()));
1573 __ Push(cp, a1); // Context and name.
1574 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1576 context()->Plug(v0);
1582 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1583 Comment cmnt(masm_, "[ RegExpLiteral");
1585 // Registers will be used as follows:
1586 // a5 = materialized value (RegExp literal)
1587 // a4 = JS function, literals array
1588 // a3 = literal index
1589 // a2 = RegExp pattern
1590 // a1 = RegExp flags
1591 // a0 = RegExp literal clone
1592 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1593 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1594 int literal_offset =
1595 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1596 __ ld(a5, FieldMemOperand(a4, literal_offset));
1597 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1598 __ Branch(&materialized, ne, a5, Operand(at));
1600 // Create regexp literal using runtime function.
1601 // Result will be in v0.
1602 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1603 __ li(a2, Operand(expr->pattern()));
1604 __ li(a1, Operand(expr->flags()));
1605 __ Push(a4, a3, a2, a1);
1606 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1609 __ bind(&materialized);
1610 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1611 Label allocated, runtime_allocate;
1612 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1615 __ bind(&runtime_allocate);
1616 __ li(a0, Operand(Smi::FromInt(size)));
1618 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1621 __ bind(&allocated);
1623 // After this, registers are used as follows:
1624 // v0: Newly allocated regexp.
1625 // a5: Materialized regexp.
1627 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1628 context()->Plug(v0);
1632 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1633 if (expression == NULL) {
1634 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1637 VisitForStackValue(expression);
1642 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1643 Comment cmnt(masm_, "[ ObjectLiteral");
1645 Handle<FixedArray> constant_properties = expr->constant_properties();
1646 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1647 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1648 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1649 __ li(a1, Operand(constant_properties));
1650 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1651 if (MustCreateObjectLiteralWithRuntime(expr)) {
1652 __ Push(a3, a2, a1, a0);
1653 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1655 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1658 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1660 // If result_saved is true the result is on top of the stack. If
1661 // result_saved is false the result is in v0.
1662 bool result_saved = false;
1664 AccessorTable accessor_table(zone());
1665 int property_index = 0;
1666 // store_slot_index points to the vector IC slot for the next store IC used.
1667 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1668 // and must be updated if the number of store ICs emitted here changes.
1669 int store_slot_index = 0;
1670 for (; property_index < expr->properties()->length(); property_index++) {
1671 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1672 if (property->is_computed_name()) break;
1673 if (property->IsCompileTimeValue()) continue;
1675 Literal* key = property->key()->AsLiteral();
1676 Expression* value = property->value();
1677 if (!result_saved) {
1678 __ push(v0); // Save result on stack.
1679 result_saved = true;
1681 switch (property->kind()) {
1682 case ObjectLiteral::Property::CONSTANT:
1684 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1685 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1687 case ObjectLiteral::Property::COMPUTED:
1688 // It is safe to use [[Put]] here because the boilerplate already
1689 // contains computed properties with an uninitialized value.
1690 if (key->value()->IsInternalizedString()) {
1691 if (property->emit_store()) {
1692 VisitForAccumulatorValue(value);
1693 __ mov(StoreDescriptor::ValueRegister(), result_register());
1694 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1695 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1696 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1697 if (FLAG_vector_stores) {
1698 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1701 CallStoreIC(key->LiteralFeedbackId());
1703 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1705 if (NeedsHomeObject(value)) {
1706 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1707 __ li(StoreDescriptor::NameRegister(),
1708 Operand(isolate()->factory()->home_object_symbol()));
1709 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1710 if (FLAG_vector_stores) {
1711 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1716 VisitForEffect(value);
1720 // Duplicate receiver on stack.
1721 __ ld(a0, MemOperand(sp));
1723 VisitForStackValue(key);
1724 VisitForStackValue(value);
1725 if (property->emit_store()) {
1726 EmitSetHomeObjectIfNeeded(
1727 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1728 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1730 __ CallRuntime(Runtime::kSetProperty, 4);
1735 case ObjectLiteral::Property::PROTOTYPE:
1736 // Duplicate receiver on stack.
1737 __ ld(a0, MemOperand(sp));
1739 VisitForStackValue(value);
1740 DCHECK(property->emit_store());
1741 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1743 case ObjectLiteral::Property::GETTER:
1744 if (property->emit_store()) {
1745 accessor_table.lookup(key)->second->getter = value;
1748 case ObjectLiteral::Property::SETTER:
1749 if (property->emit_store()) {
1750 accessor_table.lookup(key)->second->setter = value;
1756 // Emit code to define accessors, using only a single call to the runtime for
1757 // each pair of corresponding getters and setters.
1758 for (AccessorTable::Iterator it = accessor_table.begin();
1759 it != accessor_table.end();
1761 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1763 VisitForStackValue(it->first);
1764 EmitAccessor(it->second->getter);
1765 EmitSetHomeObjectIfNeeded(
1766 it->second->getter, 2,
1767 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1768 EmitAccessor(it->second->setter);
1769 EmitSetHomeObjectIfNeeded(
1770 it->second->setter, 3,
1771 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1772 __ li(a0, Operand(Smi::FromInt(NONE)));
1774 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1777 // Object literals have two parts. The "static" part on the left contains no
1778 // computed property names, and so we can compute its map ahead of time; see
1779 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1780 // starts with the first computed property name, and continues with all
1781 // properties to its right. All the code from above initializes the static
1782 // component of the object literal, and arranges for the map of the result to
1783 // reflect the static order in which the keys appear. For the dynamic
1784 // properties, we compile them into a series of "SetOwnProperty" runtime
1785 // calls. This will preserve insertion order.
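// For example, in { a: 0, [f()]: 1, b: 2 } the property 'a' belongs to the
// static part and was handled above, while [f()] and every later property
// (including 'b') are defined by the runtime calls emitted below.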
1786 for (; property_index < expr->properties()->length(); property_index++) {
1787 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1789 Expression* value = property->value();
1790 if (!result_saved) {
1791 __ push(v0); // Save result on the stack
1792 result_saved = true;
1795 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1798 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1799 DCHECK(!property->is_computed_name());
1800 VisitForStackValue(value);
1801 DCHECK(property->emit_store());
1802 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1804 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1805 VisitForStackValue(value);
1806 EmitSetHomeObjectIfNeeded(
1807 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1809 switch (property->kind()) {
1810 case ObjectLiteral::Property::CONSTANT:
1811 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1812 case ObjectLiteral::Property::COMPUTED:
1813 if (property->emit_store()) {
1814 __ li(a0, Operand(Smi::FromInt(NONE)));
1816 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1822 case ObjectLiteral::Property::PROTOTYPE:
1826 case ObjectLiteral::Property::GETTER:
1827 __ li(a0, Operand(Smi::FromInt(NONE)));
1829 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1832 case ObjectLiteral::Property::SETTER:
1833 __ li(a0, Operand(Smi::FromInt(NONE)));
1835 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1841 if (expr->has_function()) {
1842 DCHECK(result_saved);
1843 __ ld(a0, MemOperand(sp));
1845 __ CallRuntime(Runtime::kToFastProperties, 1);
1849 context()->PlugTOS();
1851 context()->Plug(v0);
1854 // Verify that compilation exactly consumed the number of store ic slots that
1855 // the ObjectLiteral node had to offer.
1856 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1860 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1861 Comment cmnt(masm_, "[ ArrayLiteral");
1863 expr->BuildConstantElements(isolate());
1865 Handle<FixedArray> constant_elements = expr->constant_elements();
1866 bool has_fast_elements =
1867 IsFastObjectElementsKind(expr->constant_elements_kind());
1869 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1870 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1871 // If the only customer of allocation sites is transitioning, then
1872 // we can turn it off if we don't have anywhere else to transition to.
1873 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1876 __ mov(a0, result_register());
1877 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1878 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1879 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1880 __ li(a1, Operand(constant_elements));
1881 if (MustCreateArrayLiteralWithRuntime(expr)) {
1882 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1883 __ Push(a3, a2, a1, a0);
1884 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1886 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1889 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1891 bool result_saved = false; // Is the result saved to the stack?
1892 ZoneList<Expression*>* subexprs = expr->values();
1893 int length = subexprs->length();
1895 // Emit code to evaluate all the non-constant subexpressions and to store
1896 // them into the newly cloned array.
1897 int array_index = 0;
1898 for (; array_index < length; array_index++) {
1899 Expression* subexpr = subexprs->at(array_index);
1900 if (subexpr->IsSpread()) break;
1902 // If the subexpression is a literal or a simple materialized literal it
1903 // is already set in the cloned array.
1904 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1906 if (!result_saved) {
1907 __ push(v0); // array literal
1908 __ Push(Smi::FromInt(expr->literal_index()));
1909 result_saved = true;
1912 VisitForAccumulatorValue(subexpr);
1914 if (has_fast_elements) {
1915 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1916 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1917 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1918 __ sd(result_register(), FieldMemOperand(a1, offset));
1919 // Update the write barrier for the array store.
1920 __ RecordWriteField(a1, offset, result_register(), a2,
1921 kRAHasBeenSaved, kDontSaveFPRegs,
1922 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1924 __ li(a3, Operand(Smi::FromInt(array_index)));
1925 __ mov(a0, result_register());
1926 StoreArrayLiteralElementStub stub(isolate());
1930 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1933 // In case the array literal contains spread expressions it has two parts. The
1934 // first part is the "static" array, which has a literal index and is handled
1935 // above. The second part starts at the first spread expression (inclusive),
1936 // and its elements are appended to the array one by one. Note that the
1937 // number of elements an iterable produces is unknown ahead of time.
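// For example, in [a, b, ...xs, c] the elements a and b are stored into the
// cloned boilerplate above, while ...xs and c are appended one at a time by
// the loop below.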
1938 if (array_index < length && result_saved) {
1939 __ Pop(); // literal index
1941 result_saved = false;
1943 for (; array_index < length; array_index++) {
1944 Expression* subexpr = subexprs->at(array_index);
1947 if (subexpr->IsSpread()) {
1948 VisitForStackValue(subexpr->AsSpread()->expression());
1949 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1951 VisitForStackValue(subexpr);
1952 __ CallRuntime(Runtime::kAppendElement, 2);
1955 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1959 __ Pop(); // literal index
1960 context()->PlugTOS();
1962 context()->Plug(v0);
1967 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1968 DCHECK(expr->target()->IsValidReferenceExpression());
1970 Comment cmnt(masm_, "[ Assignment");
1972 Property* property = expr->target()->AsProperty();
1973 LhsKind assign_type = Property::GetAssignType(property);
1975 // Evaluate LHS expression.
1976 switch (assign_type) {
1978 // Nothing to do here.
1980 case NAMED_PROPERTY:
1981 if (expr->is_compound()) {
1982 // We need the receiver both on the stack and in the register.
1983 VisitForStackValue(property->obj());
1984 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1986 VisitForStackValue(property->obj());
1989 case NAMED_SUPER_PROPERTY:
1991 property->obj()->AsSuperPropertyReference()->this_var());
1992 VisitForAccumulatorValue(
1993 property->obj()->AsSuperPropertyReference()->home_object());
1994 __ Push(result_register());
1995 if (expr->is_compound()) {
1996 const Register scratch = a1;
1997 __ ld(scratch, MemOperand(sp, kPointerSize));
1998 __ Push(scratch, result_register());
2001 case KEYED_SUPER_PROPERTY: {
2002 const Register scratch = a1;
2004 property->obj()->AsSuperPropertyReference()->this_var());
2005 VisitForAccumulatorValue(
2006 property->obj()->AsSuperPropertyReference()->home_object());
2007 __ Move(scratch, result_register());
2008 VisitForAccumulatorValue(property->key());
2009 __ Push(scratch, result_register());
2010 if (expr->is_compound()) {
2011 const Register scratch1 = a4;
2012 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
2013 __ Push(scratch1, scratch, result_register());
2017 case KEYED_PROPERTY:
2018 // We need the key and receiver on both the stack and in v0 and a1.
2019 if (expr->is_compound()) {
2020 VisitForStackValue(property->obj());
2021 VisitForStackValue(property->key());
2022 __ ld(LoadDescriptor::ReceiverRegister(),
2023 MemOperand(sp, 1 * kPointerSize));
2024 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2026 VisitForStackValue(property->obj());
2027 VisitForStackValue(property->key());
2032 // For compound assignments we need another deoptimization point after the
2033 // variable/property load.
2034 if (expr->is_compound()) {
2035 { AccumulatorValueContext context(this);
2036 switch (assign_type) {
2038 EmitVariableLoad(expr->target()->AsVariableProxy());
2039 PrepareForBailout(expr->target(), TOS_REG);
2041 case NAMED_PROPERTY:
2042 EmitNamedPropertyLoad(property);
2043 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2045 case NAMED_SUPER_PROPERTY:
2046 EmitNamedSuperPropertyLoad(property);
2047 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2049 case KEYED_SUPER_PROPERTY:
2050 EmitKeyedSuperPropertyLoad(property);
2051 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2053 case KEYED_PROPERTY:
2054 EmitKeyedPropertyLoad(property);
2055 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2060 Token::Value op = expr->binary_op();
2061 __ push(v0); // Left operand goes on the stack.
2062 VisitForAccumulatorValue(expr->value());
2064 SetSourcePosition(expr->position() + 1);
2065 AccumulatorValueContext context(this);
2066 if (ShouldInlineSmiCase(op)) {
2067 EmitInlineSmiBinaryOp(expr->binary_operation(),
2072 EmitBinaryOp(expr->binary_operation(), op);
2075 // Deoptimization point in case the binary operation may have side effects.
2076 PrepareForBailout(expr->binary_operation(), TOS_REG);
2078 VisitForAccumulatorValue(expr->value());
2081 // Record source position before possible IC call.
2082 SetSourcePosition(expr->position());
2085 switch (assign_type) {
2087 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2088 expr->op(), expr->AssignmentSlot());
2089 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2090 context()->Plug(v0);
2092 case NAMED_PROPERTY:
2093 EmitNamedPropertyAssignment(expr);
2095 case NAMED_SUPER_PROPERTY:
2096 EmitNamedSuperPropertyStore(property);
2097 context()->Plug(v0);
2099 case KEYED_SUPER_PROPERTY:
2100 EmitKeyedSuperPropertyStore(property);
2101 context()->Plug(v0);
2103 case KEYED_PROPERTY:
2104 EmitKeyedPropertyAssignment(expr);
2110 void FullCodeGenerator::VisitYield(Yield* expr) {
2111 Comment cmnt(masm_, "[ Yield");
2112 // Evaluate yielded value first; the initial iterator definition depends on
2113 // this. It stays on the stack while we update the iterator.
2114 VisitForStackValue(expr->expression());
2116 switch (expr->yield_kind()) {
2117 case Yield::kSuspend:
2118 // Pop value from top-of-stack slot; box result into result register.
2119 EmitCreateIteratorResult(false);
2120 __ push(result_register());
2122 case Yield::kInitial: {
2123 Label suspend, continuation, post_runtime, resume;
2127 __ bind(&continuation);
2131 VisitForAccumulatorValue(expr->generator_object());
2132 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2133 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2134 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2135 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2137 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2138 kRAHasBeenSaved, kDontSaveFPRegs);
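// If sp is already at the expression stack base there are no operands to
// preserve and we can skip the runtime call; otherwise the runtime call is
// needed so the non-empty operand stack survives the suspension.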
2139 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2140 __ Branch(&post_runtime, eq, sp, Operand(a1));
2141 __ push(v0); // generator object
2142 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2143 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2144 __ bind(&post_runtime);
2145 __ pop(result_register());
2146 EmitReturnSequence();
2149 context()->Plug(result_register());
2153 case Yield::kFinal: {
2154 VisitForAccumulatorValue(expr->generator_object());
2155 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2156 __ sd(a1, FieldMemOperand(result_register(),
2157 JSGeneratorObject::kContinuationOffset));
2158 // Pop value from top-of-stack slot, box result into result register.
2159 EmitCreateIteratorResult(true);
2160 EmitUnwindBeforeReturn();
2161 EmitReturnSequence();
2165 case Yield::kDelegating: {
2166 VisitForStackValue(expr->generator_object());
2168 // Initial stack layout is as follows:
2169 // [sp + 1 * kPointerSize] iter
2170 // [sp + 0 * kPointerSize] g
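// A delegating yield (yield*) is lowered to a loop: call iter.next (or
// iter.throw after an exception), suspend and yield each result while
// result.done is false, and finish with result.value once it is true; see
// l_next, l_call and l_try below.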
2172 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2173 Label l_next, l_call;
2174 Register load_receiver = LoadDescriptor::ReceiverRegister();
2175 Register load_name = LoadDescriptor::NameRegister();
2176 // Initial send value is undefined.
2177 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2180 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2183 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2184 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2185 __ Push(a2, a3, a0); // "throw", iter, except
2188 // try { received = %yield result }
2189 // Shuffle the received result above a try handler and yield it without re-boxing.
2192 __ pop(a0); // result
2193 EnterTryBlock(expr->index(), &l_catch);
2194 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2195 __ push(a0); // result
2197 __ bind(&l_continuation);
2200 __ bind(&l_suspend);
2201 const int generator_object_depth = kPointerSize + try_block_size;
2202 __ ld(a0, MemOperand(sp, generator_object_depth));
2204 __ Push(Smi::FromInt(expr->index())); // handler-index
2205 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2206 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2207 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2208 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2210 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2211 kRAHasBeenSaved, kDontSaveFPRegs);
2212 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2213 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2214 __ pop(v0); // result
2215 EmitReturnSequence();
2217 __ bind(&l_resume); // received in a0
2218 ExitTryBlock(expr->index());
2220 // receiver = iter; f = 'next'; arg = received;
2222 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2223 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2224 __ Push(load_name, a3, a0); // "next", iter, received
2226 // result = receiver[f](arg);
2228 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2229 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2230 __ li(LoadDescriptor::SlotRegister(),
2231 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2232 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2233 CallIC(ic, TypeFeedbackId::None());
2236 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2237 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2240 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2241 __ Drop(1); // The function is still on the stack; drop it.
2243 // if (!result.done) goto l_try;
2244 __ Move(load_receiver, v0);
2246 __ push(load_receiver); // save result
2247 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2248 __ li(LoadDescriptor::SlotRegister(),
2249 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2250 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2252 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2254 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2257 __ pop(load_receiver); // result
2258 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2259 __ li(LoadDescriptor::SlotRegister(),
2260 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2261 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2262 context()->DropAndPlug(2, v0); // drop iter and g
2269 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2271 JSGeneratorObject::ResumeMode resume_mode) {
2272 // The value stays in a0, and is ultimately read by the resumed generator, as
2273 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2274 // is read to throw the value when the resumed generator is already closed.
2275 // a1 will hold the generator object until the activation has been resumed.
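// The resume sequence below: push the-hole for each formal parameter, rebuild
// the JavaScript frame of the suspended function, and then either jump
// straight to the stored continuation (a NEXT resume with an empty operand
// stack) or fall back to Runtime::kResumeJSGeneratorObject to restore the
// operand stack and handlers.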
2276 VisitForStackValue(generator);
2277 VisitForAccumulatorValue(value);
2280 // Load suspended function and context.
2281 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2282 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2284 // Load receiver and store as the first argument.
2285 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2288 // Push holes for the rest of the arguments to the generator function.
2289 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2290 // The argument count is stored as int32_t on 64-bit platforms.
2291 // TODO(plind): Smi on 32-bit platforms.
2293 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2294 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2295 Label push_argument_holes, push_frame;
2296 __ bind(&push_argument_holes);
2297 __ Dsubu(a3, a3, Operand(1));
2298 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2300 __ jmp(&push_argument_holes);
2302 // Enter a new JavaScript frame, and initialize its slots as they were when
2303 // the generator was suspended.
2304 Label resume_frame, done;
2305 __ bind(&push_frame);
2306 __ Call(&resume_frame);
2308 __ bind(&resume_frame);
2309 // ra = return address.
2310 // fp = caller's frame pointer.
2311 // cp = callee's context,
2312 // a4 = callee's JS function.
2313 __ Push(ra, fp, cp, a4);
2314 // Adjust FP to point to saved FP.
2315 __ Daddu(fp, sp, 2 * kPointerSize);
2317 // Load the operand stack size.
2318 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2319 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2322 // If we are sending a value and there is no operand stack, we can jump back in directly.
2324 if (resume_mode == JSGeneratorObject::NEXT) {
2326 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2327 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2328 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2330 __ Daddu(a3, a3, Operand(a2));
2331 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2332 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2334 __ bind(&slow_resume);
2337 // Otherwise, we push holes for the operand stack and call the runtime to fix
2338 // up the stack and the handlers.
2339 Label push_operand_holes, call_resume;
2340 __ bind(&push_operand_holes);
2341 __ Dsubu(a3, a3, Operand(1));
2342 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2344 __ Branch(&push_operand_holes);
2345 __ bind(&call_resume);
2346 DCHECK(!result_register().is(a1));
2347 __ Push(a1, result_register());
2348 __ Push(Smi::FromInt(resume_mode));
2349 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2350 // Not reached: the runtime call returns elsewhere.
2351 __ stop("not-reached");
2354 context()->Plug(result_register());
2358 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2362 const int instance_size = 5 * kPointerSize;
2363 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2366 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
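// The five words allocated above hold the map, properties, elements, and the
// two in-object fields of the result, 'value' and 'done' (all written below,
// after &allocated).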
2369 __ bind(&gc_required);
2370 __ Push(Smi::FromInt(instance_size));
2371 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2372 __ ld(context_register(),
2373 MemOperand(fp, StandardFrameConstants::kContextOffset));
2375 __ bind(&allocated);
2376 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2377 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2378 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2380 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2381 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2382 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2383 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2384 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2386 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2388 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2390 // Only the value field needs a write barrier, as the other values are in the root set.
2392 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2393 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2397 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2398 SetSourcePosition(prop->position());
2399 Literal* key = prop->key()->AsLiteral();
2400 DCHECK(!prop->IsSuperAccess());
2402 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2403 __ li(LoadDescriptor::SlotRegister(),
2404 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2405 CallLoadIC(NOT_CONTEXTUAL);
2409 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2410 // Stack: receiver, home_object.
2411 SetSourcePosition(prop->position());
2412 Literal* key = prop->key()->AsLiteral();
2413 DCHECK(!key->value()->IsSmi());
2414 DCHECK(prop->IsSuperAccess());
2416 __ Push(key->value());
2417 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2421 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2422 SetSourcePosition(prop->position());
2423 // Call keyed load IC. It has register arguments receiver and key.
2424 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2425 __ li(LoadDescriptor::SlotRegister(),
2426 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2431 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2432 // Stack: receiver, home_object, key.
2433 SetSourcePosition(prop->position());
2435 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2439 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2441 Expression* left_expr,
2442 Expression* right_expr) {
2443 Label done, smi_case, stub_call;
2445 Register scratch1 = a2;
2446 Register scratch2 = a3;
2448 // Get the arguments.
2450 Register right = a0;
2452 __ mov(a0, result_register());
2454 // Perform combined smi check on both operands.
2455 __ Or(scratch1, left, Operand(right));
2456 STATIC_ASSERT(kSmiTag == 0);
2457 JumpPatchSite patch_site(masm_);
2458 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2460 __ bind(&stub_call);
2462 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2463 CallIC(code, expr->BinaryOperationFeedbackId());
2464 patch_site.EmitPatchInfo();
2468 // Smi case. This code works the same way as the smi-smi case in the
2469 // type-recording binary operation stub.
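// Each sequence below bails out to &stub_call whenever the operation cannot
// produce a valid smi result (shifted-out bits, arithmetic overflow, or a
// potential -0 from multiplication).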
2472 __ GetLeastBitsFromSmi(scratch1, right, 5);
2473 __ dsrav(right, left, scratch1);
2474 __ And(v0, right, Operand(0xffffffff00000000L));
2477 __ SmiUntag(scratch1, left);
2478 __ GetLeastBitsFromSmi(scratch2, right, 5);
2479 __ dsllv(scratch1, scratch1, scratch2);
2480 __ SmiTag(v0, scratch1);
2484 __ SmiUntag(scratch1, left);
2485 __ GetLeastBitsFromSmi(scratch2, right, 5);
2486 __ dsrlv(scratch1, scratch1, scratch2);
2487 __ And(scratch2, scratch1, 0x80000000);
2488 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2489 __ SmiTag(v0, scratch1);
2493 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2494 __ BranchOnOverflow(&stub_call, scratch1);
2497 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2498 __ BranchOnOverflow(&stub_call, scratch1);
2501 __ Dmulh(v0, left, right);
2502 __ dsra32(scratch2, v0, 0);
2503 __ sra(scratch1, v0, 31);
2504 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2506 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2507 __ Daddu(scratch2, right, left);
2508 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2509 DCHECK(Smi::FromInt(0) == 0);
2510 __ mov(v0, zero_reg);
2514 __ Or(v0, left, Operand(right));
2516 case Token::BIT_AND:
2517 __ And(v0, left, Operand(right));
2519 case Token::BIT_XOR:
2520 __ Xor(v0, left, Operand(right));
2527 context()->Plug(v0);
2531 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2532 // Constructor is in v0.
2533 DCHECK(lit != NULL);
2536 // No access check is needed here since the constructor is created by the class literal.
2538 Register scratch = a1;
2540 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2543 // store_slot_index points to the vector IC slot for the next store IC used.
2544 // ClassLiteral::ComputeFeedbackRequirements controls the allocation of slots
2545 // and must be updated if the number of store ICs emitted here changes.
2546 int store_slot_index = 0;
2547 for (int i = 0; i < lit->properties()->length(); i++) {
2548 ObjectLiteral::Property* property = lit->properties()->at(i);
2549 Expression* value = property->value();
2551 if (property->is_static()) {
2552 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2554 __ ld(scratch, MemOperand(sp, 0)); // prototype
2557 EmitPropertyKey(property, lit->GetIdForProperty(i));
2559 // The static prototype property is read-only. We handle the non-computed
2560 // property name case in the parser. Since this is the only case where we
2561 // need to check for an own read-only property, we special-case it here so
2562 // that we do not need to perform the check for every property.
2563 if (property->is_static() && property->is_computed_name()) {
2564 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2568 VisitForStackValue(value);
2569 EmitSetHomeObjectIfNeeded(value, 2,
2570 lit->SlotForHomeObject(value, &store_slot_index));
2572 switch (property->kind()) {
2573 case ObjectLiteral::Property::CONSTANT:
2574 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2575 case ObjectLiteral::Property::PROTOTYPE:
2577 case ObjectLiteral::Property::COMPUTED:
2578 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2581 case ObjectLiteral::Property::GETTER:
2582 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2584 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2587 case ObjectLiteral::Property::SETTER:
2588 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2590 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2599 __ CallRuntime(Runtime::kToFastProperties, 1);
2602 __ CallRuntime(Runtime::kToFastProperties, 1);
2604 // Verify that compilation exactly consumed the number of store ic slots that
2605 // the ClassLiteral node had to offer.
2606 DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
2610 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2611 __ mov(a0, result_register());
2614 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2615 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2616 CallIC(code, expr->BinaryOperationFeedbackId());
2617 patch_site.EmitPatchInfo();
2618 context()->Plug(v0);
2622 void FullCodeGenerator::EmitAssignment(Expression* expr,
2623 FeedbackVectorICSlot slot) {
2624 DCHECK(expr->IsValidReferenceExpression());
2626 Property* prop = expr->AsProperty();
2627 LhsKind assign_type = Property::GetAssignType(prop);
2629 switch (assign_type) {
2631 Variable* var = expr->AsVariableProxy()->var();
2632 EffectContext context(this);
2633 EmitVariableAssignment(var, Token::ASSIGN, slot);
2636 case NAMED_PROPERTY: {
2637 __ push(result_register()); // Preserve value.
2638 VisitForAccumulatorValue(prop->obj());
2639 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2640 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2641 __ li(StoreDescriptor::NameRegister(),
2642 Operand(prop->key()->AsLiteral()->value()));
2643 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2647 case NAMED_SUPER_PROPERTY: {
2649 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2650 VisitForAccumulatorValue(
2651 prop->obj()->AsSuperPropertyReference()->home_object());
2652 // stack: value, this; v0: home_object
2653 Register scratch = a2;
2654 Register scratch2 = a3;
2655 __ mov(scratch, result_register()); // home_object
2656 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2657 __ ld(scratch2, MemOperand(sp, 0)); // this
2658 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2659 __ sd(scratch, MemOperand(sp, 0)); // home_object
2660 // stack: this, home_object; v0: value
2661 EmitNamedSuperPropertyStore(prop);
2664 case KEYED_SUPER_PROPERTY: {
2666 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2668 prop->obj()->AsSuperPropertyReference()->home_object());
2669 VisitForAccumulatorValue(prop->key());
2670 Register scratch = a2;
2671 Register scratch2 = a3;
2672 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2673 // stack: value, this, home_object; v0: key, a3: value
2674 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2675 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2676 __ ld(scratch, MemOperand(sp, 0)); // home_object
2677 __ sd(scratch, MemOperand(sp, kPointerSize));
2678 __ sd(v0, MemOperand(sp, 0));
2679 __ Move(v0, scratch2);
2680 // stack: this, home_object, key; v0: value.
2681 EmitKeyedSuperPropertyStore(prop);
2684 case KEYED_PROPERTY: {
2685 __ push(result_register()); // Preserve value.
2686 VisitForStackValue(prop->obj());
2687 VisitForAccumulatorValue(prop->key());
2688 __ Move(StoreDescriptor::NameRegister(), result_register());
2689 __ Pop(StoreDescriptor::ValueRegister(),
2690 StoreDescriptor::ReceiverRegister());
2691 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2693 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2698 context()->Plug(v0);
2702 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2703 Variable* var, MemOperand location) {
2704 __ sd(result_register(), location);
2705 if (var->IsContextSlot()) {
2706 // RecordWrite may destroy all its register arguments.
2707 __ Move(a3, result_register());
2708 int offset = Context::SlotOffset(var->index());
2709 __ RecordWriteContextSlot(
2710 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2715 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2716 FeedbackVectorICSlot slot) {
2717 if (var->IsUnallocated()) {
2718 // Global var, const, or let.
2719 __ mov(StoreDescriptor::ValueRegister(), result_register());
2720 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2721 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2722 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2725 } else if (var->mode() == LET && op != Token::INIT_LET) {
2726 // Non-initializing assignment to let variable needs a write barrier.
2727 DCHECK(!var->IsLookupSlot());
2728 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2730 MemOperand location = VarOperand(var, a1);
2731 __ ld(a3, location);
2732 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2733 __ Branch(&assign, ne, a3, Operand(a4));
2734 __ li(a3, Operand(var->name()));
2736 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2737 // Perform the assignment.
2739 EmitStoreToStackLocalOrContextSlot(var, location);
2741 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2742 // Assignment to const variable needs a write barrier.
2743 DCHECK(!var->IsLookupSlot());
2744 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2746 MemOperand location = VarOperand(var, a1);
2747 __ ld(a3, location);
2748 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2749 __ Branch(&const_error, ne, a3, Operand(at));
2750 __ li(a3, Operand(var->name()));
2752 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2753 __ bind(&const_error);
2754 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2756 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2757 if (var->IsLookupSlot()) {
2758 // Assignment to var.
2759 __ li(a4, Operand(var->name()));
2760 __ li(a3, Operand(Smi::FromInt(language_mode())));
2761 // sp[0]  : language mode.
// sp[8]  : name.
2763 // sp[16] : context.
2764 // sp[24] : value.
2765 __ Push(v0, cp, a4, a3);
2766 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2768 // Assignment to var or initializing assignment to let/const in harmony mode.
2770 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2771 MemOperand location = VarOperand(var, a1);
2772 if (generate_debug_code_ && op == Token::INIT_LET) {
2773 // Check for an uninitialized let binding.
2774 __ ld(a2, location);
2775 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2776 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2778 EmitStoreToStackLocalOrContextSlot(var, location);
2781 } else if (op == Token::INIT_CONST_LEGACY) {
2782 // Const initializers need a write barrier.
2783 DCHECK(!var->IsParameter()); // No const parameters.
2784 if (var->IsLookupSlot()) {
2785 __ li(a0, Operand(var->name()));
2786 __ Push(v0, cp, a0); // Context and name.
2787 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2789 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2791 MemOperand location = VarOperand(var, a1);
2792 __ ld(a2, location);
2793 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2794 __ Branch(&skip, ne, a2, Operand(at));
2795 EmitStoreToStackLocalOrContextSlot(var, location);
2800 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2801 if (is_strict(language_mode())) {
2802 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2804 // Silently ignore store in sloppy mode.
2809 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2810 // Assignment to a property, using a named store IC.
2811 Property* prop = expr->target()->AsProperty();
2812 DCHECK(prop != NULL);
2813 DCHECK(prop->key()->IsLiteral());
2815 // Record source code position before IC call.
2816 SetSourcePosition(expr->position());
2817 __ mov(StoreDescriptor::ValueRegister(), result_register());
2818 __ li(StoreDescriptor::NameRegister(),
2819 Operand(prop->key()->AsLiteral()->value()));
2820 __ pop(StoreDescriptor::ReceiverRegister());
2821 if (FLAG_vector_stores) {
2822 EmitLoadStoreICSlot(expr->AssignmentSlot());
2825 CallStoreIC(expr->AssignmentFeedbackId());
2828 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2829 context()->Plug(v0);
2833 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2834 // Assignment to named property of super.
// v0 : value
2836 // stack : receiver ('this'), home_object
2837 DCHECK(prop != NULL);
2838 Literal* key = prop->key()->AsLiteral();
2839 DCHECK(key != NULL);
2841 __ Push(key->value());
2843 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2844 : Runtime::kStoreToSuper_Sloppy),
2849 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2850 // Assignment to keyed property of super.
2852 // stack : receiver ('this'), home_object, key
2853 DCHECK(prop != NULL);
2857 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2858 : Runtime::kStoreKeyedToSuper_Sloppy),
2863 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2864 // Assignment to a property, using a keyed store IC.
2866 // Record source code position before IC call.
2867 SetSourcePosition(expr->position());
2868 // Call keyed store IC.
2869 // The arguments are:
2870 // - a0 is the value,
// - a1 is the key,
2872 // - a2 is the receiver.
2873 __ mov(StoreDescriptor::ValueRegister(), result_register());
2874 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2875 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2878 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2879 if (FLAG_vector_stores) {
2880 EmitLoadStoreICSlot(expr->AssignmentSlot());
2883 CallIC(ic, expr->AssignmentFeedbackId());
2886 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2887 context()->Plug(v0);
2891 void FullCodeGenerator::VisitProperty(Property* expr) {
2892 Comment cmnt(masm_, "[ Property");
2893 Expression* key = expr->key();
2895 if (key->IsPropertyName()) {
2896 if (!expr->IsSuperAccess()) {
2897 VisitForAccumulatorValue(expr->obj());
2898 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2899 EmitNamedPropertyLoad(expr);
2901 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2903 expr->obj()->AsSuperPropertyReference()->home_object());
2904 EmitNamedSuperPropertyLoad(expr);
2907 if (!expr->IsSuperAccess()) {
2908 VisitForStackValue(expr->obj());
2909 VisitForAccumulatorValue(expr->key());
2910 __ Move(LoadDescriptor::NameRegister(), v0);
2911 __ pop(LoadDescriptor::ReceiverRegister());
2912 EmitKeyedPropertyLoad(expr);
2914 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2916 expr->obj()->AsSuperPropertyReference()->home_object());
2917 VisitForStackValue(expr->key());
2918 EmitKeyedSuperPropertyLoad(expr);
2921 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2922 context()->Plug(v0);
2926 void FullCodeGenerator::CallIC(Handle<Code> code,
2927 TypeFeedbackId id) {
2929 __ Call(code, RelocInfo::CODE_TARGET, id);
2933 // Code common for calls using the IC.
2934 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2935 Expression* callee = expr->expression();
2937 CallICState::CallType call_type =
2938 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2940 // Get the target function.
2941 if (call_type == CallICState::FUNCTION) {
2942 { StackValueContext context(this);
2943 EmitVariableLoad(callee->AsVariableProxy());
2944 PrepareForBailout(callee, NO_REGISTERS);
2946 // Push undefined as receiver. This is patched in the method prologue if it
2947 // is a sloppy mode method.
2948 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2951 // Load the function from the receiver.
2952 DCHECK(callee->IsProperty());
2953 DCHECK(!callee->AsProperty()->IsSuperAccess());
2954 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2955 EmitNamedPropertyLoad(callee->AsProperty());
2956 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2957 // Push the target function under the receiver.
2958 __ ld(at, MemOperand(sp, 0));
2960 __ sd(v0, MemOperand(sp, kPointerSize));
2963 EmitCall(expr, call_type);
2967 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2968 Expression* callee = expr->expression();
2969 DCHECK(callee->IsProperty());
2970 Property* prop = callee->AsProperty();
2971 DCHECK(prop->IsSuperAccess());
2973 SetSourcePosition(prop->position());
2974 Literal* key = prop->key()->AsLiteral();
2975 DCHECK(!key->value()->IsSmi());
2976 // Load the function from the receiver.
2977 const Register scratch = a1;
2978 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2979 VisitForAccumulatorValue(super_ref->home_object());
2980 __ mov(scratch, v0);
2981 VisitForAccumulatorValue(super_ref->this_var());
2982 __ Push(scratch, v0, v0, scratch);
2983 __ Push(key->value());
2987 // - this (receiver)
2988 // - this (receiver) <-- LoadFromSuper will pop here and below.
2991 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2993 // Replace home_object with target function.
2994 __ sd(v0, MemOperand(sp, kPointerSize));
2997 // - target function
2998 // - this (receiver)
2999 EmitCall(expr, CallICState::METHOD);
3003 // Code common for calls using the IC.
3004 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3007 VisitForAccumulatorValue(key);
3009 Expression* callee = expr->expression();
3011 // Load the function from the receiver.
3012 DCHECK(callee->IsProperty());
3013 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3014 __ Move(LoadDescriptor::NameRegister(), v0);
3015 EmitKeyedPropertyLoad(callee->AsProperty());
3016 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3018 // Push the target function under the receiver.
3019 __ ld(at, MemOperand(sp, 0));
3021 __ sd(v0, MemOperand(sp, kPointerSize));
3023 EmitCall(expr, CallICState::METHOD);
3027 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3028 Expression* callee = expr->expression();
3029 DCHECK(callee->IsProperty());
3030 Property* prop = callee->AsProperty();
3031 DCHECK(prop->IsSuperAccess());
3033 SetSourcePosition(prop->position());
3034 // Load the function from the receiver.
3035 const Register scratch = a1;
3036 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3037 VisitForAccumulatorValue(super_ref->home_object());
3038 __ Move(scratch, v0);
3039 VisitForAccumulatorValue(super_ref->this_var());
3040 __ Push(scratch, v0, v0, scratch);
3041 VisitForStackValue(prop->key());
3045 // - this (receiver)
3046 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3049 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3051 // Replace home_object with target function.
3052 __ sd(v0, MemOperand(sp, kPointerSize));
3055 // - target function
3056 // - this (receiver)
3057 EmitCall(expr, CallICState::METHOD);
3061 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3062 // Load the arguments.
3063 ZoneList<Expression*>* args = expr->arguments();
3064 int arg_count = args->length();
3065 { PreservePositionScope scope(masm()->positions_recorder());
3066 for (int i = 0; i < arg_count; i++) {
3067 VisitForStackValue(args->at(i));
3071 // Record source position of the IC call.
3072 SetSourcePosition(expr->position());
3073 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3074 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3075 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3076 // Don't assign a type feedback id to the IC, since type feedback is provided
3077 // by the vector above.
3079 RecordJSReturnSite(expr);
3080 // Restore context register.
3081 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3082 context()->DropAndPlug(1, v0);
3086 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3087 // a7: copy of the first argument or undefined if it doesn't exist.
3088 if (arg_count > 0) {
3089 __ ld(a7, MemOperand(sp, arg_count * kPointerSize));
3091 __ LoadRoot(a7, Heap::kUndefinedValueRootIndex);
3094 // a6: the enclosing function.
3095 __ ld(a6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3097 // a5: the receiver of the enclosing function.
3098 Variable* this_var = scope()->LookupThis();
3099 DCHECK_NOT_NULL(this_var);
3100 __ ld(a5, VarOperand(this_var, a5));
3102 // a4: the language mode.
3103 __ li(a4, Operand(Smi::FromInt(language_mode())));
3105 // a1: the start position of the scope the call resides in.
3106 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3108 // Do the runtime call.
3110 __ Push(a6, a5, a4, a1);
3111 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
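// The resolved function comes back in v0 and the receiver in v1; the caller
// (VisitCall) writes them back into the frame set up for the call.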
3115 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3116 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3117 Variable* this_var = super_ref->this_var()->var();
3118 GetVar(a1, this_var);
3119 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3120 Label uninitialized_this;
3121 __ Branch(&uninitialized_this, eq, a1, Operand(at));
3122 __ li(a0, Operand(this_var->name()));
3124 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3125 __ bind(&uninitialized_this);
3127 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3131 void FullCodeGenerator::VisitCall(Call* expr) {
3133 // We want to verify that RecordJSReturnSite gets called on all paths
3134 // through this function. Avoid early returns.
3135 expr->return_is_recorded_ = false;
3138 Comment cmnt(masm_, "[ Call");
3139 Expression* callee = expr->expression();
3140 Call::CallType call_type = expr->GetCallType(isolate());
3142 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3143 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3144 // to resolve the function we need to call and the receiver of the
3145 // call. Then we call the resolved function using the given arguments.
3147 ZoneList<Expression*>* args = expr->arguments();
3148 int arg_count = args->length();
3150 { PreservePositionScope pos_scope(masm()->positions_recorder());
3151 VisitForStackValue(callee);
3152 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3153 __ push(a2); // Reserved receiver slot.
3155 // Push the arguments.
3156 for (int i = 0; i < arg_count; i++) {
3157 VisitForStackValue(args->at(i));
3160 // Push a copy of the function (found below the arguments) and resolve eval.
3162 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3164 EmitResolvePossiblyDirectEval(arg_count);
3166 // The runtime call returns a pair of values in v0 (function) and
3167 // v1 (receiver). Touch up the stack with the right values.
3168 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3169 __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
3171 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3173 // Record source position for debugger.
3174 SetSourcePosition(expr->position());
3175 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3176 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3178 RecordJSReturnSite(expr);
3179 // Restore context register.
3180 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3181 context()->DropAndPlug(1, v0);
3182 } else if (call_type == Call::GLOBAL_CALL) {
3183 EmitCallWithLoadIC(expr);
3184 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3185 // Call to a lookup slot (dynamically introduced variable).
3186 VariableProxy* proxy = callee->AsVariableProxy();
3189 { PreservePositionScope scope(masm()->positions_recorder());
3190 // Generate code for loading from variables potentially shadowed
3191 // by eval-introduced variables.
3192 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3196 // Call the runtime to find the function to call (returned in v0)
3197 // and the object holding it (returned in v1).
3198 DCHECK(!context_register().is(a2));
3199 __ li(a2, Operand(proxy->name()));
3200 __ Push(context_register(), a2);
3201 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3202 __ Push(v0, v1); // Function, receiver.
3203 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3205 // If fast case code has been generated, emit code to push the
3206 // function and receiver and have the slow path jump around this code.
3208 if (done.is_linked()) {
3214 // The receiver is implicitly the global receiver. Indicate this
3215 // by passing undefined to the call function stub.
3216 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3221 // The receiver is either the global receiver or an object found
3222 // by LoadContextSlot.
3224 } else if (call_type == Call::PROPERTY_CALL) {
3225 Property* property = callee->AsProperty();
3226 bool is_named_call = property->key()->IsPropertyName();
3227 if (property->IsSuperAccess()) {
3228 if (is_named_call) {
3229 EmitSuperCallWithLoadIC(expr);
3231 EmitKeyedSuperCallWithLoadIC(expr);
3235 PreservePositionScope scope(masm()->positions_recorder());
3236 VisitForStackValue(property->obj());
3238 if (is_named_call) {
3239 EmitCallWithLoadIC(expr);
3241 EmitKeyedCallWithLoadIC(expr, property->key());
3244 } else if (call_type == Call::SUPER_CALL) {
3245 EmitSuperConstructorCall(expr);
3247 DCHECK(call_type == Call::OTHER_CALL);
3248 // Call to an arbitrary expression not handled specially above.
3249 { PreservePositionScope scope(masm()->positions_recorder());
3250 VisitForStackValue(callee);
3252 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3254 // Emit function call.
3259 // RecordJSReturnSite should have been called.
3260 DCHECK(expr->return_is_recorded_);
3265 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3266 Comment cmnt(masm_, "[ CallNew");
3267 // According to ECMA-262, section 11.2.2, page 44, the function
3268 // expression in new calls must be evaluated before the arguments.
3271 // Push constructor on the stack. If it's not a function it's used as
3272 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is ignored.
3274 DCHECK(!expr->expression()->IsSuperPropertyReference());
3275 VisitForStackValue(expr->expression());
3277 // Push the arguments ("left-to-right") on the stack.
3278 ZoneList<Expression*>* args = expr->arguments();
3279 int arg_count = args->length();
3280 for (int i = 0; i < arg_count; i++) {
3281 VisitForStackValue(args->at(i));
3284 // Call the construct call builtin that handles allocation and
3285 // constructor invocation.
3286 SetSourcePosition(expr->position());
3288 // Load function and argument count into a1 and a0.
3289 __ li(a0, Operand(arg_count));
3290 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3292 // Record call targets in unoptimized code.
3293 if (FLAG_pretenuring_call_new) {
3294 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3295 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3296 expr->CallNewFeedbackSlot().ToInt() + 1);
3299 __ li(a2, FeedbackVector());
3300 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3302 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3303 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3304 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3305 context()->Plug(v0);
3309 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3310 SuperCallReference* super_call_ref =
3311 expr->expression()->AsSuperCallReference();
3312 DCHECK_NOT_NULL(super_call_ref);
3314 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3315 VisitForStackValue(new_target_proxy);
3317 EmitLoadSuperConstructor(super_call_ref);
3318 __ push(result_register());
3320 // Push the arguments ("left-to-right") on the stack.
3321 ZoneList<Expression*>* args = expr->arguments();
3322 int arg_count = args->length();
3323 for (int i = 0; i < arg_count; i++) {
3324 VisitForStackValue(args->at(i));
3327 // Call the construct call builtin that handles allocation and
3328 // constructor invocation.
3329 SetSourcePosition(expr->position());
3331 // Load function and argument count into a1 and a0.
3332 __ li(a0, Operand(arg_count));
3333 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3335 // Record call targets in unoptimized code.
3336 if (FLAG_pretenuring_call_new) {
3338 /* TODO(dslomov): support pretenuring.
3339 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3340 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3341 expr->CallNewFeedbackSlot().ToInt() + 1);
3345 __ li(a2, FeedbackVector());
3346 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3348 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3349 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3353 RecordJSReturnSite(expr);
3355 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3356 context()->Plug(v0);
3360 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3361 ZoneList<Expression*>* args = expr->arguments();
3362 DCHECK(args->length() == 1);
3364 VisitForAccumulatorValue(args->at(0));
3366 Label materialize_true, materialize_false;
3367 Label* if_true = NULL;
3368 Label* if_false = NULL;
3369 Label* fall_through = NULL;
3370 context()->PrepareTest(&materialize_true, &materialize_false,
3371 &if_true, &if_false, &fall_through);
3373 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3375 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3377 context()->Plug(if_true, if_false);
3381 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3382 ZoneList<Expression*>* args = expr->arguments();
3383 DCHECK(args->length() == 1);
3385 VisitForAccumulatorValue(args->at(0));
3387 Label materialize_true, materialize_false;
3388 Label* if_true = NULL;
3389 Label* if_false = NULL;
3390 Label* fall_through = NULL;
3391 context()->PrepareTest(&materialize_true, &materialize_false,
3392 &if_true, &if_false, &fall_through);
3394 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3395 __ NonNegativeSmiTst(v0, at);
3396 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3398 context()->Plug(if_true, if_false);
3402 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3403 ZoneList<Expression*>* args = expr->arguments();
3404 DCHECK(args->length() == 1);
3406 VisitForAccumulatorValue(args->at(0));
3408 Label materialize_true, materialize_false;
3409 Label* if_true = NULL;
3410 Label* if_false = NULL;
3411 Label* fall_through = NULL;
3412 context()->PrepareTest(&materialize_true, &materialize_false,
3413 &if_true, &if_false, &fall_through);
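// The test below: smis are not objects, null is, and otherwise the value must
// be a non-undetectable heap object whose instance type lies in the range
// [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE].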
3415 __ JumpIfSmi(v0, if_false);
3416 __ LoadRoot(at, Heap::kNullValueRootIndex);
3417 __ Branch(if_true, eq, v0, Operand(at));
3418 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3419 // Undetectable objects behave like undefined when tested with typeof.
3420 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3421 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3422 __ Branch(if_false, ne, at, Operand(zero_reg));
3423 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3424 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3425 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3426 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3427 if_true, if_false, fall_through);
3429 context()->Plug(if_true, if_false);
3433 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3434 ZoneList<Expression*>* args = expr->arguments();
3435 DCHECK(args->length() == 1);
3437 VisitForAccumulatorValue(args->at(0));
3439 Label materialize_true, materialize_false;
3440 Label* if_true = NULL;
3441 Label* if_false = NULL;
3442 Label* fall_through = NULL;
3443 context()->PrepareTest(&materialize_true, &materialize_false,
3444 &if_true, &if_false, &fall_through);
3446 __ JumpIfSmi(v0, if_false);
3447 __ GetObjectType(v0, a1, a1);
3448 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3449 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3450 if_true, if_false, fall_through);
3452 context()->Plug(if_true, if_false);
3456 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3457 ZoneList<Expression*>* args = expr->arguments();
3458 DCHECK(args->length() == 1);
3460 VisitForAccumulatorValue(args->at(0));
3462 Label materialize_true, materialize_false;
3463 Label* if_true = NULL;
3464 Label* if_false = NULL;
3465 Label* fall_through = NULL;
3466 context()->PrepareTest(&materialize_true, &materialize_false,
3467 &if_true, &if_false, &fall_through);
3469 __ JumpIfSmi(v0, if_false);
3470 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3471 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3472 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3473 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3474 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3476 context()->Plug(if_true, if_false);
3480 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3481 CallRuntime* expr) {
3482 ZoneList<Expression*>* args = expr->arguments();
3483 DCHECK(args->length() == 1);
3485 VisitForAccumulatorValue(args->at(0));
3487 Label materialize_true, materialize_false, skip_lookup;
3488 Label* if_true = NULL;
3489 Label* if_false = NULL;
3490 Label* fall_through = NULL;
3491 context()->PrepareTest(&materialize_true, &materialize_false,
3492 &if_true, &if_false, &fall_through);
3494 __ AssertNotSmi(v0);
3496 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3497 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3498 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3499 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3501 // Check for fast case object. Generate false result for slow case object.
3502 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3503 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3504 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3505 __ Branch(if_false, eq, a2, Operand(a4));
3507 // Look for valueOf name in the descriptor array, and indicate false if
3508 // found. Since we omit an enumeration index check, if it is added via a
3509 // transition that shares its descriptor array, this is a false positive.
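// Such a false positive is conservative: it only makes this check answer
// false ('not safe') here, never true for an unsafe object.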
3510 Label entry, loop, done;
3512 // Skip loop if no descriptors are valid.
3513 __ NumberOfOwnDescriptors(a3, a1);
3514 __ Branch(&done, eq, a3, Operand(zero_reg));
3516 __ LoadInstanceDescriptors(a1, a4);
3517 // a4: descriptor array.
3518 // a3: valid entries in the descriptor array.
3519 STATIC_ASSERT(kSmiTag == 0);
3520 STATIC_ASSERT(kSmiTagSize == 1);
3522 // STATIC_ASSERT(kPointerSize == 4);
3523 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3524 __ Dmul(a3, a3, at);
3525 // Calculate location of the first key name.
3526 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3527 // Calculate the end of the descriptor array.
3529 __ dsll(a5, a3, kPointerSizeLog2);
3530 __ Daddu(a2, a2, a5);
3532 // Loop through all the keys in the descriptor array. If one of these is the
3533 // string "valueOf" the result is false.
3534 // The use of a6 to store the valueOf string assumes that it is not otherwise
3535 // used in the loop below.
3536 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3539 __ ld(a3, MemOperand(a4, 0));
3540 __ Branch(if_false, eq, a3, Operand(a6));
3541 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3543 __ Branch(&loop, ne, a4, Operand(a2));
3547 // Set the bit in the map to indicate that there is no local valueOf field.
3548 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3549 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3550 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3552 __ bind(&skip_lookup);
3554 // If no valueOf property was found on the object, check that its prototype
3555 // is the unmodified String prototype. If it is not, the result is false.
3556 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3557 __ JumpIfSmi(a2, if_false);
3558 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3559 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3560 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3561 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3562 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3563 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3565 context()->Plug(if_true, if_false);
3569 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3570 ZoneList<Expression*>* args = expr->arguments();
3571 DCHECK(args->length() == 1);
3573 VisitForAccumulatorValue(args->at(0));
3575 Label materialize_true, materialize_false;
3576 Label* if_true = NULL;
3577 Label* if_false = NULL;
3578 Label* fall_through = NULL;
3579 context()->PrepareTest(&materialize_true, &materialize_false,
3580 &if_true, &if_false, &fall_through);
3582 __ JumpIfSmi(v0, if_false);
3583 __ GetObjectType(v0, a1, a2);
3584 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3585 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3586 __ Branch(if_false);
3588 context()->Plug(if_true, if_false);
3592 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3593 ZoneList<Expression*>* args = expr->arguments();
3594 DCHECK(args->length() == 1);
3596 VisitForAccumulatorValue(args->at(0));
3598 Label materialize_true, materialize_false;
3599 Label* if_true = NULL;
3600 Label* if_false = NULL;
3601 Label* fall_through = NULL;
3602 context()->PrepareTest(&materialize_true, &materialize_false,
3603 &if_true, &if_false, &fall_through);
3605 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3606 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3607 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
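// -0.0 is the only heap number whose sign/exponent word is 0x80000000 while
// its mantissa word is zero, so the code below checks a2 (the exponent word)
// and a1 (the mantissa word) against exactly those values.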
3608 __ li(a4, 0x80000000);
3610 __ Branch(&not_nan, ne, a2, Operand(a4));
3611 __ mov(a4, zero_reg);
3615 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3616 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3618 context()->Plug(if_true, if_false);
3622 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3623 ZoneList<Expression*>* args = expr->arguments();
3624 DCHECK(args->length() == 1);
3626 VisitForAccumulatorValue(args->at(0));
3628 Label materialize_true, materialize_false;
3629 Label* if_true = NULL;
3630 Label* if_false = NULL;
3631 Label* fall_through = NULL;
3632 context()->PrepareTest(&materialize_true, &materialize_false,
3633 &if_true, &if_false, &fall_through);
3635 __ JumpIfSmi(v0, if_false);
3636 __ GetObjectType(v0, a1, a1);
3637 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3638 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3639 if_true, if_false, fall_through);
3641 context()->Plug(if_true, if_false);
3645 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 DCHECK(args->length() == 1);
3649 VisitForAccumulatorValue(args->at(0));
3651 Label materialize_true, materialize_false;
3652 Label* if_true = NULL;
3653 Label* if_false = NULL;
3654 Label* fall_through = NULL;
3655 context()->PrepareTest(&materialize_true, &materialize_false,
3656 &if_true, &if_false, &fall_through);
3658 __ JumpIfSmi(v0, if_false);
3659 __ GetObjectType(v0, a1, a1);
3660 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3661 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3663 context()->Plug(if_true, if_false);
3667 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3668 ZoneList<Expression*>* args = expr->arguments();
3669 DCHECK(args->length() == 1);
3671 VisitForAccumulatorValue(args->at(0));
3673 Label materialize_true, materialize_false;
3674 Label* if_true = NULL;
3675 Label* if_false = NULL;
3676 Label* fall_through = NULL;
3677 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3678 &if_false, &fall_through);
3680 __ JumpIfSmi(v0, if_false);
3681 Register map = a1;
3682 Register type_reg = a2;
3683 __ GetObjectType(v0, map, type_reg);
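// Subtracting the lower bound and comparing unsigned (ls) below folds the
// two-sided range check [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE] into a
// single branch.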
3684 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3685 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3686 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3687 if_true, if_false, fall_through);
3689 context()->Plug(if_true, if_false);
3693 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3694 DCHECK(expr->arguments()->length() == 0);
3696 Label materialize_true, materialize_false;
3697 Label* if_true = NULL;
3698 Label* if_false = NULL;
3699 Label* fall_through = NULL;
3700 context()->PrepareTest(&materialize_true, &materialize_false,
3701 &if_true, &if_false, &fall_through);
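// The check walks at most two frames up: read the caller's frame pointer, skip
// over an arguments adaptor frame if one is present, then compare that frame's
// marker word against StackFrame::CONSTRUCT.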
3703 // Get the frame pointer for the calling frame.
3704 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3706 // Skip the arguments adaptor frame if it exists.
3707 Label check_frame_marker;
3708 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3709 __ Branch(&check_frame_marker, ne,
3710 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3711 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3713 // Check the marker in the calling frame.
3714 __ bind(&check_frame_marker);
3715 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3716 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3717 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3718 if_true, if_false, fall_through);
3720 context()->Plug(if_true, if_false);
3724 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3725 ZoneList<Expression*>* args = expr->arguments();
3726 DCHECK(args->length() == 2);
3728 // Load the two objects into registers and perform the comparison.
3729 VisitForStackValue(args->at(0));
3730 VisitForAccumulatorValue(args->at(1));
3732 Label materialize_true, materialize_false;
3733 Label* if_true = NULL;
3734 Label* if_false = NULL;
3735 Label* fall_through = NULL;
3736 context()->PrepareTest(&materialize_true, &materialize_false,
3737 &if_true, &if_false, &fall_through);
3740 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3741 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3743 context()->Plug(if_true, if_false);
3747 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3748 ZoneList<Expression*>* args = expr->arguments();
3749 DCHECK(args->length() == 1);
3751 // ArgumentsAccessStub expects the key in a1 and the formal
3752 // parameter count in a0.
3753 VisitForAccumulatorValue(args->at(0));
3755 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3756 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3758 context()->Plug(v0);
3762 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3763 DCHECK(expr->arguments()->length() == 0);
3765 // Get the number of formal parameters.
3766 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3768 // Check if the calling frame is an arguments adaptor frame.
3769 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3770 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3771 __ Branch(&exit, ne, a3,
3772 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3774 // Arguments adaptor case: Read the arguments length from the
3776 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3779 context()->Plug(v0);
3783 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3784 ZoneList<Expression*>* args = expr->arguments();
3785 DCHECK(args->length() == 1);
3786 Label done, null, function, non_function_constructor;
3788 VisitForAccumulatorValue(args->at(0));
3790 // If the object is a smi, we return null.
3791 __ JumpIfSmi(v0, &null);
3793 // Check that the object is a JS object but take special care of JS
3794 // functions to make sure they have 'Function' as their class.
3795 // Assume that there are only two callable types and that they sit at the
3796 // two ends of the type range for JS object types; this saves extra comparisons.
3797 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3798 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3799 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3801 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3802 FIRST_SPEC_OBJECT_TYPE + 1);
3803 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3805 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3806 LAST_SPEC_OBJECT_TYPE - 1);
3807 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3808 // Assume that there is no larger type.
3809 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
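// At this point a1 holds the instance type: anything below
// FIRST_SPEC_OBJECT_TYPE already went to &null, the two callable types were
// matched against the ends of the range, and what remains is an ordinary spec
// object whose class name is derived from its map's constructor below.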
3811 // Check if the constructor in the map is a JS function.
3812 Register instance_type = a2;
3813 __ GetMapConstructor(v0, v0, a1, instance_type);
3814 __ Branch(&non_function_constructor, ne, instance_type,
3815 Operand(JS_FUNCTION_TYPE));
3817 // v0 now contains the constructor function. Grab the
3818 // instance class name from there.
3819 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3820 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3823 // Functions have class 'Function'.
3825 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3828 // Objects with a non-function constructor have class 'Object'.
3829 __ bind(&non_function_constructor);
3830 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3833 // Non-JS objects have class null.
3835 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3840 context()->Plug(v0);
3844 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3845 // Load the arguments on the stack and call the stub.
3846 SubStringStub stub(isolate());
3847 ZoneList<Expression*>* args = expr->arguments();
3848 DCHECK(args->length() == 3);
3849 VisitForStackValue(args->at(0));
3850 VisitForStackValue(args->at(1));
3851 VisitForStackValue(args->at(2));
3853 context()->Plug(v0);
3857 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3858 // Load the arguments on the stack and call the stub.
3859 RegExpExecStub stub(isolate());
3860 ZoneList<Expression*>* args = expr->arguments();
3861 DCHECK(args->length() == 4);
3862 VisitForStackValue(args->at(0));
3863 VisitForStackValue(args->at(1));
3864 VisitForStackValue(args->at(2));
3865 VisitForStackValue(args->at(3));
3867 context()->Plug(v0);
3871 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3872 ZoneList<Expression*>* args = expr->arguments();
3873 DCHECK(args->length() == 1);
3875 VisitForAccumulatorValue(args->at(0)); // Load the object.
3878 // If the object is a smi return the object.
3879 __ JumpIfSmi(v0, &done);
3880 // If the object is not a value type, return the object.
3881 __ GetObjectType(v0, a1, a1);
3882 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3884 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3887 context()->Plug(v0);
3891 void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
3892 ZoneList<Expression*>* args = expr->arguments();
3893 DCHECK_EQ(1, args->length());
3895 VisitForAccumulatorValue(args->at(0)); // Load the object.
3897 Label done, not_date_object;
3898 Register object = v0;
3899 Register result = v0;
3900 Register scratch1 = a1;
3902 __ JumpIfSmi(object, &not_date_object);
3903 __ GetObjectType(object, scratch1, scratch1);
3904 __ Branch(&done, eq, scratch1, Operand(JS_DATE_TYPE));
3905 __ bind(&not_date_object);
3906 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3909 context()->Plug(result);
3913 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3914 ZoneList<Expression*>* args = expr->arguments();
3915 DCHECK(args->length() == 2);
3916 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3917 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3919 VisitForAccumulatorValue(args->at(0)); // Load the object.
3921 Register object = v0;
3922 Register result = v0;
3923 Register scratch0 = t1;
3924 Register scratch1 = a1;
3926 if (index->value() == 0) {
3927 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3929 Label runtime, done;
3930 if (index->value() < JSDate::kFirstUncachedField) {
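// Cached date fields are only trusted while the object's cache stamp matches
// the isolate-wide date cache stamp (assumed to be bumped when, e.g., the
// timezone changes); on a mismatch the code branches to &runtime, which calls
// the C date-field helper below.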
3931 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3932 __ li(scratch1, Operand(stamp));
3933 __ ld(scratch1, MemOperand(scratch1));
3934 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3935 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3936 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3937 kPointerSize * index->value()));
3941 __ PrepareCallCFunction(2, scratch1);
3942 __ li(a1, Operand(index));
3943 __ Move(a0, object);
3944 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3948 context()->Plug(result);
3952 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3953 ZoneList<Expression*>* args = expr->arguments();
3954 DCHECK_EQ(3, args->length());
3956 Register string = v0;
3957 Register index = a1;
3958 Register value = a2;
3960 VisitForStackValue(args->at(0)); // index
3961 VisitForStackValue(args->at(1)); // value
3962 VisitForAccumulatorValue(args->at(2)); // string
3963 __ Pop(index, value);
3965 if (FLAG_debug_code) {
3966 __ SmiTst(value, at);
3967 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3968 __ SmiTst(index, at);
3969 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3970 __ SmiUntag(index, index);
3971 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3972 Register scratch = t1;
3973 __ EmitSeqStringSetCharCheck(
3974 string, index, value, scratch, one_byte_seq_type);
3975 __ SmiTag(index, index);
3978 __ SmiUntag(value, value);
3981 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3983 __ Daddu(at, at, index);
3984 __ sb(value, MemOperand(at));
3985 context()->Plug(string);
3989 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3990 ZoneList<Expression*>* args = expr->arguments();
3991 DCHECK_EQ(3, args->length());
3993 Register string = v0;
3994 Register index = a1;
3995 Register value = a2;
3997 VisitForStackValue(args->at(0)); // index
3998 VisitForStackValue(args->at(1)); // value
3999 VisitForAccumulatorValue(args->at(2)); // string
4000 __ Pop(index, value);
4002 if (FLAG_debug_code) {
4003 __ SmiTst(value, at);
4004 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
4005 __ SmiTst(index, at);
4006 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
4007 __ SmiUntag(index, index);
4008 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4009 Register scratch = t1;
4010 __ EmitSeqStringSetCharCheck(
4011 string, index, value, scratch, two_byte_seq_type);
4012 __ SmiTag(index, index);
4015 __ SmiUntag(value, value);
4018 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
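// With the 64-bit smi layout assumed here (32-bit payload kept in the upper
// word), shifting the smi index right by 31 both untags it and multiplies it
// by two, i.e. it yields the byte offset of the two-byte character directly.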
4019 __ dsra(index, index, 32 - 1);
4020 __ Daddu(at, at, index);
4021 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
4022 __ sh(value, MemOperand(at));
4023 context()->Plug(string);
4027 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
4028 // Load the arguments on the stack and call the runtime function.
4029 ZoneList<Expression*>* args = expr->arguments();
4030 DCHECK(args->length() == 2);
4031 VisitForStackValue(args->at(0));
4032 VisitForStackValue(args->at(1));
4033 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4035 context()->Plug(v0);
4039 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4040 ZoneList<Expression*>* args = expr->arguments();
4041 DCHECK(args->length() == 2);
4043 VisitForStackValue(args->at(0)); // Load the object.
4044 VisitForAccumulatorValue(args->at(1)); // Load the value.
4045 __ pop(a1); // v0 = value. a1 = object.
4048 // If the object is a smi, return the value.
4049 __ JumpIfSmi(a1, &done);
4051 // If the object is not a value type, return the value.
4052 __ GetObjectType(a1, a2, a2);
4053 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
4056 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
4057 // Update the write barrier. Save the value as it will be
4058 // overwritten by the write barrier code and is needed afterward.
4060 __ RecordWriteField(
4061 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
4064 context()->Plug(v0);
4068 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4069 ZoneList<Expression*>* args = expr->arguments();
4070 DCHECK_EQ(args->length(), 1);
4072 // Load the argument into a0 and call the stub.
4073 VisitForAccumulatorValue(args->at(0));
4074 __ mov(a0, result_register());
4076 NumberToStringStub stub(isolate());
4078 context()->Plug(v0);
4082 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4083 ZoneList<Expression*>* args = expr->arguments();
4084 DCHECK(args->length() == 1);
4086 VisitForAccumulatorValue(args->at(0));
4089 StringCharFromCodeGenerator generator(v0, a1);
4090 generator.GenerateFast(masm_);
4093 NopRuntimeCallHelper call_helper;
4094 generator.GenerateSlow(masm_, call_helper);
4097 context()->Plug(a1);
4101 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4102 ZoneList<Expression*>* args = expr->arguments();
4103 DCHECK(args->length() == 2);
4105 VisitForStackValue(args->at(0));
4106 VisitForAccumulatorValue(args->at(1));
4107 __ mov(a0, result_register());
4109 Register object = a1;
4110 Register index = a0;
4111 Register result = v0;
4115 Label need_conversion;
4116 Label index_out_of_range;
4118 StringCharCodeAtGenerator generator(object,
4123 &index_out_of_range,
4124 STRING_INDEX_IS_NUMBER);
4125 generator.GenerateFast(masm_);
4128 __ bind(&index_out_of_range);
4129 // When the index is out of range, the spec requires us to return NaN.
4131 __ LoadRoot(result, Heap::kNanValueRootIndex);
4134 __ bind(&need_conversion);
4135 // Load the undefined value into the result register, which will
4136 // trigger conversion.
4137 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4140 NopRuntimeCallHelper call_helper;
4141 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4144 context()->Plug(result);
4148 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4149 ZoneList<Expression*>* args = expr->arguments();
4150 DCHECK(args->length() == 2);
4152 VisitForStackValue(args->at(0));
4153 VisitForAccumulatorValue(args->at(1));
4154 __ mov(a0, result_register());
4156 Register object = a1;
4157 Register index = a0;
4158 Register scratch = a3;
4159 Register result = v0;
4163 Label need_conversion;
4164 Label index_out_of_range;
4166 StringCharAtGenerator generator(object,
4172 &index_out_of_range,
4173 STRING_INDEX_IS_NUMBER);
4174 generator.GenerateFast(masm_);
4177 __ bind(&index_out_of_range);
4178 // When the index is out of range, the spec requires us to return
4179 // the empty string.
4180 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4183 __ bind(&need_conversion);
4184 // Move smi zero into the result register, which will trigger conversion.
4186 __ li(result, Operand(Smi::FromInt(0)));
4189 NopRuntimeCallHelper call_helper;
4190 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4193 context()->Plug(result);
4197 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4198 ZoneList<Expression*>* args = expr->arguments();
4199 DCHECK_EQ(2, args->length());
4200 VisitForStackValue(args->at(0));
4201 VisitForAccumulatorValue(args->at(1));
4204 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4205 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4207 context()->Plug(v0);
4211 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4212 ZoneList<Expression*>* args = expr->arguments();
4213 DCHECK_EQ(2, args->length());
4215 VisitForStackValue(args->at(0));
4216 VisitForStackValue(args->at(1));
4218 StringCompareStub stub(isolate());
4220 context()->Plug(v0);
4224 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4225 ZoneList<Expression*>* args = expr->arguments();
4226 DCHECK(args->length() >= 2);
4228 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4229 for (int i = 0; i < arg_count + 1; i++) {
4230 VisitForStackValue(args->at(i));
4232 VisitForAccumulatorValue(args->last()); // Function.
4234 Label runtime, done;
4235 // Check for non-function argument (including proxy).
4236 __ JumpIfSmi(v0, &runtime);
4237 __ GetObjectType(v0, a1, a1);
4238 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4240 // InvokeFunction requires the function in a1. Move it in there.
4241 __ mov(a1, result_register());
4242 ParameterCount count(arg_count);
4243 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4244 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4249 __ CallRuntime(Runtime::kCall, args->length());
4252 context()->Plug(v0);
4256 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4257 ZoneList<Expression*>* args = expr->arguments();
4258 DCHECK(args->length() == 2);
4261 VisitForStackValue(args->at(0));
4264 VisitForStackValue(args->at(1));
4265 __ CallRuntime(Runtime::kGetPrototype, 1);
4266 __ Push(result_register());
4268 // Check if the calling frame is an arguments adaptor frame.
4269 Label adaptor_frame, args_set_up, runtime;
4270 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4271 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4272 __ Branch(&adaptor_frame, eq, a3,
4273 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4274 // default constructor has no arguments, so no adaptor frame means no args.
4275 __ mov(a0, zero_reg);
4276 __ Branch(&args_set_up);
4278 // Copy arguments from adaptor frame.
4280 __ bind(&adaptor_frame);
4281 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4282 __ SmiUntag(a1, a1);
4284 // Subtract 1 from arguments count, for new.target.
4285 __ Daddu(a1, a1, Operand(-1));
4288 // Get arguments pointer in a2.
4289 __ dsll(at, a1, kPointerSizeLog2);
4290 __ Daddu(a2, a2, Operand(at));
4291 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4294 // Pre-decrement a2 with kPointerSize on each iteration.
4295 // Pre-decrement in order to skip receiver.
4296 __ Daddu(a2, a2, Operand(-kPointerSize));
4297 __ ld(a3, MemOperand(a2));
4299 __ Daddu(a1, a1, Operand(-1));
4300 __ Branch(&loop, ne, a1, Operand(zero_reg));
4303 __ bind(&args_set_up);
4304 __ dsll(at, a0, kPointerSizeLog2);
4305 __ Daddu(at, at, Operand(sp));
4306 __ ld(a1, MemOperand(at, 0));
4307 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4309 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4310 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4314 context()->Plug(result_register());
4318 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4319 RegExpConstructResultStub stub(isolate());
4320 ZoneList<Expression*>* args = expr->arguments();
4321 DCHECK(args->length() == 3);
4322 VisitForStackValue(args->at(0));
4323 VisitForStackValue(args->at(1));
4324 VisitForAccumulatorValue(args->at(2));
4325 __ mov(a0, result_register());
4329 context()->Plug(v0);
4333 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4334 ZoneList<Expression*>* args = expr->arguments();
4335 DCHECK_EQ(2, args->length());
4337 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4338 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4340 Handle<FixedArray> jsfunction_result_caches(
4341 isolate()->native_context()->jsfunction_result_caches());
4342 if (jsfunction_result_caches->length() <= cache_id) {
4343 __ Abort(kAttemptToUseUndefinedCache);
4344 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4345 context()->Plug(v0);
4349 VisitForAccumulatorValue(args->at(1));
4352 Register cache = a1;
4353 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4354 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4357 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4359 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4362 Label done, not_found;
4363 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
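// The result cache keeps a "finger": a smi index into its backing FixedArray
// that points at a (key, value) pair, presumably the most recently hit one.
// The fast path below only compares the key at the finger; any other key falls
// back to the runtime lookup at &not_found.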
4364 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4365 // a2 now holds finger offset as a smi.
4366 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4367 // a3 now points to the start of fixed array elements.
4368 __ SmiScale(at, a2, kPointerSizeLog2);
4369 __ daddu(a3, a3, at);
4370 // a3 now points to key of indexed element of cache.
4371 __ ld(a2, MemOperand(a3));
4372 __ Branch(&not_found, ne, key, Operand(a2));
4374 __ ld(v0, MemOperand(a3, kPointerSize));
4377 __ bind(&not_found);
4378 // Call runtime to perform the lookup.
4379 __ Push(cache, key);
4380 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4383 context()->Plug(v0);
4387 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4388 ZoneList<Expression*>* args = expr->arguments();
4389 VisitForAccumulatorValue(args->at(0));
4391 Label materialize_true, materialize_false;
4392 Label* if_true = NULL;
4393 Label* if_false = NULL;
4394 Label* fall_through = NULL;
4395 context()->PrepareTest(&materialize_true, &materialize_false,
4396 &if_true, &if_false, &fall_through);
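// The string hash field packs several flag bits; kContainsCachedArrayIndexMask
// is assumed to be zero exactly when the field holds a cached array index, so
// a zero AND-result below is split to if_true.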
4398 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4399 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4401 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4402 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4404 context()->Plug(if_true, if_false);
4408 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4409 ZoneList<Expression*>* args = expr->arguments();
4410 DCHECK(args->length() == 1);
4411 VisitForAccumulatorValue(args->at(0));
4413 __ AssertString(v0);
4415 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4416 __ IndexFromHash(v0, v0);
4418 context()->Plug(v0);
4422 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4423 Label bailout, done, one_char_separator, long_separator,
4424 non_trivial_array, not_size_one_array, loop,
4425 empty_separator_loop, one_char_separator_loop,
4426 one_char_separator_loop_entry, long_separator_loop;
4427 ZoneList<Expression*>* args = expr->arguments();
4428 DCHECK(args->length() == 2);
4429 VisitForStackValue(args->at(1));
4430 VisitForAccumulatorValue(args->at(0));
4432 // All aliases of the same register have disjoint lifetimes.
4433 Register array = v0;
4434 Register elements = no_reg; // Will be v0.
4435 Register result = no_reg; // Will be v0.
4436 Register separator = a1;
4437 Register array_length = a2;
4438 Register result_pos = no_reg; // Will be a2.
4439 Register string_length = a3;
4440 Register string = a4;
4441 Register element = a5;
4442 Register elements_end = a6;
4443 Register scratch1 = a7;
4444 Register scratch2 = t1;
4445 Register scratch3 = t0;
4447 // Separator operand is on the stack.
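// Rough shape of the fast path below: (1) validate the array and sum the
// lengths of its sequential one-byte string elements, (2) add the separators'
// total length and allocate the result string, (3) copy everything in one of
// three loops specialized for an empty, one-character or longer separator.
// Anything unexpected jumps to &bailout, which loads undefined into v0,
// presumably so the generic JavaScript join can take over.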
4450 // Check that the array is a JSArray.
4451 __ JumpIfSmi(array, &bailout);
4452 __ GetObjectType(array, scratch1, scratch2);
4453 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4455 // Check that the array has fast elements.
4456 __ CheckFastElements(scratch1, scratch2, &bailout);
4458 // If the array has length zero, return the empty string.
4459 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4460 __ SmiUntag(array_length);
4461 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4462 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4465 __ bind(&non_trivial_array);
4467 // Get the FixedArray containing array's elements.
4469 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4470 array = no_reg; // End of array's live range.
4472 // Check that all array elements are sequential one-byte strings, and
4473 // accumulate the sum of their lengths, as a smi-encoded value.
4474 __ mov(string_length, zero_reg);
4476 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4477 __ dsll(elements_end, array_length, kPointerSizeLog2);
4478 __ Daddu(elements_end, element, elements_end);
4479 // Loop condition: while (element < elements_end).
4480 // Live values in registers:
4481 // elements: Fixed array of strings.
4482 // array_length: Length of the fixed array of strings (not smi)
4483 // separator: Separator string
4484 // string_length: Accumulated sum of string lengths (smi).
4485 // element: Current array element.
4486 // elements_end: Array end.
4487 if (generate_debug_code_) {
4488 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4492 __ ld(string, MemOperand(element));
4493 __ Daddu(element, element, kPointerSize);
4494 __ JumpIfSmi(string, &bailout);
4495 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4496 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4497 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4498 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4499 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4500 __ BranchOnOverflow(&bailout, scratch3);
4501 __ Branch(&loop, lt, element, Operand(elements_end));
4503 // If array_length is 1, return elements[0], a string.
4504 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4505 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4508 __ bind(&not_size_one_array);
4510 // Live values in registers:
4511 // separator: Separator string
4512 // array_length: Length of the array.
4513 // string_length: Sum of string lengths (smi).
4514 // elements: FixedArray of strings.
4516 // Check that the separator is a flat one-byte string.
4517 __ JumpIfSmi(separator, &bailout);
4518 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4519 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4520 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4522 // Add (separator length times array_length) - separator length to the
4523 // string_length to get the length of the result string. array_length is not
4524 // a smi, but the other values are, so the result is a smi.
4525 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4526 __ Dsubu(string_length, string_length, Operand(scratch1));
4527 __ SmiUntag(scratch1);
4528 __ Dmul(scratch2, array_length, scratch1);
4529 // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit result are zero.
4531 __ dsra32(scratch1, scratch2, 0);
4532 __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4533 __ SmiUntag(string_length);
4534 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4535 __ BranchOnOverflow(&bailout, scratch3);
4537 // Get the first element in the array to free up the elements register to be used for the result.
4540 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4541 result = elements; // End of live range for elements.
4543 // Live values in registers:
4544 // element: First array element
4545 // separator: Separator string
4546 // string_length: Length of result string (not smi)
4547 // array_length: Length of the array.
4548 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4549 elements_end, &bailout);
4550 // Prepare for looping. Set up elements_end to the end of the array, and set
4551 // result_pos to the position in the result where the first character goes.
4553 __ dsll(elements_end, array_length, kPointerSizeLog2);
4554 __ Daddu(elements_end, element, elements_end);
4555 result_pos = array_length; // End of live range for array_length.
4556 array_length = no_reg;
4557 __ Daddu(result_pos,
4559 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4561 // Check the length of the separator.
4562 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4563 __ li(at, Operand(Smi::FromInt(1)));
4564 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4565 __ Branch(&long_separator, gt, scratch1, Operand(at));
4567 // Empty separator case.
4568 __ bind(&empty_separator_loop);
4569 // Live values in registers:
4570 // result_pos: the position to which we are currently copying characters.
4571 // element: Current array element.
4572 // elements_end: Array end.
4574 // Copy next array element to the result.
4575 __ ld(string, MemOperand(element));
4576 __ Daddu(element, element, kPointerSize);
4577 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4578 __ SmiUntag(string_length);
4579 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4580 __ CopyBytes(string, result_pos, string_length, scratch1);
4581 // End while (element < elements_end).
4582 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4583 DCHECK(result.is(v0));
4586 // One-character separator case.
4587 __ bind(&one_char_separator);
4588 // Replace separator with its one-byte character value.
4589 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4590 // Jump into the loop after the code that copies the separator, so the first
4591 // element is not preceded by a separator.
4592 __ jmp(&one_char_separator_loop_entry);
4594 __ bind(&one_char_separator_loop);
4595 // Live values in registers:
4596 // result_pos: the position to which we are currently copying characters.
4597 // element: Current array element.
4598 // elements_end: Array end.
4599 // separator: Single separator one-byte char (in lower byte).
4601 // Copy the separator character to the result.
4602 __ sb(separator, MemOperand(result_pos));
4603 __ Daddu(result_pos, result_pos, 1);
4605 // Copy next array element to the result.
4606 __ bind(&one_char_separator_loop_entry);
4607 __ ld(string, MemOperand(element));
4608 __ Daddu(element, element, kPointerSize);
4609 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4610 __ SmiUntag(string_length);
4611 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4612 __ CopyBytes(string, result_pos, string_length, scratch1);
4613 // End while (element < elements_end).
4614 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4615 DCHECK(result.is(v0));
4618 // Long separator case (separator is more than one character). Entry is at the
4619 // label long_separator below.
4620 __ bind(&long_separator_loop);
4621 // Live values in registers:
4622 // result_pos: the position to which we are currently copying characters.
4623 // element: Current array element.
4624 // elements_end: Array end.
4625 // separator: Separator string.
4627 // Copy the separator to the result.
4628 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4629 __ SmiUntag(string_length);
4632 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4633 __ CopyBytes(string, result_pos, string_length, scratch1);
4635 __ bind(&long_separator);
4636 __ ld(string, MemOperand(element));
4637 __ Daddu(element, element, kPointerSize);
4638 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4639 __ SmiUntag(string_length);
4640 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4641 __ CopyBytes(string, result_pos, string_length, scratch1);
4642 // End while (element < elements_end).
4643 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4644 DCHECK(result.is(v0));
4648 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4650 context()->Plug(v0);
4654 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4655 DCHECK(expr->arguments()->length() == 0);
4656 ExternalReference debug_is_active =
4657 ExternalReference::debug_is_active_address(isolate());
4658 __ li(at, Operand(debug_is_active));
4659 __ lbu(v0, MemOperand(at));
4661 context()->Plug(v0);
4665 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4666 // Assert: expr === CallRuntime("ReflectConstruct")
4667 DCHECK_EQ(1, expr->arguments()->length());
4668 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4670 ZoneList<Expression*>* args = call->arguments();
4671 DCHECK_EQ(3, args->length());
4673 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4674 DCHECK_NOT_NULL(super_call_ref);
4676 // Load ReflectConstruct function
4677 EmitLoadJSRuntimeFunction(call);
4679 // Push the target function under the receiver.
4680 __ ld(at, MemOperand(sp, 0));
4682 __ sd(v0, MemOperand(sp, kPointerSize));
4684 // Push super constructor
4685 EmitLoadSuperConstructor(super_call_ref);
4686 __ Push(result_register());
4688 // Push arguments array
4689 VisitForStackValue(args->at(1));
4692 DCHECK(args->at(2)->IsVariableProxy());
4693 VisitForStackValue(args->at(2));
4695 EmitCallJSRuntimeFunction(call);
4697 // Restore context register.
4698 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4699 context()->DropAndPlug(1, v0);
4701 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4702 EmitInitializeThisAfterSuper(super_call_ref);
4706 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4707 // Push the builtins object as the receiver.
4708 Register receiver = LoadDescriptor::ReceiverRegister();
4709 __ ld(receiver, GlobalObjectOperand());
4710 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4713 // Load the function from the receiver.
4714 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4715 __ li(LoadDescriptor::SlotRegister(),
4716 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4717 CallLoadIC(NOT_CONTEXTUAL);
4721 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4722 ZoneList<Expression*>* args = expr->arguments();
4723 int arg_count = args->length();
4725 // Record source position of the IC call.
4726 SetSourcePosition(expr->position());
4727 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4728 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4733 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4734 ZoneList<Expression*>* args = expr->arguments();
4735 int arg_count = args->length();
4737 if (expr->is_jsruntime()) {
4738 Comment cmnt(masm_, "[ CallRuntime");
4739 EmitLoadJSRuntimeFunction(expr);
4741 // Push the target function under the receiver.
4742 __ ld(at, MemOperand(sp, 0));
4744 __ sd(v0, MemOperand(sp, kPointerSize));
4746 // Push the arguments ("left-to-right").
4747 for (int i = 0; i < arg_count; i++) {
4748 VisitForStackValue(args->at(i));
4751 EmitCallJSRuntimeFunction(expr);
4753 // Restore context register.
4754 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4756 context()->DropAndPlug(1, v0);
4758 const Runtime::Function* function = expr->function();
4759 switch (function->function_id) {
4760 #define CALL_INTRINSIC_GENERATOR(Name) \
4761 case Runtime::kInline##Name: { \
4762 Comment cmnt(masm_, "[ Inline" #Name); \
4763 return Emit##Name(expr); \
4765 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4766 #undef CALL_INTRINSIC_GENERATOR
4768 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4769 // Push the arguments ("left-to-right").
4770 for (int i = 0; i < arg_count; i++) {
4771 VisitForStackValue(args->at(i));
4774 // Call the C runtime function.
4775 __ CallRuntime(expr->function(), arg_count);
4776 context()->Plug(v0);
4783 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4784 switch (expr->op()) {
4785 case Token::DELETE: {
4786 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4787 Property* property = expr->expression()->AsProperty();
4788 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4790 if (property != NULL) {
4791 VisitForStackValue(property->obj());
4792 VisitForStackValue(property->key());
4793 __ li(a1, Operand(Smi::FromInt(language_mode())));
4795 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4796 context()->Plug(v0);
4797 } else if (proxy != NULL) {
4798 Variable* var = proxy->var();
4799 // Delete of an unqualified identifier is disallowed in strict mode
4800 // but "delete this" is allowed.
4801 DCHECK(is_sloppy(language_mode()) || var->is_this());
4802 if (var->IsUnallocated()) {
4803 __ ld(a2, GlobalObjectOperand());
4804 __ li(a1, Operand(var->name()));
4805 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4806 __ Push(a2, a1, a0);
4807 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4808 context()->Plug(v0);
4809 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4810 // Result of deleting non-global, non-dynamic variables is false.
4811 // The subexpression does not have side effects.
4812 context()->Plug(var->is_this());
4814 // Non-global variable. Call the runtime to try to delete from the
4815 // context where the variable was introduced.
4816 DCHECK(!context_register().is(a2));
4817 __ li(a2, Operand(var->name()));
4818 __ Push(context_register(), a2);
4819 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4820 context()->Plug(v0);
4823 // Result of deleting non-property, non-variable reference is true.
4824 // The subexpression may have side effects.
4825 VisitForEffect(expr->expression());
4826 context()->Plug(true);
4832 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4833 VisitForEffect(expr->expression());
4834 context()->Plug(Heap::kUndefinedValueRootIndex);
4839 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4840 if (context()->IsEffect()) {
4841 // Unary NOT has no side effects so it's only necessary to visit the
4842 // subexpression. Match the optimizing compiler by not branching.
4843 VisitForEffect(expr->expression());
4844 } else if (context()->IsTest()) {
4845 const TestContext* test = TestContext::cast(context());
4846 // The labels are swapped for the recursive call.
4847 VisitForControl(expr->expression(),
4848 test->false_label(),
4850 test->fall_through());
4851 context()->Plug(test->true_label(), test->false_label());
4853 // We handle value contexts explicitly rather than simply visiting
4854 // for control and plugging the control flow into the context,
4855 // because we need to prepare a pair of extra administrative AST ids
4856 // for the optimizing compiler.
4857 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4858 Label materialize_true, materialize_false, done;
4859 VisitForControl(expr->expression(),
4863 __ bind(&materialize_true);
4864 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4865 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4866 if (context()->IsStackValue()) __ push(v0);
4868 __ bind(&materialize_false);
4869 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4870 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4871 if (context()->IsStackValue()) __ push(v0);
4877 case Token::TYPEOF: {
4878 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4880 AccumulatorValueContext context(this);
4881 VisitForTypeofValue(expr->expression());
4884 TypeofStub typeof_stub(isolate());
4885 __ CallStub(&typeof_stub);
4886 context()->Plug(v0);
4896 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4897 DCHECK(expr->expression()->IsValidReferenceExpression());
4899 Comment cmnt(masm_, "[ CountOperation");
4900 SetSourcePosition(expr->position());
4902 Property* prop = expr->expression()->AsProperty();
4903 LhsKind assign_type = Property::GetAssignType(prop);
4905 // Evaluate expression and get value.
4906 if (assign_type == VARIABLE) {
4907 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4908 AccumulatorValueContext context(this);
4909 EmitVariableLoad(expr->expression()->AsVariableProxy());
4911 // Reserve space for result of postfix operation.
4912 if (expr->is_postfix() && !context()->IsEffect()) {
4913 __ li(at, Operand(Smi::FromInt(0)));
4916 switch (assign_type) {
4917 case NAMED_PROPERTY: {
4918 // Put the object both on the stack and in the register.
4919 VisitForStackValue(prop->obj());
4920 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4921 EmitNamedPropertyLoad(prop);
4925 case NAMED_SUPER_PROPERTY: {
4926 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4927 VisitForAccumulatorValue(
4928 prop->obj()->AsSuperPropertyReference()->home_object());
4929 __ Push(result_register());
4930 const Register scratch = a1;
4931 __ ld(scratch, MemOperand(sp, kPointerSize));
4932 __ Push(scratch, result_register());
4933 EmitNamedSuperPropertyLoad(prop);
4937 case KEYED_SUPER_PROPERTY: {
4938 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4939 VisitForAccumulatorValue(
4940 prop->obj()->AsSuperPropertyReference()->home_object());
4941 const Register scratch = a1;
4942 const Register scratch1 = a4;
4943 __ Move(scratch, result_register());
4944 VisitForAccumulatorValue(prop->key());
4945 __ Push(scratch, result_register());
4946 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
4947 __ Push(scratch1, scratch, result_register());
4948 EmitKeyedSuperPropertyLoad(prop);
4952 case KEYED_PROPERTY: {
4953 VisitForStackValue(prop->obj());
4954 VisitForStackValue(prop->key());
4955 __ ld(LoadDescriptor::ReceiverRegister(),
4956 MemOperand(sp, 1 * kPointerSize));
4957 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4958 EmitKeyedPropertyLoad(prop);
4967 // We need a second deoptimization point after loading the value
4968 // in case evaluating the property load may have a side effect.
4969 if (assign_type == VARIABLE) {
4970 PrepareForBailout(expr->expression(), TOS_REG);
4972 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4975 // Inline smi case if we are in a loop.
4976 Label stub_call, done;
4977 JumpPatchSite patch_site(masm_);
4979 int count_value = expr->op() == Token::INC ? 1 : -1;
4981 if (ShouldInlineSmiCase(expr->op())) {
4983 patch_site.EmitJumpIfNotSmi(v0, &slow);
4985 // Save result for postfix expressions.
4986 if (expr->is_postfix()) {
4987 if (!context()->IsEffect()) {
4988 // Save the result on the stack. If we have a named or keyed property
4989 // we store the result under the receiver that is currently on top
4991 switch (assign_type) {
4995 case NAMED_PROPERTY:
4996 __ sd(v0, MemOperand(sp, kPointerSize));
4998 case NAMED_SUPER_PROPERTY:
4999 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5001 case KEYED_PROPERTY:
5002 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5004 case KEYED_SUPER_PROPERTY:
5005 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
5011 Register scratch1 = a1;
5012 Register scratch2 = a4;
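// Fast path: v0 already holds a smi and count_value is smi-tagged below, so
// the whole increment is a single tagged addition. AdduAndCheckForOverflow
// leaves an overflow indicator in scratch2; only the overflow case undoes the
// addition and falls back to the BinaryOpIC at &stub_call.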
5013 __ li(scratch1, Operand(Smi::FromInt(count_value)));
5014 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
5015 __ BranchOnNoOverflow(&done, scratch2);
5016 // Call stub. Undo operation first.
5021 ToNumberStub convert_stub(isolate());
5022 __ CallStub(&convert_stub);
5023 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5025 // Save result for postfix expressions.
5026 if (expr->is_postfix()) {
5027 if (!context()->IsEffect()) {
5028 // Save the result on the stack. If we have a named or keyed property
5029 // we store the result under the receiver that is currently on top
5031 switch (assign_type) {
5035 case NAMED_PROPERTY:
5036 __ sd(v0, MemOperand(sp, kPointerSize));
5038 case NAMED_SUPER_PROPERTY:
5039 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5041 case KEYED_PROPERTY:
5042 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5044 case KEYED_SUPER_PROPERTY:
5045 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
5051 __ bind(&stub_call);
5053 __ li(a0, Operand(Smi::FromInt(count_value)));
5055 // Record position before stub call.
5056 SetSourcePosition(expr->position());
5058 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
5059 strength(language_mode())).code();
5060 CallIC(code, expr->CountBinOpFeedbackId());
5061 patch_site.EmitPatchInfo();
5064 // Store the value returned in v0.
5065 switch (assign_type) {
5067 if (expr->is_postfix()) {
5068 { EffectContext context(this);
5069 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5070 Token::ASSIGN, expr->CountSlot());
5071 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5074 // For all contexts except EffectConstant we have the result on
5075 // top of the stack.
5076 if (!context()->IsEffect()) {
5077 context()->PlugTOS();
5080 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5082 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5083 context()->Plug(v0);
5086 case NAMED_PROPERTY: {
5087 __ mov(StoreDescriptor::ValueRegister(), result_register());
5088 __ li(StoreDescriptor::NameRegister(),
5089 Operand(prop->key()->AsLiteral()->value()));
5090 __ pop(StoreDescriptor::ReceiverRegister());
5091 if (FLAG_vector_stores) {
5092 EmitLoadStoreICSlot(expr->CountSlot());
5095 CallStoreIC(expr->CountStoreFeedbackId());
5097 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5098 if (expr->is_postfix()) {
5099 if (!context()->IsEffect()) {
5100 context()->PlugTOS();
5103 context()->Plug(v0);
5107 case NAMED_SUPER_PROPERTY: {
5108 EmitNamedSuperPropertyStore(prop);
5109 if (expr->is_postfix()) {
5110 if (!context()->IsEffect()) {
5111 context()->PlugTOS();
5114 context()->Plug(v0);
5118 case KEYED_SUPER_PROPERTY: {
5119 EmitKeyedSuperPropertyStore(prop);
5120 if (expr->is_postfix()) {
5121 if (!context()->IsEffect()) {
5122 context()->PlugTOS();
5125 context()->Plug(v0);
5129 case KEYED_PROPERTY: {
5130 __ mov(StoreDescriptor::ValueRegister(), result_register());
5131 __ Pop(StoreDescriptor::ReceiverRegister(),
5132 StoreDescriptor::NameRegister());
5134 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5135 if (FLAG_vector_stores) {
5136 EmitLoadStoreICSlot(expr->CountSlot());
5139 CallIC(ic, expr->CountStoreFeedbackId());
5141 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5142 if (expr->is_postfix()) {
5143 if (!context()->IsEffect()) {
5144 context()->PlugTOS();
5147 context()->Plug(v0);
5155 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5156 DCHECK(!context()->IsEffect());
5157 DCHECK(!context()->IsTest());
5158 VariableProxy* proxy = expr->AsVariableProxy();
5159 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5160 Comment cmnt(masm_, "[ Global variable");
5161 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5162 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5163 __ li(LoadDescriptor::SlotRegister(),
5164 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5165 // Use a regular load, not a contextual load, to avoid a reference error.
5167 CallLoadIC(NOT_CONTEXTUAL);
5168 PrepareForBailout(expr, TOS_REG);
5169 context()->Plug(v0);
5170 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5171 Comment cmnt(masm_, "[ Lookup slot");
5174 // Generate code for loading from variables potentially shadowed
5175 // by eval-introduced variables.
5176 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5179 __ li(a0, Operand(proxy->name()));
5181 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5182 PrepareForBailout(expr, TOS_REG);
5185 context()->Plug(v0);
5187 // This expression cannot throw a reference error at the top level.
5188 VisitInDuplicateContext(expr);
5192 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5193 Expression* sub_expr,
5194 Handle<String> check) {
5195 Label materialize_true, materialize_false;
5196 Label* if_true = NULL;
5197 Label* if_false = NULL;
5198 Label* fall_through = NULL;
5199 context()->PrepareTest(&materialize_true, &materialize_false,
5200 &if_true, &if_false, &fall_through);
5202 { AccumulatorValueContext context(this);
5203 VisitForTypeofValue(sub_expr);
5205 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5207 Factory* factory = isolate()->factory();
5208 if (String::Equals(check, factory->number_string())) {
5209 __ JumpIfSmi(v0, if_true);
5210 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5211 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5212 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5213 } else if (String::Equals(check, factory->string_string())) {
5214 __ JumpIfSmi(v0, if_false);
5215 // Check for undetectable objects => false.
5216 __ GetObjectType(v0, v0, a1);
5217 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5218 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5219 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5220 Split(eq, a1, Operand(zero_reg),
5221 if_true, if_false, fall_through);
5222 } else if (String::Equals(check, factory->symbol_string())) {
5223 __ JumpIfSmi(v0, if_false);
5224 __ GetObjectType(v0, v0, a1);
5225 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5226 } else if (String::Equals(check, factory->boolean_string())) {
5227 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5228 __ Branch(if_true, eq, v0, Operand(at));
5229 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5230 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5231 } else if (String::Equals(check, factory->undefined_string())) {
5232 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5233 __ Branch(if_true, eq, v0, Operand(at));
5234 __ JumpIfSmi(v0, if_false);
5235 // Check for undetectable objects => true.
5236 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5237 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5238 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5239 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5240 } else if (String::Equals(check, factory->function_string())) {
5241 __ JumpIfSmi(v0, if_false);
5242 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5243 __ GetObjectType(v0, v0, a1);
5244 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5245 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5246 if_true, if_false, fall_through);
5247 } else if (String::Equals(check, factory->object_string())) {
5248 __ JumpIfSmi(v0, if_false);
5249 __ LoadRoot(at, Heap::kNullValueRootIndex);
5250 __ Branch(if_true, eq, v0, Operand(at));
5251 // Check for JS objects => true.
5252 __ GetObjectType(v0, v0, a1);
5253 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5254 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5255 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5256 // Check for undetectable objects => false.
5257 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5258 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5259 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5261 if (if_false != fall_through) __ jmp(if_false);
5263 context()->Plug(if_true, if_false);
5267 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5268 Comment cmnt(masm_, "[ CompareOperation");
5269 SetSourcePosition(expr->position());
5271 // First we try a fast inlined version of the compare when one of
5272 // the operands is a literal.
5273 if (TryLiteralCompare(expr)) return;
5275 // Always perform the comparison for its control flow. Pack the result
5276 // into the expression's context after the comparison is performed.
5277 Label materialize_true, materialize_false;
5278 Label* if_true = NULL;
5279 Label* if_false = NULL;
5280 Label* fall_through = NULL;
5281 context()->PrepareTest(&materialize_true, &materialize_false,
5282 &if_true, &if_false, &fall_through);
5284 Token::Value op = expr->op();
5285 VisitForStackValue(expr->left());
5288 VisitForStackValue(expr->right());
5289 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5290 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5291 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5292 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5295 case Token::INSTANCEOF: {
5296 VisitForStackValue(expr->right());
5297 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5299 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5300 // The stub returns 0 for true.
5301 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5306 VisitForAccumulatorValue(expr->right());
5307 Condition cc = CompareIC::ComputeCondition(op);
5308 __ mov(a0, result_register());
5311 bool inline_smi_code = ShouldInlineSmiCase(op);
5312 JumpPatchSite patch_site(masm_);
5313 if (inline_smi_code) {
5315 __ Or(a2, a0, Operand(a1));
5316 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5317 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5318 __ bind(&slow_case);
5320 // Record position and call the compare IC.
5321 SetSourcePosition(expr->position());
5322 Handle<Code> ic = CodeFactory::CompareIC(
5323 isolate(), op, strength(language_mode())).code();
5324 CallIC(ic, expr->CompareOperationFeedbackId());
5325 patch_site.EmitPatchInfo();
5326 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5327 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5328 }
5329 }
5331 // Convert the result of the comparison into one expected for this
5332 // expression's context.
5333 context()->Plug(if_true, if_false);
5334 }
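// A minimal host-side sketch (illustrative, not V8 API) of the inline smi
// fast path above: the smi tag is the low bit and is 0 for smis, so OR-ing
// the two tagged operands and testing that single bit verifies that both
// values are smis with one branch. kSketchSmiTagMask is a stand-in constant,
// not a V8 name.
#include <cstdint>
static inline bool BothAreSmis(uint64_t lhs_tagged, uint64_t rhs_tagged) {
  const uint64_t kSketchSmiTagMask = 1;  // low tag bit; 0 marks a smi
  return ((lhs_tagged | rhs_tagged) & kSketchSmiTagMask) == 0;
}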
5337 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5338 Expression* sub_expr,
5339 NilValue nil) {
5340 Label materialize_true, materialize_false;
5341 Label* if_true = NULL;
5342 Label* if_false = NULL;
5343 Label* fall_through = NULL;
5344 context()->PrepareTest(&materialize_true, &materialize_false,
5345 &if_true, &if_false, &fall_through);
5347 VisitForAccumulatorValue(sub_expr);
5348 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5349 __ mov(a0, result_register());
5350 if (expr->op() == Token::EQ_STRICT) {
5351 Heap::RootListIndex nil_value = nil == kNullValue ?
5352 Heap::kNullValueRootIndex :
5353 Heap::kUndefinedValueRootIndex;
5354 __ LoadRoot(a1, nil_value);
5355 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5356 } else {
5357 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5358 CallIC(ic, expr->CompareOperationFeedbackId());
5359 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5360 }
5361 context()->Plug(if_true, if_false);
5362 }
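// Illustrative sketch (not V8 API) of the strict branch above: `x === null`
// and `x === undefined` reduce to identity with the corresponding root
// object, so a single register compare decides the test; the sloppy `==`
// form goes through the CompareNilIC instead, because it must also accept
// the other nil value and undetectable objects.
static inline bool StrictEqualsNilRoot(const void* value, const void* nil_root) {
  return value == nil_root;  // pointer identity with the canonical nil root
}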
5365 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5366 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5367 context()->Plug(v0);
5368 }
5371 Register FullCodeGenerator::result_register() {
5372 return v0;
5373 }
5376 Register FullCodeGenerator::context_register() {
5377 return cp;
5378 }
5381 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5382 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5383 DCHECK(IsAligned(frame_offset, kPointerSize));
5384 // __ sw(value, MemOperand(fp, frame_offset));
5385 __ sd(value, MemOperand(fp, frame_offset));
5386 }
5389 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5390 __ ld(dst, ContextOperand(cp, context_index));
5391 }
5394 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5395 Scope* declaration_scope = scope()->DeclarationScope();
5396 if (declaration_scope->is_script_scope() ||
5397 declaration_scope->is_module_scope()) {
5398 // Contexts nested in the native context have a canonical empty function
5399 // as their closure, not the anonymous closure containing the global
5400 // code. Pass a smi sentinel and let the runtime look up the empty
5401 // function.
5402 __ li(at, Operand(Smi::FromInt(0)));
5403 } else if (declaration_scope->is_eval_scope()) {
5404 // Contexts created by a call to eval have the same closure as the
5405 // context calling eval, not the anonymous closure containing the eval
5406 // code. Fetch it from the context.
5407 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5408 } else {
5409 DCHECK(declaration_scope->is_function_scope());
5410 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5411 }
5412 __ push(at);
5413 }
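// Illustrative sketch (not V8 API) of the three-way choice made above; the
// enum and function names are hypothetical stand-ins. Script/module scopes
// push a smi 0 sentinel, eval scopes reuse the calling context's closure
// slot, and function scopes push the function stored in the current frame.
enum class SketchScopeKind { kScriptOrModule, kEval, kFunction };
enum class SketchClosureArg { kSmiSentinel, kContextClosureSlot, kFrameFunctionSlot };
static SketchClosureArg ClosureArgumentFor(SketchScopeKind kind) {
  switch (kind) {
    case SketchScopeKind::kScriptOrModule:
      return SketchClosureArg::kSmiSentinel;
    case SketchScopeKind::kEval:
      return SketchClosureArg::kContextClosureSlot;
    case SketchScopeKind::kFunction:
      return SketchClosureArg::kFrameFunctionSlot;
  }
  return SketchClosureArg::kSmiSentinel;  // unreachable
}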
5416 // ----------------------------------------------------------------------------
5417 // Non-local control flow support.
5419 void FullCodeGenerator::EnterFinallyBlock() {
5420 DCHECK(!result_register().is(a1));
5421 // Store result register while executing finally block.
5422 __ push(result_register());
5423 // Cook return address in link register to stack (smi encoded Code* delta).
5424 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
5425 __ SmiTag(a1);
5427 // Store the cooked return address while executing the finally block.
5428 __ push(a1);
5430 // Store pending message while executing finally block.
5431 ExternalReference pending_message_obj =
5432 ExternalReference::address_of_pending_message_obj(isolate());
5433 __ li(at, Operand(pending_message_obj));
5434 __ ld(a1, MemOperand(at));
5435 __ push(a1);
5437 ClearPendingMessage();
5438 }
5441 void FullCodeGenerator::ExitFinallyBlock() {
5442 DCHECK(!result_register().is(a1));
5443 // Restore pending message from stack.
5444 __ pop(a1);
5445 ExternalReference pending_message_obj =
5446 ExternalReference::address_of_pending_message_obj(isolate());
5447 __ li(at, Operand(pending_message_obj));
5448 __ sd(a1, MemOperand(at));
5450 // Restore the cooked return address (a smi) from the stack.
5451 __ pop(a1);
5453 // Restore the result register, then uncook the return address and return.
5454 __ pop(result_register());
5456 __ SmiUntag(a1);
5457 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5458 __ Jump(at);
5459 }
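// A small host-side sketch (illustrative, not V8 API) of the return-address
// cooking used by EnterFinallyBlock/ExitFinallyBlock above: the raw ra is a
// pointer into the code object, which the GC may move while the finally block
// runs, so the code stores a delta from the code object's start (additionally
// smi-tagged in the real code) and adds the current base back on exit.
#include <cstdint>
static inline uint64_t CookReturnAddress(uint64_t ra, uint64_t code_object_start) {
  return ra - code_object_start;  // relocation-safe offset into the code object
}
static inline uint64_t UncookReturnAddress(uint64_t delta, uint64_t code_object_start) {
  return delta + code_object_start;  // rebase on the code object's current address
}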
5462 void FullCodeGenerator::ClearPendingMessage() {
5463 DCHECK(!result_register().is(a1));
5464 ExternalReference pending_message_obj =
5465 ExternalReference::address_of_pending_message_obj(isolate());
5466 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
5467 __ li(at, Operand(pending_message_obj));
5468 __ sd(a1, MemOperand(at));
5469 }
5472 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5473 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5474 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
5475 Operand(SmiFromSlot(slot)));
5476 }
5482 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5483 Address pc,
5484 BackEdgeState target_state,
5485 Code* replacement_code) {
5486 static const int kInstrSize = Assembler::kInstrSize;
5487 Address branch_address = pc - 8 * kInstrSize;
5488 CodePatcher patcher(branch_address, 1);
5490 switch (target_state) {
5491 case INTERRUPT:
5492 // slt at, a3, zero_reg (in case of count based interrupts)
5493 // beq at, zero_reg, ok
5494 // lui t9, <interrupt stub address> upper
5495 // ori t9, <interrupt stub address> u-middle
5496 // dsll t9, t9, 16
5497 // ori t9, <interrupt stub address> lower
5500 // ok-label ----- pc_after points here
5501 patcher.masm()->slt(at, a3, zero_reg);
5502 break;
5503 case ON_STACK_REPLACEMENT:
5504 case OSR_AFTER_STACK_CHECK:
5505 // daddiu at, zero_reg, 1
5506 // beq at, zero_reg, ok ;; Not changed
5507 // lui t9, <on-stack replacement address> upper
5508 // ori t9, <on-stack replacement address> middle
5509 // dsll t9, t9, 16
5510 // ori t9, <on-stack replacement address> lower
5511 // jalr t9 ;; Not changed
5512 // nop ;; Not changed
5513 // ok-label ----- pc_after points here
5514 patcher.masm()->daddiu(at, zero_reg, 1);
5515 break;
5516 }
5517 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5518 // Replace the stack check address in the load-immediate (6-instr sequence)
5519 // with the entry address of the replacement code.
5520 Assembler::set_target_address_at(pc_immediate_load_address,
5521 replacement_code->entry());
5523 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5524 unoptimized_code, pc_immediate_load_address, replacement_code);
5525 }
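// Illustrative sketch (not V8 API) of the fixed layout the patching above
// relies on: `pc` points at the ok-label that follows the back-edge sequence
// shown in the comments, so the patchable guard (slt/daddiu) and the start of
// the t9 address load sit at constant offsets behind it. The struct and
// function names are hypothetical; kSketchInstrSize assumes 4-byte MIPS
// instructions.
#include <cstdint>
struct SketchBackEdgeSlots {
  uintptr_t guard_instruction;  // slt (unpatched) or daddiu (patched), pc - 8 instructions
  uintptr_t immediate_load;     // first instruction of the t9 address load, pc - 6 instructions
};
static inline SketchBackEdgeSlots LocateBackEdgeSlots(uintptr_t pc) {
  const uintptr_t kSketchInstrSize = 4;
  return {pc - 8 * kSketchInstrSize, pc - 6 * kSketchInstrSize};
}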
5528 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5529 Isolate* isolate,
5530 Code* unoptimized_code,
5531 Address pc) {
5532 static const int kInstrSize = Assembler::kInstrSize;
5533 Address branch_address = pc - 8 * kInstrSize;
5534 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5536 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5537 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5538 DCHECK(reinterpret_cast<uint64_t>(
5539 Assembler::target_address_at(pc_immediate_load_address)) ==
5540 reinterpret_cast<uint64_t>(
5541 isolate->builtins()->InterruptCheck()->entry()));
5542 return INTERRUPT;
5543 }
5545 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5547 if (reinterpret_cast<uint64_t>(
5548 Assembler::target_address_at(pc_immediate_load_address)) ==
5549 reinterpret_cast<uint64_t>(
5550 isolate->builtins()->OnStackReplacement()->entry())) {
5551 return ON_STACK_REPLACEMENT;
5552 }
5554 DCHECK(reinterpret_cast<uint64_t>(
5555 Assembler::target_address_at(pc_immediate_load_address)) ==
5556 reinterpret_cast<uint64_t>(
5557 isolate->builtins()->OsrAfterStackCheck()->entry()));
5558 return OSR_AFTER_STACK_CHECK;
5559 }
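// Illustrative sketch (not V8 API) of the decoding performed above; the enum
// and parameter names are hypothetical stand-ins for what the checks inspect.
// An unpatched back edge still carries the slt guard (not an add-immediate),
// so it reports INTERRUPT; a patched edge is classified by which builtin
// entry the t9 load now targets.
enum class SketchBackEdgeState { kInterrupt, kOnStackReplacement, kOsrAfterStackCheck };
static SketchBackEdgeState ClassifyBackEdge(bool guard_is_add_immediate,
                                            bool load_targets_on_stack_replacement) {
  if (!guard_is_add_immediate) return SketchBackEdgeState::kInterrupt;
  return load_targets_on_stack_replacement
             ? SketchBackEdgeState::kOnStackReplacement
             : SketchBackEdgeState::kOsrAfterStackCheck;
}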
5562 } // namespace internal
5563 } // namespace v8
5565 #endif // V8_TARGET_ARCH_MIPS64