1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_MIPS64
9 // Note on Mips implementation:
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
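//
// For example, a typical IC call site below first copies the previous result
// into the first-argument register with __ mov(a0, result_register()) and only
// then issues the CallIC(...) that consumes a0 (illustrative of the pattern,
// not a fixed rule for every call).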
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/isolate-inl.h"
25 #include "src/parser.h"
26 #include "src/scopes.h"
28 #include "src/mips64/code-stubs-mips64.h"
29 #include "src/mips64/macro-assembler-mips64.h"
34 #define __ ACCESS_MASM(masm_)
37 // A patch site is a location in the code that can be patched. This class has
38 // a number of methods to emit the patchable code and the method EmitPatchInfo
39 // to record a marker back to the patchable code. This marker is an
40 // andi zero_reg, rx, #yyyy instruction, where rx * 0x0000ffff + yyyy
41 // (the raw 16 bit immediate value is used) is the delta from the pc to the
42 // first instruction of the patchable code.
43 // The marker instruction is effectively a NOP (dest is zero_reg) and will
44 // never be emitted by normal code.
45 class JumpPatchSite BASE_EMBEDDED {
47 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
49 info_emitted_ = false;
54 DCHECK(patch_site_.is_bound() == info_emitted_);
57 // When this code is initially emitted, ensure that a jump is always generated
58 // to skip the inlined smi code.
59 void EmitJumpIfNotSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
62 __ bind(&patch_site_);
64 // Always taken before patched.
65 __ BranchShort(target, eq, at, Operand(zero_reg));
68 // When this code is initially emitted, ensure that a jump is never generated
69 // to skip the inlined smi code.
70 void EmitJumpIfSmi(Register reg, Label* target) {
71 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
72 DCHECK(!patch_site_.is_bound() && !info_emitted_);
73 __ bind(&patch_site_);
75 // Never taken before patched.
76 __ BranchShort(target, ne, at, Operand(zero_reg));
79 void EmitPatchInfo() {
80 if (patch_site_.is_bound()) {
81 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
82 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
83 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
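// Illustrative encoding (hypothetical delta): a delta of 10 instructions
// yields register code 10 / kImm16Mask == 0 and immediate 10 % kImm16Mask
// == 10, i.e. andi(zero_reg, Register::from_code(0), 10); the patching code
// recovers the delta as reg_code * kImm16Mask + immediate.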
88 __ nop(); // Signals no inlined code.
93 MacroAssembler* masm_;
101 // Generate code for a JS function. On entry to the function the receiver
102 // and arguments have been pushed on the stack left to right. The actual
103 // argument count matches the formal parameter count expected by the function.
106 // The live registers are:
107 // o a1: the JS function object being called (i.e. ourselves)
109 // o fp: our caller's frame pointer
110 // o sp: stack pointer
111 // o ra: return address
113 // The function builds a JS frame. Please see JavaScriptFrameConstants in
114 // frames-mips64.h for its layout.
115 void FullCodeGenerator::Generate() {
116 CompilationInfo* info = info_;
118 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
119 HandlerTable::LengthForRange(function()->handler_count()), TENURED));
121 profiling_counter_ = isolate()->factory()->NewCell(
122 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
123 SetFunctionPosition(function());
124 Comment cmnt(masm_, "[ function compiled by full code generator");
126 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
129 if (strlen(FLAG_stop_at) > 0 &&
130 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
135 // Sloppy mode functions and builtins need to replace the receiver with the
136 // global proxy when called as functions (without an explicit receiver object).
138 if (is_sloppy(info->language_mode()) && !info->is_native()) {
140 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
141 __ ld(at, MemOperand(sp, receiver_offset));
142 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
143 __ Branch(&ok, ne, a2, Operand(at));
145 __ ld(a2, GlobalObjectOperand());
146 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
148 __ sd(a2, MemOperand(sp, receiver_offset));
151 // Open a frame scope to indicate that there is a frame on the stack. The
152 // MANUAL indicates that the scope shouldn't actually generate code to set up
153 // the frame (that is done below).
154 FrameScope frame_scope(masm_, StackFrame::MANUAL);
155 info->set_prologue_offset(masm_->pc_offset());
156 __ Prologue(info->IsCodePreAgingActive());
157 info->AddNoFrameRange(0, masm_->pc_offset());
159 { Comment cmnt(masm_, "[ Allocate locals");
160 int locals_count = info->scope()->num_stack_slots();
161 // Generators allocate locals, if any, in context slots.
162 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
163 if (locals_count > 0) {
164 if (locals_count >= 128) {
166 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
167 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
168 __ Branch(&ok, hs, t1, Operand(a2));
169 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
172 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
173 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
174 if (locals_count >= kMaxPushes) {
175 int loop_iterations = locals_count / kMaxPushes;
176 __ li(a2, Operand(loop_iterations));
178 __ bind(&loop_header);
180 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
181 for (int i = 0; i < kMaxPushes; i++) {
182 __ sd(t1, MemOperand(sp, i * kPointerSize));
184 // Continue loop if not done.
185 __ Dsubu(a2, a2, Operand(1));
186 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
188 int remaining = locals_count % kMaxPushes;
189 // Emit the remaining pushes.
190 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
191 for (int i = 0; i < remaining; i++) {
192 __ sd(t1, MemOperand(sp, i * kPointerSize));
197 bool function_in_register = true;
199 // Possibly allocate a local context.
200 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201 if (heap_slots > 0) {
202 Comment cmnt(masm_, "[ Allocate context");
203 // Argument to NewContext is the function, which is still in a1.
204 bool need_write_barrier = true;
205 if (info->scope()->is_script_scope()) {
207 __ Push(info->scope()->GetScopeInfo(info->isolate()));
208 __ CallRuntime(Runtime::kNewScriptContext, 2);
209 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
210 FastNewContextStub stub(isolate(), heap_slots);
212 // Result of FastNewContextStub is always in new space.
213 need_write_barrier = false;
216 __ CallRuntime(Runtime::kNewFunctionContext, 1);
218 function_in_register = false;
219 // Context is returned in v0. It replaces the context passed to us.
220 // It is saved on the stack and kept live in cp.
222 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
223 // Copy any necessary parameters into the context.
224 int num_parameters = info->scope()->num_parameters();
225 for (int i = 0; i < num_parameters; i++) {
226 Variable* var = scope()->parameter(i);
227 if (var->IsContextSlot()) {
228 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
229 (num_parameters - 1 - i) * kPointerSize;
230 // Load parameter from stack.
231 __ ld(a0, MemOperand(fp, parameter_offset));
232 // Store it in the context.
233 MemOperand target = ContextOperand(cp, var->index());
236 // Update the write barrier.
237 if (need_write_barrier) {
238 __ RecordWriteContextSlot(
239 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
240 } else if (FLAG_debug_code) {
242 __ JumpIfInNewSpace(cp, a0, &done);
243 __ Abort(kExpectedNewSpaceObject);
250 ArgumentsAccessStub::HasNewTarget has_new_target =
251 IsSubclassConstructor(info->function()->kind())
252 ? ArgumentsAccessStub::HAS_NEW_TARGET
253 : ArgumentsAccessStub::NO_NEW_TARGET;
255 // Possibly allocate a rest parameter array.
257 Variable* rest_param = scope()->rest_parameter(&rest_index);
259 Comment cmnt(masm_, "[ Allocate rest parameter array");
261 int num_parameters = info->scope()->num_parameters();
262 int offset = num_parameters * kPointerSize;
263 if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
269 Operand(StandardFrameConstants::kCallerSPOffset + offset));
270 __ li(a2, Operand(Smi::FromInt(num_parameters)));
271 __ li(a1, Operand(Smi::FromInt(rest_index)));
274 RestParamAccessStub stub(isolate());
277 SetVar(rest_param, v0, a1, a2);
280 Variable* arguments = scope()->arguments();
281 if (arguments != NULL) {
282 // Function uses arguments object.
283 Comment cmnt(masm_, "[ Allocate arguments object");
284 if (!function_in_register) {
285 // Load this again, if it's used by the local context below.
286 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
290 // Receiver is just before the parameters on the caller's stack.
291 int num_parameters = info->scope()->num_parameters();
292 int offset = num_parameters * kPointerSize;
294 Operand(StandardFrameConstants::kCallerSPOffset + offset));
295 __ li(a1, Operand(Smi::FromInt(num_parameters)));
298 // Arguments to ArgumentsAccessStub:
299 // function, receiver address, parameter count.
300 // The stub will rewrite the receiver and parameter count if the previous
301 // stack frame was an arguments adapter frame.
302 ArgumentsAccessStub::Type type;
303 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
304 type = ArgumentsAccessStub::NEW_STRICT;
305 } else if (function()->has_duplicate_parameters()) {
306 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
308 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
310 ArgumentsAccessStub stub(isolate(), type, has_new_target);
313 SetVar(arguments, v0, a1, a2);
317 __ CallRuntime(Runtime::kTraceEnter, 0);
319 // Visit the declarations and body unless there is an illegal redeclaration.
321 if (scope()->HasIllegalRedeclaration()) {
322 Comment cmnt(masm_, "[ Declarations");
323 scope()->VisitIllegalRedeclaration(this);
326 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
327 { Comment cmnt(masm_, "[ Declarations");
328 // For named function expressions, declare the function name as a constant.
330 if (scope()->is_function_scope() && scope()->function() != NULL) {
331 VariableDeclaration* function = scope()->function();
332 DCHECK(function->proxy()->var()->mode() == CONST ||
333 function->proxy()->var()->mode() == CONST_LEGACY);
334 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
335 VisitVariableDeclaration(function);
337 VisitDeclarations(scope()->declarations());
339 { Comment cmnt(masm_, "[ Stack check");
340 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
342 __ LoadRoot(at, Heap::kStackLimitRootIndex);
343 __ Branch(&ok, hs, sp, Operand(at));
344 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
345 PredictableCodeSizeScope predictable(masm_,
346 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
347 __ Call(stack_check, RelocInfo::CODE_TARGET);
351 { Comment cmnt(masm_, "[ Body");
352 DCHECK(loop_depth() == 0);
354 VisitStatements(function()->body());
356 DCHECK(loop_depth() == 0);
360 // Always emit a 'return undefined' in case control fell off the end of the body.
362 { Comment cmnt(masm_, "[ return <undefined>;");
363 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
365 EmitReturnSequence();
369 void FullCodeGenerator::ClearAccumulator() {
370 DCHECK(Smi::FromInt(0) == 0);
371 __ mov(v0, zero_reg);
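// Smi::FromInt(0) has the same bit pattern as the machine word 0 (checked by
// the DCHECK above), so clearing the accumulator to the Smi zero is a plain
// move from zero_reg.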
375 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
376 __ li(a2, Operand(profiling_counter_));
377 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
378 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
379 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
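// The counter is kept as a Smi inside a Cell, so storing the decremented
// value back requires no write barrier.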
383 void FullCodeGenerator::EmitProfilingCounterReset() {
384 int reset_value = FLAG_interrupt_budget;
385 if (info_->is_debug()) {
386 // Detect debug break requests as soon as possible.
387 reset_value = FLAG_interrupt_budget >> 4;
389 __ li(a2, Operand(profiling_counter_));
390 __ li(a3, Operand(Smi::FromInt(reset_value)));
391 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
395 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
396 Label* back_edge_target) {
397 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we need
398 // to make sure its size is constant. Branch may emit a skip-or-jump sequence
399 // instead of the normal Branch. It seems that the "skip" part of that
400 // sequence is about as long as this Branch would be, so it is safe to ignore that.
402 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
403 Comment cmnt(masm_, "[ Back edge bookkeeping");
405 DCHECK(back_edge_target->is_bound());
406 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
407 int weight = Min(kMaxBackEdgeWeight,
408 Max(1, distance / kCodeSizeMultiplier));
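// The decrement is proportional to the size of the loop body (capped at
// kMaxBackEdgeWeight), so larger loops consume the interrupt budget faster
// than small ones.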
409 EmitProfilingCounterDecrement(weight);
410 __ slt(at, a3, zero_reg);
411 __ beq(at, zero_reg, &ok);
412 // Call will emit a li t9 first, so it is safe to use the delay slot.
413 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
414 // Record a mapping of this PC offset to the OSR id. This is used to find
415 // the AST id from the unoptimized code in order to use it as a key into
416 // the deoptimization input data found in the optimized code.
417 RecordBackEdge(stmt->OsrEntryId());
418 EmitProfilingCounterReset();
421 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
422 // Record a mapping of the OSR id to this PC. This is used if the OSR
423 // entry becomes the target of a bailout. We don't expect it to be, but
424 // we want it to work if it is.
425 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
429 void FullCodeGenerator::EmitReturnSequence() {
430 Comment cmnt(masm_, "[ Return sequence");
431 if (return_label_.is_bound()) {
432 __ Branch(&return_label_);
434 __ bind(&return_label_);
436 // Push the return value on the stack as the parameter.
437 // Runtime::TraceExit returns its parameter in v0.
439 __ CallRuntime(Runtime::kTraceExit, 1);
441 // Pretend that the exit is a backwards jump to the entry.
443 if (info_->ShouldSelfOptimize()) {
444 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
446 int distance = masm_->pc_offset();
447 weight = Min(kMaxBackEdgeWeight,
448 Max(1, distance / kCodeSizeMultiplier));
450 EmitProfilingCounterDecrement(weight);
452 __ Branch(&ok, ge, a3, Operand(zero_reg));
454 __ Call(isolate()->builtins()->InterruptCheck(),
455 RelocInfo::CODE_TARGET);
457 EmitProfilingCounterReset();
461 // Add a label for checking the size of the code used for returning.
462 Label check_exit_codesize;
463 masm_->bind(&check_exit_codesize);
465 // Make sure that the trampoline pool is not emitted inside of the return sequence.
467 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
468 // Here we use masm_-> instead of the __ macro to prevent the code coverage
469 // tool from instrumenting this code, as we rely on the code size here.
470 int32_t arg_count = info_->scope()->num_parameters() + 1;
471 if (IsSubclassConstructor(info_->function()->kind())) {
474 int32_t sp_delta = arg_count * kPointerSize;
475 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
478 int no_frame_start = masm_->pc_offset();
479 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
480 masm_->Daddu(sp, sp, Operand(sp_delta));
482 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
486 // Check that the size of the code used for returning is large enough
487 // for the debugger's requirements.
488 DCHECK(Assembler::kJSReturnSequenceInstructions <=
489 masm_->InstructionsGeneratedSince(&check_exit_codesize));
495 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
496 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
500 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
501 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
502 codegen()->GetVar(result_register(), var);
506 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
507 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
508 codegen()->GetVar(result_register(), var);
509 __ push(result_register());
513 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
514 // For simplicity we always test the accumulator register.
515 codegen()->GetVar(result_register(), var);
516 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
517 codegen()->DoTest(this);
521 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
525 void FullCodeGenerator::AccumulatorValueContext::Plug(
526 Heap::RootListIndex index) const {
527 __ LoadRoot(result_register(), index);
531 void FullCodeGenerator::StackValueContext::Plug(
532 Heap::RootListIndex index) const {
533 __ LoadRoot(result_register(), index);
534 __ push(result_register());
538 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
539 codegen()->PrepareForBailoutBeforeSplit(condition(),
543 if (index == Heap::kUndefinedValueRootIndex ||
544 index == Heap::kNullValueRootIndex ||
545 index == Heap::kFalseValueRootIndex) {
546 if (false_label_ != fall_through_) __ Branch(false_label_);
547 } else if (index == Heap::kTrueValueRootIndex) {
548 if (true_label_ != fall_through_) __ Branch(true_label_);
550 __ LoadRoot(result_register(), index);
551 codegen()->DoTest(this);
556 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
560 void FullCodeGenerator::AccumulatorValueContext::Plug(
561 Handle<Object> lit) const {
562 __ li(result_register(), Operand(lit));
566 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
567 // Immediates cannot be pushed directly.
568 __ li(result_register(), Operand(lit));
569 __ push(result_register());
573 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
574 codegen()->PrepareForBailoutBeforeSplit(condition(),
578 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
579 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
580 if (false_label_ != fall_through_) __ Branch(false_label_);
581 } else if (lit->IsTrue() || lit->IsJSObject()) {
582 if (true_label_ != fall_through_) __ Branch(true_label_);
583 } else if (lit->IsString()) {
584 if (String::cast(*lit)->length() == 0) {
585 if (false_label_ != fall_through_) __ Branch(false_label_);
587 if (true_label_ != fall_through_) __ Branch(true_label_);
589 } else if (lit->IsSmi()) {
590 if (Smi::cast(*lit)->value() == 0) {
591 if (false_label_ != fall_through_) __ Branch(false_label_);
593 if (true_label_ != fall_through_) __ Branch(true_label_);
596 // For simplicity we always test the accumulator register.
597 __ li(result_register(), Operand(lit));
598 codegen()->DoTest(this);
603 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
604 Register reg) const {
610 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
612 Register reg) const {
615 __ Move(result_register(), reg);
619 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
620 Register reg) const {
622 if (count > 1) __ Drop(count - 1);
623 __ sd(reg, MemOperand(sp, 0));
627 void FullCodeGenerator::TestContext::DropAndPlug(int count,
628 Register reg) const {
630 // For simplicity we always test the accumulator register.
632 __ Move(result_register(), reg);
633 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
634 codegen()->DoTest(this);
638 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
639 Label* materialize_false) const {
640 DCHECK(materialize_true == materialize_false);
641 __ bind(materialize_true);
645 void FullCodeGenerator::AccumulatorValueContext::Plug(
646 Label* materialize_true,
647 Label* materialize_false) const {
649 __ bind(materialize_true);
650 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
652 __ bind(materialize_false);
653 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
658 void FullCodeGenerator::StackValueContext::Plug(
659 Label* materialize_true,
660 Label* materialize_false) const {
662 __ bind(materialize_true);
663 __ LoadRoot(at, Heap::kTrueValueRootIndex);
664 // Push the value as the following branch can clobber at in long branch mode.
667 __ bind(materialize_false);
668 __ LoadRoot(at, Heap::kFalseValueRootIndex);
674 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
675 Label* materialize_false) const {
676 DCHECK(materialize_true == true_label_);
677 DCHECK(materialize_false == false_label_);
681 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
685 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
686 Heap::RootListIndex value_root_index =
687 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
688 __ LoadRoot(result_register(), value_root_index);
692 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
693 Heap::RootListIndex value_root_index =
694 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
695 __ LoadRoot(at, value_root_index);
700 void FullCodeGenerator::TestContext::Plug(bool flag) const {
701 codegen()->PrepareForBailoutBeforeSplit(condition(),
706 if (true_label_ != fall_through_) __ Branch(true_label_);
708 if (false_label_ != fall_through_) __ Branch(false_label_);
713 void FullCodeGenerator::DoTest(Expression* condition,
716 Label* fall_through) {
717 __ mov(a0, result_register());
718 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
719 CallIC(ic, condition->test_id());
720 __ mov(at, zero_reg);
721 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
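// The ToBoolean stub is expected to leave zero in v0 for a false-ish value and
// a non-zero value otherwise, so a single comparison against zero_reg is
// enough to split control flow into the true and false targets.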
725 void FullCodeGenerator::Split(Condition cc,
730 Label* fall_through) {
731 if (if_false == fall_through) {
732 __ Branch(if_true, cc, lhs, rhs);
733 } else if (if_true == fall_through) {
734 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
736 __ Branch(if_true, cc, lhs, rhs);
742 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
743 DCHECK(var->IsStackAllocated());
744 // Offset is negative because higher indexes are at lower addresses.
745 int offset = -var->index() * kPointerSize;
746 // Adjust by a (parameter or local) base offset.
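// Illustrative: a local with index i ends up at fp + kLocal0Offset -
// i * kPointerSize, while a parameter is additionally offset by
// (num_parameters + 1) * kPointerSize to reach the caller-pushed arguments.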
747 if (var->IsParameter()) {
748 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
750 offset += JavaScriptFrameConstants::kLocal0Offset;
752 return MemOperand(fp, offset);
756 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
757 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
758 if (var->IsContextSlot()) {
759 int context_chain_length = scope()->ContextChainLength(var->scope());
760 __ LoadContext(scratch, context_chain_length);
761 return ContextOperand(scratch, var->index());
763 return StackOperand(var);
768 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
769 // Use destination as scratch.
770 MemOperand location = VarOperand(var, dest);
771 __ ld(dest, location);
775 void FullCodeGenerator::SetVar(Variable* var,
779 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
780 DCHECK(!scratch0.is(src));
781 DCHECK(!scratch0.is(scratch1));
782 DCHECK(!scratch1.is(src));
783 MemOperand location = VarOperand(var, scratch0);
784 __ sd(src, location);
785 // Emit the write barrier code if the location is in the heap.
786 if (var->IsContextSlot()) {
787 __ RecordWriteContextSlot(scratch0,
797 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
798 bool should_normalize,
801 // Only prepare for bailouts before splits if we're in a test
802 // context. Otherwise, we let the Visit function deal with the
803 // preparation to avoid preparing with the same AST id twice.
804 if (!context()->IsTest() || !info_->IsOptimizable()) return;
807 if (should_normalize) __ Branch(&skip);
808 PrepareForBailout(expr, TOS_REG);
809 if (should_normalize) {
810 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
811 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
817 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
818 // The variable in the declaration always resides in the current function context.
820 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
821 if (generate_debug_code_) {
822 // Check that we're not inside a with or catch context.
823 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
824 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
825 __ Check(ne, kDeclarationInWithContext,
827 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
828 __ Check(ne, kDeclarationInCatchContext,
834 void FullCodeGenerator::VisitVariableDeclaration(
835 VariableDeclaration* declaration) {
836 // If it was not possible to allocate the variable at compile time, we
837 // need to "declare" it at runtime to make sure it actually exists in the local context.
839 VariableProxy* proxy = declaration->proxy();
840 VariableMode mode = declaration->mode();
841 Variable* variable = proxy->var();
842 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
843 switch (variable->location()) {
844 case Variable::UNALLOCATED:
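// Global declarations accumulate (name, initial value) pairs in globals_;
// they are materialized in one batch by DeclareGlobals below.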
845 globals_->Add(variable->name(), zone());
846 globals_->Add(variable->binding_needs_init()
847 ? isolate()->factory()->the_hole_value()
848 : isolate()->factory()->undefined_value(),
852 case Variable::PARAMETER:
853 case Variable::LOCAL:
855 Comment cmnt(masm_, "[ VariableDeclaration");
856 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
857 __ sd(a4, StackOperand(variable));
861 case Variable::CONTEXT:
863 Comment cmnt(masm_, "[ VariableDeclaration");
864 EmitDebugCheckDeclarationContext(variable);
865 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
866 __ sd(at, ContextOperand(cp, variable->index()));
867 // No write barrier since the_hole_value is in old space.
868 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
872 case Variable::LOOKUP: {
873 Comment cmnt(masm_, "[ VariableDeclaration");
874 __ li(a2, Operand(variable->name()));
875 // Declaration nodes are always introduced in one of four modes.
876 DCHECK(IsDeclaredVariableMode(mode));
877 PropertyAttributes attr =
878 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
879 __ li(a1, Operand(Smi::FromInt(attr)));
880 // Push initial value, if any.
881 // Note: For variables we must not push an initial value (such as
882 // 'undefined') because we may have a (legal) redeclaration and we
883 // must not destroy the current value.
885 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
886 __ Push(cp, a2, a1, a0);
888 DCHECK(Smi::FromInt(0) == 0);
889 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
890 __ Push(cp, a2, a1, a0);
892 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
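// The runtime call consumes the four values pushed above: the context, the
// variable name, the property attributes and the initial value (the hole for
// hole-initialized bindings, Smi 0 otherwise).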
899 void FullCodeGenerator::VisitFunctionDeclaration(
900 FunctionDeclaration* declaration) {
901 VariableProxy* proxy = declaration->proxy();
902 Variable* variable = proxy->var();
903 switch (variable->location()) {
904 case Variable::UNALLOCATED: {
905 globals_->Add(variable->name(), zone());
906 Handle<SharedFunctionInfo> function =
907 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
908 // Check for stack-overflow exception.
909 if (function.is_null()) return SetStackOverflow();
910 globals_->Add(function, zone());
914 case Variable::PARAMETER:
915 case Variable::LOCAL: {
916 Comment cmnt(masm_, "[ FunctionDeclaration");
917 VisitForAccumulatorValue(declaration->fun());
918 __ sd(result_register(), StackOperand(variable));
922 case Variable::CONTEXT: {
923 Comment cmnt(masm_, "[ FunctionDeclaration");
924 EmitDebugCheckDeclarationContext(variable);
925 VisitForAccumulatorValue(declaration->fun());
926 __ sd(result_register(), ContextOperand(cp, variable->index()));
927 int offset = Context::SlotOffset(variable->index());
928 // We know that we have written a function, which is not a smi.
929 __ RecordWriteContextSlot(cp,
937 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
941 case Variable::LOOKUP: {
942 Comment cmnt(masm_, "[ FunctionDeclaration");
943 __ li(a2, Operand(variable->name()));
944 __ li(a1, Operand(Smi::FromInt(NONE)));
946 // Push initial value for function declaration.
947 VisitForStackValue(declaration->fun());
948 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
955 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
956 Variable* variable = declaration->proxy()->var();
957 ModuleDescriptor* descriptor = declaration->module()->descriptor();
958 DCHECK(variable->location() == Variable::CONTEXT);
959 DCHECK(descriptor->IsFrozen());
960 Comment cmnt(masm_, "[ ModuleDeclaration");
961 EmitDebugCheckDeclarationContext(variable);
963 // Load instance object.
964 __ LoadContext(a1, scope_->ContextChainLength(scope_->ScriptScope()));
965 __ ld(a1, ContextOperand(a1, descriptor->Index()));
966 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
969 __ sd(a1, ContextOperand(cp, variable->index()));
970 // We know that we have written a module, which is not a smi.
971 __ RecordWriteContextSlot(cp,
972 Context::SlotOffset(variable->index()),
979 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
981 // Traverse into body.
982 Visit(declaration->module());
986 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
987 VariableProxy* proxy = declaration->proxy();
988 Variable* variable = proxy->var();
989 switch (variable->location()) {
990 case Variable::UNALLOCATED:
994 case Variable::CONTEXT: {
995 Comment cmnt(masm_, "[ ImportDeclaration");
996 EmitDebugCheckDeclarationContext(variable);
1001 case Variable::PARAMETER:
1002 case Variable::LOCAL:
1003 case Variable::LOOKUP:
1009 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1014 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1015 // Call the runtime to declare the globals.
1016 // The context is the first argument.
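// 'pairs' is the flat list of (name, initial value) pairs accumulated in
// globals_ by the declaration visitors above.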
1017 __ li(a1, Operand(pairs));
1018 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
1019 __ Push(cp, a1, a0);
1020 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1021 // Return value is ignored.
1025 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1026 // Call the runtime to declare the modules.
1027 __ Push(descriptions);
1028 __ CallRuntime(Runtime::kDeclareModules, 1);
1029 // Return value is ignored.
1033 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1034 Comment cmnt(masm_, "[ SwitchStatement");
1035 Breakable nested_statement(this, stmt);
1036 SetStatementPosition(stmt);
1038 // Keep the switch value on the stack until a case matches.
1039 VisitForStackValue(stmt->tag());
1040 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1042 ZoneList<CaseClause*>* clauses = stmt->cases();
1043 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1045 Label next_test; // Recycled for each test.
1046 // Compile all the tests with branches to their bodies.
1047 for (int i = 0; i < clauses->length(); i++) {
1048 CaseClause* clause = clauses->at(i);
1049 clause->body_target()->Unuse();
1051 // The default is not a test, but remember it as the final fall-through.
1052 if (clause->is_default()) {
1053 default_clause = clause;
1057 Comment cmnt(masm_, "[ Case comparison");
1058 __ bind(&next_test);
1061 // Compile the label expression.
1062 VisitForAccumulatorValue(clause->label());
1063 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1065 // Perform the comparison as if via '==='.
1066 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1067 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1068 JumpPatchSite patch_site(masm_);
1069 if (inline_smi_code) {
1072 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1074 __ Branch(&next_test, ne, a1, Operand(a0));
1075 __ Drop(1); // Switch value is no longer needed.
1076 __ Branch(clause->body_target());
1078 __ bind(&slow_case);
1081 // Record position before stub call for type feedback.
1082 SetSourcePosition(clause->position());
1084 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1085 CallIC(ic, clause->CompareId());
1086 patch_site.EmitPatchInfo();
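// The patch info ties the inlined smi comparison above to this CompareIC call
// site so the IC can later patch the smi fast path (see JumpPatchSite at the
// top of this file).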
1090 PrepareForBailout(clause, TOS_REG);
1091 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1092 __ Branch(&next_test, ne, v0, Operand(at));
1094 __ Branch(clause->body_target());
1097 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1098 __ Drop(1); // Switch value is no longer needed.
1099 __ Branch(clause->body_target());
1102 // Discard the test value and jump to the default if present, otherwise to
1103 // the end of the statement.
1104 __ bind(&next_test);
1105 __ Drop(1); // Switch value is no longer needed.
1106 if (default_clause == NULL) {
1107 __ Branch(nested_statement.break_label());
1109 __ Branch(default_clause->body_target());
1112 // Compile all the case bodies.
1113 for (int i = 0; i < clauses->length(); i++) {
1114 Comment cmnt(masm_, "[ Case body");
1115 CaseClause* clause = clauses->at(i);
1116 __ bind(clause->body_target());
1117 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1118 VisitStatements(clause->statements());
1121 __ bind(nested_statement.break_label());
1122 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1126 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1127 Comment cmnt(masm_, "[ ForInStatement");
1128 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1129 SetStatementPosition(stmt);
1132 ForIn loop_statement(this, stmt);
1133 increment_loop_depth();
1135 // Get the object to enumerate over. If the object is null or undefined, skip
1136 // over the loop. See ECMA-262 version 5, section 12.6.4.
1137 SetExpressionPosition(stmt->enumerable());
1138 VisitForAccumulatorValue(stmt->enumerable());
1139 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1140 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1141 __ Branch(&exit, eq, a0, Operand(at));
1142 Register null_value = a5;
1143 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1144 __ Branch(&exit, eq, a0, Operand(null_value));
1145 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1147 // Convert the object to a JS object.
1148 Label convert, done_convert;
1149 __ JumpIfSmi(a0, &convert);
1150 __ GetObjectType(a0, a1, a1);
1151 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1154 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1156 __ bind(&done_convert);
1157 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1160 // Check for proxies.
1162 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1163 __ GetObjectType(a0, a1, a1);
1164 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1166 // Check cache validity in generated code. This is a fast case for
1167 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1168 // guarantee cache validity, call the runtime system to check cache
1169 // validity or get the property names in a fixed array.
1170 __ CheckEnumCache(null_value, &call_runtime);
1172 // The enum cache is valid. Load the map of the object being
1173 // iterated over and use the cache for the iteration.
1175 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1176 __ Branch(&use_cache);
1178 // Get the set of properties to enumerate.
1179 __ bind(&call_runtime);
1180 __ push(a0); // Duplicate the enumerable object on the stack.
1181 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1182 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1184 // If we got a map from the runtime call, we can do a fast
1185 // modification check. Otherwise, we got a fixed array, and we have
1186 // to do a slow check.
1188 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1189 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1190 __ Branch(&fixed_array, ne, a2, Operand(at));
1192 // We got a map in register v0. Get the enumeration cache from it.
1193 Label no_descriptors;
1194 __ bind(&use_cache);
1196 __ EnumLength(a1, v0);
1197 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1199 __ LoadInstanceDescriptors(v0, a2);
1200 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1201 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1203 // Set up the four remaining stack slots.
1204 __ li(a0, Operand(Smi::FromInt(0)));
1205 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1206 __ Push(v0, a2, a1, a0);
1209 __ bind(&no_descriptors);
1213 // We got a fixed array in register v0. Iterate through that.
1215 __ bind(&fixed_array);
1217 __ li(a1, FeedbackVector());
1218 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1219 int vector_index = FeedbackVector()->GetIndex(slot);
1220 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
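// Presumably this marks the for-in site as generic in the feedback vector so
// the optimizing compiler will not specialize on an enum-cache-based iteration
// here (an assumption about how this feedback is consumed).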
1222 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1223 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1224 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1225 __ GetObjectType(a2, a3, a3);
1226 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1227 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1228 __ bind(&non_proxy);
1229 __ Push(a1, v0); // Smi and array
1230 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1231 __ li(a0, Operand(Smi::FromInt(0)));
1232 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
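// The loop below expects five for-in state slots on the stack (slot 4 down to
// slot 0): the enumerable object, the expected map (or a smi in the permanent
// slow case), the array of keys, its length (as a smi) and the current index
// (as a smi).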
1234 // Generate code for doing the condition check.
1235 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1237 SetExpressionPosition(stmt->each());
1239 // Load the current count to a0, load the length to a1.
1240 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1241 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1242 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1244 // Get the current entry of the array into register a3.
1245 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1246 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1247 __ SmiScale(a4, a0, kPointerSizeLog2);
1248 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1249 __ ld(a3, MemOperand(a4)); // Current entry.
1251 // Get the expected map from the stack or a smi in the
1252 // permanent slow case into register a2.
1253 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1255 // Check if the expected map still matches that of the enumerable.
1256 // If not, we may have to filter the key.
1258 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1259 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1260 __ Branch(&update_each, eq, a4, Operand(a2));
1262 // For proxies, no filtering is done.
1263 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1264 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1265 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1267 // Convert the entry to a string or (smi) 0 if it isn't a property
1268 // any more. If the property has been removed while iterating, we just skip it.
1270 __ Push(a1, a3); // Enumerable and current entry.
1271 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1272 __ mov(a3, result_register());
1273 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1275 // Update the 'each' property or variable from the possibly filtered
1276 // entry in register a3.
1277 __ bind(&update_each);
1278 __ mov(result_register(), a3);
1279 // Perform the assignment as if via '='.
1280 { EffectContext context(this);
1281 EmitAssignment(stmt->each());
1282 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1285 // Generate code for the body of the loop.
1286 Visit(stmt->body());
1288 // Generate code for going to the next element by incrementing
1289 // the index (smi) stored on top of the stack.
1290 __ bind(loop_statement.continue_label());
1292 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1295 EmitBackEdgeBookkeeping(stmt, &loop);
1298 // Remove the pointers stored on the stack.
1299 __ bind(loop_statement.break_label());
1302 // Exit and decrement the loop depth.
1303 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1305 decrement_loop_depth();
1309 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1311 // Use the fast case closure allocation code that allocates in new
1312 // space for nested functions that don't need literals cloning. If
1313 // we're running with the --always-opt or the --prepare-always-opt
1314 // flag, we need to use the runtime function so that the new function
1315 // we are creating here gets a chance to have its code optimized and
1316 // doesn't just get a copy of the existing unoptimized code.
1317 if (!FLAG_always_opt &&
1318 !FLAG_prepare_always_opt &&
1320 scope()->is_function_scope() &&
1321 info->num_literals() == 0) {
1322 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1323 __ li(a2, Operand(info));
1326 __ li(a0, Operand(info));
1327 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1328 : Heap::kFalseValueRootIndex);
1329 __ Push(cp, a0, a1);
1330 __ CallRuntime(Runtime::kNewClosure, 3);
1332 context()->Plug(v0);
1336 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1337 Comment cmnt(masm_, "[ VariableProxy");
1338 EmitVariableLoad(expr);
1342 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1343 Comment cmnt(masm_, "[ SuperReference ");
1345 __ ld(LoadDescriptor::ReceiverRegister(),
1346 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1348 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1349 __ li(LoadDescriptor::NameRegister(), home_object_symbol);
1351 if (FLAG_vector_ics) {
1352 __ li(VectorLoadICDescriptor::SlotRegister(),
1353 Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
1354 CallLoadIC(NOT_CONTEXTUAL);
1356 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1360 __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
1361 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1366 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1368 if (NeedsHomeObject(initializer)) {
1369 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1370 __ li(StoreDescriptor::NameRegister(),
1371 Operand(isolate()->factory()->home_object_symbol()));
1372 __ ld(StoreDescriptor::ValueRegister(),
1373 MemOperand(sp, offset * kPointerSize));
1379 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1380 TypeofState typeof_state,
1382 Register current = cp;
1388 if (s->num_heap_slots() > 0) {
1389 if (s->calls_sloppy_eval()) {
1390 // Check that extension is NULL.
1391 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1392 __ Branch(slow, ne, temp, Operand(zero_reg));
1394 // Load next context in chain.
1395 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1396 // Walk the rest of the chain without clobbering cp.
1399 // If no outer scope calls eval, we do not need to check more
1400 // context extensions.
1401 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1402 s = s->outer_scope();
1405 if (s->is_eval_scope()) {
1407 if (!current.is(next)) {
1408 __ Move(next, current);
1411 // Terminate at native context.
1412 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1413 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1414 __ Branch(&fast, eq, temp, Operand(a4));
1415 // Check that extension is NULL.
1416 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1417 __ Branch(slow, ne, temp, Operand(zero_reg));
1418 // Load next context in chain.
1419 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1424 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1425 __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1426 if (FLAG_vector_ics) {
1427 __ li(VectorLoadICDescriptor::SlotRegister(),
1428 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1431 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1438 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1440 DCHECK(var->IsContextSlot());
1441 Register context = cp;
1445 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1446 if (s->num_heap_slots() > 0) {
1447 if (s->calls_sloppy_eval()) {
1448 // Check that extension is NULL.
1449 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1450 __ Branch(slow, ne, temp, Operand(zero_reg));
1452 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1453 // Walk the rest of the chain without clobbering cp.
1457 // Check that last extension is NULL.
1458 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1459 __ Branch(slow, ne, temp, Operand(zero_reg));
1461 // This function is used only for loads, not stores, so it's safe to
1462 // return a cp-based operand (the write barrier cannot be allowed to
1463 // destroy the cp register).
1464 return ContextOperand(context, var->index());
1468 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1469 TypeofState typeof_state,
1472 // Generate fast-case code for variables that might be shadowed by
1473 // eval-introduced variables. Eval is used a lot without
1474 // introducing variables. In those cases, we do not want to
1475 // perform a runtime call for all variables in the scope
1476 // containing the eval.
1477 Variable* var = proxy->var();
1478 if (var->mode() == DYNAMIC_GLOBAL) {
1479 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1481 } else if (var->mode() == DYNAMIC_LOCAL) {
1482 Variable* local = var->local_if_not_shadowed();
1483 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1484 if (local->mode() == LET || local->mode() == CONST ||
1485 local->mode() == CONST_LEGACY) {
1486 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1487 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1488 if (local->mode() == CONST_LEGACY) {
1489 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1490 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1491 } else { // LET || CONST
1492 __ Branch(done, ne, at, Operand(zero_reg));
1493 __ li(a0, Operand(var->name()));
1495 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1503 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1504 // Record position before possible IC call.
1505 SetSourcePosition(proxy->position());
1506 Variable* var = proxy->var();
1508 // Three cases: global variables, lookup variables, and all other types of variables.
1510 switch (var->location()) {
1511 case Variable::UNALLOCATED: {
1512 Comment cmnt(masm_, "[ Global variable");
1513 // Use inline caching. Variable name is passed in a2 and the global
1514 // object (receiver) in a0.
1515 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1516 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1517 if (FLAG_vector_ics) {
1518 __ li(VectorLoadICDescriptor::SlotRegister(),
1519 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1521 CallGlobalLoadIC(var->name());
1522 context()->Plug(v0);
1526 case Variable::PARAMETER:
1527 case Variable::LOCAL:
1528 case Variable::CONTEXT: {
1529 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1530 : "[ Stack variable");
1531 if (var->binding_needs_init()) {
1532 // var->scope() may be NULL when the proxy is located in eval code and
1533 // refers to a potential outside binding. Currently those bindings are
1534 // always looked up dynamically, i.e. in that case
1535 // var->location() == LOOKUP.
1537 DCHECK(var->scope() != NULL);
1539 // Check if the binding really needs an initialization check. The check
1540 // can be skipped in the following situation: we have a LET or CONST
1541 // binding in harmony mode, both the Variable and the VariableProxy have
1542 // the same declaration scope (i.e. they are both in global code, in the
1543 // same function or in the same eval code) and the VariableProxy is
1544 // physically located in the source after the initializer of the variable.
1546 // We cannot skip any initialization checks for CONST in non-harmony
1547 // mode because const variables may be declared but never initialized:
1548 // if (false) { const x; }; var y = x;
1550 // The condition on the declaration scopes is a conservative check for
1551 // nested functions that access a binding and are called before the
1552 // binding is initialized:
1553 // function() { f(); let x = 1; function f() { x = 2; } }
1555 bool skip_init_check;
1556 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1557 skip_init_check = false;
1558 } else if (var->is_this()) {
1559 CHECK(info_->function() != nullptr &&
1560 (info_->function()->kind() & kSubclassConstructor) != 0);
1561 // TODO(dslomov): implement 'this' hole check elimination.
1562 skip_init_check = false;
1564 // Check that we always have a valid source position.
1565 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1566 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1567 skip_init_check = var->mode() != CONST_LEGACY &&
1568 var->initializer_position() < proxy->position();
1571 if (!skip_init_check) {
1572 // Let and const need a read barrier.
1574 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1575 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1576 if (var->mode() == LET || var->mode() == CONST) {
1577 // Throw a reference error when using an uninitialized let/const
1578 // binding in harmony mode.
1580 __ Branch(&done, ne, at, Operand(zero_reg));
1581 __ li(a0, Operand(var->name()));
1583 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1586 // Uninitialized const bindings outside of harmony mode are unholed.
1587 DCHECK(var->mode() == CONST_LEGACY);
1588 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1589 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1591 context()->Plug(v0);
1595 context()->Plug(var);
1599 case Variable::LOOKUP: {
1600 Comment cmnt(masm_, "[ Lookup variable");
1602 // Generate code for loading from variables potentially shadowed
1603 // by eval-introduced variables.
1604 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1606 __ li(a1, Operand(var->name()));
1607 __ Push(cp, a1); // Context and name.
1608 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1610 context()->Plug(v0);
1616 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1617 Comment cmnt(masm_, "[ RegExpLiteral");
1619 // Registers will be used as follows:
1620 // a5 = materialized value (RegExp literal)
1621 // a4 = JS function, literals array
1622 // a3 = literal index
1623 // a2 = RegExp pattern
1624 // a1 = RegExp flags
1625 // a0 = RegExp literal clone
1626 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1627 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1628 int literal_offset =
1629 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1630 __ ld(a5, FieldMemOperand(a4, literal_offset));
1631 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1632 __ Branch(&materialized, ne, a5, Operand(at));
1634 // Create regexp literal using runtime function.
1635 // Result will be in v0.
1636 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1637 __ li(a2, Operand(expr->pattern()));
1638 __ li(a1, Operand(expr->flags()));
1639 __ Push(a4, a3, a2, a1);
1640 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1643 __ bind(&materialized);
1644 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1645 Label allocated, runtime_allocate;
1646 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1649 __ bind(&runtime_allocate);
1650 __ li(a0, Operand(Smi::FromInt(size)));
1652 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1655 __ bind(&allocated);
1657 // After this, registers are used as follows:
1658 // v0: Newly allocated regexp.
1659 // a5: Materialized regexp.
1661 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
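// The freshly allocated object is initialized by copying every in-object
// field from the materialized boilerplate regexp, i.e. the literal is a
// shallow clone of it.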
1662 context()->Plug(v0);
1666 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1667 if (expression == NULL) {
1668 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1671 VisitForStackValue(expression);
1676 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1677 Comment cmnt(masm_, "[ ObjectLiteral");
1679 expr->BuildConstantProperties(isolate());
1680 Handle<FixedArray> constant_properties = expr->constant_properties();
1681 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1682 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1683 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1684 __ li(a1, Operand(constant_properties));
1685 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1686 if (MustCreateObjectLiteralWithRuntime(expr)) {
1687 __ Push(a3, a2, a1, a0);
1688 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1690 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1693 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1695 // If result_saved is true the result is on top of the stack. If
1696 // result_saved is false the result is in v0.
1697 bool result_saved = false;
1699 // Mark all computed expressions that are bound to a key that
1700 // is shadowed by a later occurrence of the same key. For the
1701 // marked expressions, no store code is emitted.
1702 expr->CalculateEmitStore(zone());
1704 AccessorTable accessor_table(zone());
1705 int property_index = 0;
1706 for (; property_index < expr->properties()->length(); property_index++) {
1707 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1708 if (property->is_computed_name()) break;
1709 if (property->IsCompileTimeValue()) continue;
1711 Literal* key = property->key()->AsLiteral();
1712 Expression* value = property->value();
1713 if (!result_saved) {
1714 __ push(v0); // Save result on stack.
1715 result_saved = true;
1717 switch (property->kind()) {
1718 case ObjectLiteral::Property::CONSTANT:
1720 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1721 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1723 case ObjectLiteral::Property::COMPUTED:
1724 // It is safe to use [[Put]] here because the boilerplate already
1725 // contains computed properties with an uninitialized value.
1726 if (key->value()->IsInternalizedString()) {
1727 if (property->emit_store()) {
1728 VisitForAccumulatorValue(value);
1729 __ mov(StoreDescriptor::ValueRegister(), result_register());
1730 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1731 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1732 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1733 CallStoreIC(key->LiteralFeedbackId());
1734 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1736 if (NeedsHomeObject(value)) {
1737 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1738 __ li(StoreDescriptor::NameRegister(),
1739 Operand(isolate()->factory()->home_object_symbol()));
1740 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1744 VisitForEffect(value);
1748 // Duplicate receiver on stack.
1749 __ ld(a0, MemOperand(sp));
1751 VisitForStackValue(key);
1752 VisitForStackValue(value);
1753 if (property->emit_store()) {
1754 EmitSetHomeObjectIfNeeded(value, 2);
1755 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1757 __ CallRuntime(Runtime::kSetProperty, 4);
1762 case ObjectLiteral::Property::PROTOTYPE:
1763 // Duplicate receiver on stack.
1764 __ ld(a0, MemOperand(sp));
1766 VisitForStackValue(value);
1767 DCHECK(property->emit_store());
1768 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1770 case ObjectLiteral::Property::GETTER:
1771 if (property->emit_store()) {
1772 accessor_table.lookup(key)->second->getter = value;
1775 case ObjectLiteral::Property::SETTER:
1776 if (property->emit_store()) {
1777 accessor_table.lookup(key)->second->setter = value;
1783 // Emit code to define accessors, using only a single call to the runtime for
1784 // each pair of corresponding getters and setters.
1785 for (AccessorTable::Iterator it = accessor_table.begin();
1786 it != accessor_table.end();
1788 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1790 VisitForStackValue(it->first);
1791 EmitAccessor(it->second->getter);
1792 EmitSetHomeObjectIfNeeded(it->second->getter, 2);
1793 EmitAccessor(it->second->setter);
1794 EmitSetHomeObjectIfNeeded(it->second->setter, 3);
1795 __ li(a0, Operand(Smi::FromInt(NONE)));
1797 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1800 // Object literals have two parts. The "static" part on the left contains no
1801 // computed property names, and so we can compute its map ahead of time; see
1802 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1803 // starts with the first computed property name, and continues with all
1804 // properties to its right. All the code from above initializes the static
1805 // component of the object literal, and arranges for the map of the result to
1806 // reflect the static order in which the keys appear. For the dynamic
1807 // properties, we compile them into a series of "SetOwnProperty" runtime
1808 // calls. This will preserve insertion order.
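// For example, in { a: 1, [key()]: 2, b: 3 } only "a" belongs to the static
// part handled above; [key()] and "b" (everything from the first computed
// name onwards) are defined one at a time by the loop below, which preserves
// insertion order.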
1809 for (; property_index < expr->properties()->length(); property_index++) {
1810 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1812 Expression* value = property->value();
1813 if (!result_saved) {
1814 __ push(v0); // Save result on the stack
1815 result_saved = true;
1818 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1821 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1822 DCHECK(!property->is_computed_name());
1823 VisitForStackValue(value);
1824 DCHECK(property->emit_store());
1825 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1827 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1828 VisitForStackValue(value);
1829 EmitSetHomeObjectIfNeeded(value, 2);
1831 switch (property->kind()) {
1832 case ObjectLiteral::Property::CONSTANT:
1833 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1834 case ObjectLiteral::Property::COMPUTED:
1835 if (property->emit_store()) {
1836 __ li(a0, Operand(Smi::FromInt(NONE)));
1838 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1844 case ObjectLiteral::Property::PROTOTYPE:
1848 case ObjectLiteral::Property::GETTER:
1849 __ li(a0, Operand(Smi::FromInt(NONE)));
1851 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1854 case ObjectLiteral::Property::SETTER:
1855 __ li(a0, Operand(Smi::FromInt(NONE)));
1857 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1863 if (expr->has_function()) {
1864 DCHECK(result_saved);
1865 __ ld(a0, MemOperand(sp));
1867 __ CallRuntime(Runtime::kToFastProperties, 1);
1871 context()->PlugTOS();
1873 context()->Plug(v0);
1878 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1879 Comment cmnt(masm_, "[ ArrayLiteral");
1881 expr->BuildConstantElements(isolate());
1883 Handle<FixedArray> constant_elements = expr->constant_elements();
1884 bool has_fast_elements =
1885 IsFastObjectElementsKind(expr->constant_elements_kind());
1887 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1888 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1889 // If the only customer of allocation sites is transitioning, then
1890 // we can turn it off if we don't have anywhere else to transition to.
1891 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1894 __ mov(a0, result_register());
1895 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1896 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1897 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1898 __ li(a1, Operand(constant_elements));
1899 if (MustCreateArrayLiteralWithRuntime(expr)) {
1900 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1901 __ Push(a3, a2, a1, a0);
1902 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1904 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1907 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1909 bool result_saved = false; // Is the result saved to the stack?
1910 ZoneList<Expression*>* subexprs = expr->values();
1911 int length = subexprs->length();
1913 // Emit code to evaluate all the non-constant subexpressions and to store
1914 // them into the newly cloned array.
1915 for (int i = 0; i < length; i++) {
1916 Expression* subexpr = subexprs->at(i);
1917 // If the subexpression is a literal or a simple materialized literal it
1918 // is already set in the cloned array.
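// For example, for [1, 2, foo()] the constants 1 and 2 are already present
// in the boilerplate, so only the result of foo() has to be written into the
// cloned array here.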
1919 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1921 if (!result_saved) {
1922 __ push(v0); // array literal
1923 __ Push(Smi::FromInt(expr->literal_index()));
1924 result_saved = true;
1927 VisitForAccumulatorValue(subexpr);
1929 if (has_fast_elements) {
1930 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1931 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1932 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1933 __ sd(result_register(), FieldMemOperand(a1, offset));
1934 // Update the write barrier for the array store.
1935 __ RecordWriteField(a1, offset, result_register(), a2,
1936 kRAHasBeenSaved, kDontSaveFPRegs,
1937 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1939 __ li(a3, Operand(Smi::FromInt(i)));
1940 __ mov(a0, result_register());
1941 StoreArrayLiteralElementStub stub(isolate());
1945 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1948 __ Pop(); // literal index
1949 context()->PlugTOS();
1951 context()->Plug(v0);
1956 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1957 DCHECK(expr->target()->IsValidReferenceExpression());
1959 Comment cmnt(masm_, "[ Assignment");
1961 Property* property = expr->target()->AsProperty();
1962 LhsKind assign_type = GetAssignType(property);
1964 // Evaluate LHS expression.
1965 switch (assign_type) {
1967 // Nothing to do here.
1969 case NAMED_PROPERTY:
1970 if (expr->is_compound()) {
1971 // We need the receiver both on the stack and in the register.
1972 VisitForStackValue(property->obj());
1973 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1975 VisitForStackValue(property->obj());
1978 case NAMED_SUPER_PROPERTY:
1979 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1980 EmitLoadHomeObject(property->obj()->AsSuperReference());
1981 __ Push(result_register());
1982 if (expr->is_compound()) {
1983 const Register scratch = a1;
1984 __ ld(scratch, MemOperand(sp, kPointerSize));
1985 __ Push(scratch, result_register());
1988 case KEYED_SUPER_PROPERTY: {
1989 const Register scratch = a1;
1990 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1991 EmitLoadHomeObject(property->obj()->AsSuperReference());
1992 __ Move(scratch, result_register());
1993 VisitForAccumulatorValue(property->key());
1994 __ Push(scratch, result_register());
1995 if (expr->is_compound()) {
1996 const Register scratch1 = a4;
1997 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1998 __ Push(scratch1, scratch, result_register());
2002 case KEYED_PROPERTY:
2003 // We need the key and receiver on both the stack and in v0 and a1.
2004 if (expr->is_compound()) {
2005 VisitForStackValue(property->obj());
2006 VisitForStackValue(property->key());
2007 __ ld(LoadDescriptor::ReceiverRegister(),
2008 MemOperand(sp, 1 * kPointerSize));
2009 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2011 VisitForStackValue(property->obj());
2012 VisitForStackValue(property->key());
2017 // For compound assignments we need another deoptimization point after the
2018 // variable/property load.
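// For example, x.y += 1 first loads x.y, then adds 1 and stores the result
// back; the load needs its own bailout point so that deoptimization can
// resume between the load and the binary operation.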
2019 if (expr->is_compound()) {
2020 { AccumulatorValueContext context(this);
2021 switch (assign_type) {
2023 EmitVariableLoad(expr->target()->AsVariableProxy());
2024 PrepareForBailout(expr->target(), TOS_REG);
2026 case NAMED_PROPERTY:
2027 EmitNamedPropertyLoad(property);
2028 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2030 case NAMED_SUPER_PROPERTY:
2031 EmitNamedSuperPropertyLoad(property);
2032 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2034 case KEYED_SUPER_PROPERTY:
2035 EmitKeyedSuperPropertyLoad(property);
2036 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2038 case KEYED_PROPERTY:
2039 EmitKeyedPropertyLoad(property);
2040 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2045 Token::Value op = expr->binary_op();
2046 __ push(v0); // Left operand goes on the stack.
2047 VisitForAccumulatorValue(expr->value());
2049 SetSourcePosition(expr->position() + 1);
2050 AccumulatorValueContext context(this);
2051 if (ShouldInlineSmiCase(op)) {
2052 EmitInlineSmiBinaryOp(expr->binary_operation(),
2057 EmitBinaryOp(expr->binary_operation(), op);
2060 // Deoptimization point in case the binary operation may have side effects.
2061 PrepareForBailout(expr->binary_operation(), TOS_REG);
2063 VisitForAccumulatorValue(expr->value());
2066 // Record source position before possible IC call.
2067 SetSourcePosition(expr->position());
2070 switch (assign_type) {
2072 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2074 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2075 context()->Plug(v0);
2077 case NAMED_PROPERTY:
2078 EmitNamedPropertyAssignment(expr);
2080 case NAMED_SUPER_PROPERTY:
2081 EmitNamedSuperPropertyStore(property);
2082 context()->Plug(v0);
2084 case KEYED_SUPER_PROPERTY:
2085 EmitKeyedSuperPropertyStore(property);
2086 context()->Plug(v0);
2088 case KEYED_PROPERTY:
2089 EmitKeyedPropertyAssignment(expr);
2095 void FullCodeGenerator::VisitYield(Yield* expr) {
2096 Comment cmnt(masm_, "[ Yield");
2097 // Evaluate yielded value first; the initial iterator definition depends on
2098 // this. It stays on the stack while we update the iterator.
2099 VisitForStackValue(expr->expression());
2101 switch (expr->yield_kind()) {
2102 case Yield::kSuspend:
2103 // Pop value from top-of-stack slot; box result into result register.
2104 EmitCreateIteratorResult(false);
2105 __ push(result_register());
2107 case Yield::kInitial: {
2108 Label suspend, continuation, post_runtime, resume;
2112 __ bind(&continuation);
2116 VisitForAccumulatorValue(expr->generator_object());
2117 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2118 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2119 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2120 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2122 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2123 kRAHasBeenSaved, kDontSaveFPRegs);
2124 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2125 __ Branch(&post_runtime, eq, sp, Operand(a1));
2126 __ push(v0); // generator object
2127 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2128 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2129 __ bind(&post_runtime);
2130 __ pop(result_register());
2131 EmitReturnSequence();
2134 context()->Plug(result_register());
2138 case Yield::kFinal: {
2139 VisitForAccumulatorValue(expr->generator_object());
2140 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2141 __ sd(a1, FieldMemOperand(result_register(),
2142 JSGeneratorObject::kContinuationOffset));
2143 // Pop value from top-of-stack slot, box result into result register.
2144 EmitCreateIteratorResult(true);
2145 EmitUnwindBeforeReturn();
2146 EmitReturnSequence();
2150 case Yield::kDelegating: {
2151 VisitForStackValue(expr->generator_object());
2153 // Initial stack layout is as follows:
2154 // [sp + 1 * kPointerSize] iter
2155 // [sp + 0 * kPointerSize] g
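// Roughly, the delegating yield below behaves like the following loop (a
// sketch of the control flow, not exact spec text); the labels mentioned in
// the comments further down correspond to its steps:
//
//   received = undefined, f = "next";
//   loop:                           // l_next, l_call
//     result = iter[f](received);
//     if (result.done) break;       // checked after the call
//     received = yield result;      // l_try / l_suspend; result is forwarded
//                                   // to the caller without re-boxing
//     f = "next"; goto loop;
//   use result.value;               // loaded at the very end
//
// If resuming after the yield throws, l_catch sets f = "throw" and the
// exception becomes the argument of the next iter[f] call.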
2157 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2158 Label l_next, l_call;
2159 Register load_receiver = LoadDescriptor::ReceiverRegister();
2160 Register load_name = LoadDescriptor::NameRegister();
2161 // Initial send value is undefined.
2162 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2165 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2168 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2169 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2170 __ Push(a2, a3, a0); // "throw", iter, except
2173 // try { received = %yield result }
2174 // Shuffle the received result above a try handler and yield it without
2175 // re-boxing.
2177 __ pop(a0); // result
2178 EnterTryBlock(expr->index(), &l_catch);
2179 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2180 __ push(a0); // result
2182 __ bind(&l_continuation);
2185 __ bind(&l_suspend);
2186 const int generator_object_depth = kPointerSize + try_block_size;
2187 __ ld(a0, MemOperand(sp, generator_object_depth));
2189 __ Push(Smi::FromInt(expr->index())); // handler-index
2190 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2191 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2192 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2193 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2195 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2196 kRAHasBeenSaved, kDontSaveFPRegs);
2197 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2198 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2199 __ pop(v0); // result
2200 EmitReturnSequence();
2202 __ bind(&l_resume); // received in a0
2203 ExitTryBlock(expr->index());
2205 // receiver = iter; f = 'next'; arg = received;
2207 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2208 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2209 __ Push(load_name, a3, a0); // "next", iter, received
2211 // result = receiver[f](arg);
2213 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2214 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2215 if (FLAG_vector_ics) {
2216 __ li(VectorLoadICDescriptor::SlotRegister(),
2217 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2219 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2220 CallIC(ic, TypeFeedbackId::None());
2223 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2224 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2227 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2228 __ Drop(1); // The function is still on the stack; drop it.
2230 // if (!result.done) goto l_try;
2231 __ Move(load_receiver, v0);
2233 __ push(load_receiver); // save result
2234 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2235 if (FLAG_vector_ics) {
2236 __ li(VectorLoadICDescriptor::SlotRegister(),
2237 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2239 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2241 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2242 CallIC(bool_ic);
2243 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2246 __ pop(load_receiver); // result
2247 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2248 if (FLAG_vector_ics) {
2249 __ li(VectorLoadICDescriptor::SlotRegister(),
2250 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2252 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2253 context()->DropAndPlug(2, v0); // drop iter and g
2260 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2262 JSGeneratorObject::ResumeMode resume_mode) {
2263 // The value stays in a0, and is ultimately read by the resumed generator, as
2264 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2265 // is read to throw the value when the resumed generator is already closed.
2266 // a1 will hold the generator object until the activation has been resumed.
2267 VisitForStackValue(generator);
2268 VisitForAccumulatorValue(value);
2271 // Load suspended function and context.
2272 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2273 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2275 // Load receiver and store as the first argument.
2276 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2279 // Push holes for the rest of the arguments to the generator function.
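// For example, when resuming a suspended function* gen(a, b) this loop
// pushes two hole values, one per formal parameter, so the rebuilt frame has
// the expected number of argument slots.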
2280 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2281 // The argument count is stored as int32_t on 64-bit platforms.
2282 // TODO(plind): Smi on 32-bit platforms.
2283 __ lw(a3,
2284 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2285 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2286 Label push_argument_holes, push_frame;
2287 __ bind(&push_argument_holes);
2288 __ Dsubu(a3, a3, Operand(1));
2289 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2291 __ jmp(&push_argument_holes);
2293 // Enter a new JavaScript frame, and initialize its slots as they were when
2294 // the generator was suspended.
2295 Label resume_frame, done;
2296 __ bind(&push_frame);
2297 __ Call(&resume_frame);
2299 __ bind(&resume_frame);
2300 // ra = return address.
2301 // fp = caller's frame pointer.
2302 // cp = callee's context,
2303 // a4 = callee's JS function.
2304 __ Push(ra, fp, cp, a4);
2305 // Adjust FP to point to saved FP.
2306 __ Daddu(fp, sp, 2 * kPointerSize);
2308 // Load the operand stack size.
2309 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2310 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2313 // If we are sending a value and there is no operand stack, we can jump back
2314 // in directly.
2315 if (resume_mode == JSGeneratorObject::NEXT) {
2317 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2318 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2319 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2321 __ Daddu(a3, a3, Operand(a2));
2322 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2323 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2325 __ bind(&slow_resume);
2328 // Otherwise, we push holes for the operand stack and call the runtime to fix
2329 // up the stack and the handlers.
2330 Label push_operand_holes, call_resume;
2331 __ bind(&push_operand_holes);
2332 __ Dsubu(a3, a3, Operand(1));
2333 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2335 __ Branch(&push_operand_holes);
2336 __ bind(&call_resume);
2337 DCHECK(!result_register().is(a1));
2338 __ Push(a1, result_register());
2339 __ Push(Smi::FromInt(resume_mode));
2340 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2341 // Not reached: the runtime call returns elsewhere.
2342 __ stop("not-reached");
2345 context()->Plug(result_register());
2349 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2353 const int instance_size = 5 * kPointerSize;
2354 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2355 instance_size);
2357 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2360 __ bind(&gc_required);
2361 __ Push(Smi::FromInt(instance_size));
2362 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2363 __ ld(context_register(),
2364 MemOperand(fp, StandardFrameConstants::kContextOffset));
2366 __ bind(&allocated);
2367 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2368 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2369 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2371 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2372 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2373 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2374 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2375 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2376 __ sd(a2,
2377 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2378 __ sd(a3,
2379 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2381 // Only the value field needs a write barrier, as the other values are in the
2382 // root set.
2383 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2384 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2388 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2389 SetSourcePosition(prop->position());
2390 Literal* key = prop->key()->AsLiteral();
2391 DCHECK(!prop->IsSuperAccess());
2393 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2394 if (FLAG_vector_ics) {
2395 __ li(VectorLoadICDescriptor::SlotRegister(),
2396 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2397 CallLoadIC(NOT_CONTEXTUAL);
2399 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2404 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2405 // Stack: receiver, home_object.
2406 SetSourcePosition(prop->position());
2407 Literal* key = prop->key()->AsLiteral();
2408 DCHECK(!key->value()->IsSmi());
2409 DCHECK(prop->IsSuperAccess());
2411 __ Push(key->value());
2412 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2416 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2417 SetSourcePosition(prop->position());
2418 // Call keyed load IC. It has register arguments receiver and key.
2419 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2420 if (FLAG_vector_ics) {
2421 __ li(VectorLoadICDescriptor::SlotRegister(),
2422 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2425 CallIC(ic, prop->PropertyFeedbackId());
2430 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2431 // Stack: receiver, home_object, key.
2432 SetSourcePosition(prop->position());
2434 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2438 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2440 Expression* left_expr,
2441 Expression* right_expr) {
2442 Label done, smi_case, stub_call;
2444 Register scratch1 = a2;
2445 Register scratch2 = a3;
2447 // Get the arguments.
2448 Register left = a1;
2449 Register right = a0;
2451 __ mov(a0, result_register());
2453 // Perform combined smi check on both operands.
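// Since kSmiTag == 0, OR-ing the two tagged operands produces a word whose
// low (tag) bit is clear only if both operands are smis, so a single tag
// test on the OR covers both values.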
2454 __ Or(scratch1, left, Operand(right));
2455 STATIC_ASSERT(kSmiTag == 0);
2456 JumpPatchSite patch_site(masm_);
2457 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2459 __ bind(&stub_call);
2460 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2461 CallIC(code, expr->BinaryOperationFeedbackId());
2462 patch_site.EmitPatchInfo();
2466 // Smi case. This code works the same way as the smi-smi case in the type
2467 // recording binary operation stub; see the BinaryOpIC stub for details.
2470 __ GetLeastBitsFromSmi(scratch1, right, 5);
2471 __ dsrav(right, left, scratch1);
2472 __ And(v0, right, Operand(0xffffffff00000000L));
2475 __ SmiUntag(scratch1, left);
2476 __ GetLeastBitsFromSmi(scratch2, right, 5);
2477 __ dsllv(scratch1, scratch1, scratch2);
2478 __ SmiTag(v0, scratch1);
2482 __ SmiUntag(scratch1, left);
2483 __ GetLeastBitsFromSmi(scratch2, right, 5);
2484 __ dsrlv(scratch1, scratch1, scratch2);
2485 __ And(scratch2, scratch1, 0x80000000);
2486 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2487 __ SmiTag(v0, scratch1);
2491 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2492 __ BranchOnOverflow(&stub_call, scratch1);
2495 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2496 __ BranchOnOverflow(&stub_call, scratch1);
2499 __ Dmulh(v0, left, right);
2500 __ dsra32(scratch2, v0, 0);
2501 __ sra(scratch1, v0, 31);
2502 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2504 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2505 __ Daddu(scratch2, right, left);
2506 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2507 DCHECK(Smi::FromInt(0) == 0);
2508 __ mov(v0, zero_reg);
2512 __ Or(v0, left, Operand(right));
2514 case Token::BIT_AND:
2515 __ And(v0, left, Operand(right));
2517 case Token::BIT_XOR:
2518 __ Xor(v0, left, Operand(right));
2525 context()->Plug(v0);
2529 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2530 // Constructor is in v0.
2531 DCHECK(lit != NULL);
2534 // No access check is needed here since the constructor is created by the
2536 Register scratch = a1;
2537 __ ld(scratch,
2538 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2541 for (int i = 0; i < lit->properties()->length(); i++) {
2542 ObjectLiteral::Property* property = lit->properties()->at(i);
2543 Expression* value = property->value();
2545 if (property->is_static()) {
2546 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2548 __ ld(scratch, MemOperand(sp, 0)); // prototype
2551 EmitPropertyKey(property, lit->GetIdForProperty(i));
2553 // The static prototype property is read-only. We handle the non-computed
2554 // property name case in the parser. Since this is the only case where we
2555 // need to check for an own read-only property, we special-case it here so
2556 // we do not need to do the check for every property.
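// For example, class C { static ["proto" + "type"]() {} } has to throw here
// at runtime, because the name only turns out to be "prototype" once the
// computed key has been evaluated.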
2557 if (property->is_static() && property->is_computed_name()) {
2558 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2562 VisitForStackValue(value);
2563 EmitSetHomeObjectIfNeeded(value, 2);
2565 switch (property->kind()) {
2566 case ObjectLiteral::Property::CONSTANT:
2567 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2568 case ObjectLiteral::Property::PROTOTYPE:
2570 case ObjectLiteral::Property::COMPUTED:
2571 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2574 case ObjectLiteral::Property::GETTER:
2575 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2577 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2580 case ObjectLiteral::Property::SETTER:
2581 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2583 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2592 __ CallRuntime(Runtime::kToFastProperties, 1);
2595 __ CallRuntime(Runtime::kToFastProperties, 1);
2599 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2600 __ mov(a0, result_register());
2602 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2603 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2604 CallIC(code, expr->BinaryOperationFeedbackId());
2605 patch_site.EmitPatchInfo();
2606 context()->Plug(v0);
2610 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2611 DCHECK(expr->IsValidReferenceExpression());
2613 Property* prop = expr->AsProperty();
2614 LhsKind assign_type = GetAssignType(prop);
2616 switch (assign_type) {
2618 Variable* var = expr->AsVariableProxy()->var();
2619 EffectContext context(this);
2620 EmitVariableAssignment(var, Token::ASSIGN);
2623 case NAMED_PROPERTY: {
2624 __ push(result_register()); // Preserve value.
2625 VisitForAccumulatorValue(prop->obj());
2626 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2627 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2628 __ li(StoreDescriptor::NameRegister(),
2629 Operand(prop->key()->AsLiteral()->value()));
2633 case NAMED_SUPER_PROPERTY: {
2635 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2636 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2637 // stack: value, this; v0: home_object
2638 Register scratch = a2;
2639 Register scratch2 = a3;
2640 __ mov(scratch, result_register()); // home_object
2641 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2642 __ ld(scratch2, MemOperand(sp, 0)); // this
2643 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2644 __ sd(scratch, MemOperand(sp, 0)); // home_object
2645 // stack: this, home_object; v0: value
2646 EmitNamedSuperPropertyStore(prop);
2649 case KEYED_SUPER_PROPERTY: {
2651 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2652 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2653 __ Push(result_register());
2654 VisitForAccumulatorValue(prop->key());
2655 Register scratch = a2;
2656 Register scratch2 = a3;
2657 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2658 // stack: value, this, home_object; v0: key, a3: value
2659 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2660 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2661 __ ld(scratch, MemOperand(sp, 0)); // home_object
2662 __ sd(scratch, MemOperand(sp, kPointerSize));
2663 __ sd(v0, MemOperand(sp, 0));
2664 __ Move(v0, scratch2);
2665 // stack: this, home_object, key; v0: value.
2666 EmitKeyedSuperPropertyStore(prop);
2669 case KEYED_PROPERTY: {
2670 __ push(result_register()); // Preserve value.
2671 VisitForStackValue(prop->obj());
2672 VisitForAccumulatorValue(prop->key());
2673 __ Move(StoreDescriptor::NameRegister(), result_register());
2674 __ Pop(StoreDescriptor::ValueRegister(),
2675 StoreDescriptor::ReceiverRegister());
2677 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2682 context()->Plug(v0);
2686 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2687 Variable* var, MemOperand location) {
2688 __ sd(result_register(), location);
2689 if (var->IsContextSlot()) {
2690 // RecordWrite may destroy all its register arguments.
2691 __ Move(a3, result_register());
2692 int offset = Context::SlotOffset(var->index());
2693 __ RecordWriteContextSlot(
2694 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2699 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2700 if (var->IsUnallocated()) {
2701 // Global var, const, or let.
2702 __ mov(StoreDescriptor::ValueRegister(), result_register());
2703 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2704 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2707 } else if (var->mode() == LET && op != Token::INIT_LET) {
2708 // Non-initializing assignment to let variable needs a write barrier.
2709 DCHECK(!var->IsLookupSlot());
2710 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2712 MemOperand location = VarOperand(var, a1);
2713 __ ld(a3, location);
2714 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2715 __ Branch(&assign, ne, a3, Operand(a4));
2716 __ li(a3, Operand(var->name()));
2718 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2719 // Perform the assignment.
2721 EmitStoreToStackLocalOrContextSlot(var, location);
2723 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2724 // Assignment to const variable needs a write barrier.
2725 DCHECK(!var->IsLookupSlot());
2726 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2728 MemOperand location = VarOperand(var, a1);
2729 __ ld(a3, location);
2730 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2731 __ Branch(&const_error, ne, a3, Operand(at));
2732 __ li(a3, Operand(var->name()));
2734 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2735 __ bind(&const_error);
2736 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2738 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2739 if (var->IsLookupSlot()) {
2740 // Assignment to var.
2741 __ li(a4, Operand(var->name()));
2742 __ li(a3, Operand(Smi::FromInt(language_mode())));
2743 // sp[0] : language mode.
2744 // sp[8] : name.
2745 // sp[16] : context.
2746 // sp[24] : value.
2747 __ Push(v0, cp, a4, a3);
2748 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2750 // Assignment to var or initializing assignment to let/const in harmony
2752 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2753 MemOperand location = VarOperand(var, a1);
2754 if (generate_debug_code_ && op == Token::INIT_LET) {
2755 // Check for an uninitialized let binding.
2756 __ ld(a2, location);
2757 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2758 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2760 EmitStoreToStackLocalOrContextSlot(var, location);
2763 } else if (op == Token::INIT_CONST_LEGACY) {
2764 // Const initializers need a write barrier.
2765 DCHECK(!var->IsParameter()); // No const parameters.
2766 if (var->IsLookupSlot()) {
2767 __ li(a0, Operand(var->name()));
2768 __ Push(v0, cp, a0); // Context and name.
2769 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2771 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2773 MemOperand location = VarOperand(var, a1);
2774 __ ld(a2, location);
2775 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2776 __ Branch(&skip, ne, a2, Operand(at));
2777 EmitStoreToStackLocalOrContextSlot(var, location);
2782 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2783 if (is_strict(language_mode())) {
2784 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2786 // Silently ignore store in sloppy mode.
2791 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2792 // Assignment to a property, using a named store IC.
2793 Property* prop = expr->target()->AsProperty();
2794 DCHECK(prop != NULL);
2795 DCHECK(prop->key()->IsLiteral());
2797 // Record source code position before IC call.
2798 SetSourcePosition(expr->position());
2799 __ mov(StoreDescriptor::ValueRegister(), result_register());
2800 __ li(StoreDescriptor::NameRegister(),
2801 Operand(prop->key()->AsLiteral()->value()));
2802 __ pop(StoreDescriptor::ReceiverRegister());
2803 CallStoreIC(expr->AssignmentFeedbackId());
2805 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2806 context()->Plug(v0);
2810 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2811 // Assignment to named property of super.
2813 // stack : receiver ('this'), home_object
2814 DCHECK(prop != NULL);
2815 Literal* key = prop->key()->AsLiteral();
2816 DCHECK(key != NULL);
2818 __ Push(key->value());
2820 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2821 : Runtime::kStoreToSuper_Sloppy),
2822 4);
2826 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2827 // Assignment to keyed property of super.
2829 // stack : receiver ('this'), home_object, key
2830 DCHECK(prop != NULL);
2833 __ CallRuntime(
2834 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2835 : Runtime::kStoreKeyedToSuper_Sloppy),
2836 4);
2840 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2841 // Assignment to a property, using a keyed store IC.
2843 // Record source code position before IC call.
2844 SetSourcePosition(expr->position());
2845 // Call keyed store IC.
2846 // The arguments are:
2847 // - a0 is the value,
2849 // - a2 is the receiver.
2850 __ mov(StoreDescriptor::ValueRegister(), result_register());
2851 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2852 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2854 Handle<Code> ic =
2855 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2856 CallIC(ic, expr->AssignmentFeedbackId());
2858 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2859 context()->Plug(v0);
2863 void FullCodeGenerator::VisitProperty(Property* expr) {
2864 Comment cmnt(masm_, "[ Property");
2865 Expression* key = expr->key();
2867 if (key->IsPropertyName()) {
2868 if (!expr->IsSuperAccess()) {
2869 VisitForAccumulatorValue(expr->obj());
2870 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2871 EmitNamedPropertyLoad(expr);
2873 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2874 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2875 __ Push(result_register());
2876 EmitNamedSuperPropertyLoad(expr);
2879 if (!expr->IsSuperAccess()) {
2880 VisitForStackValue(expr->obj());
2881 VisitForAccumulatorValue(expr->key());
2882 __ Move(LoadDescriptor::NameRegister(), v0);
2883 __ pop(LoadDescriptor::ReceiverRegister());
2884 EmitKeyedPropertyLoad(expr);
2886 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2887 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2888 __ Push(result_register());
2889 VisitForStackValue(expr->key());
2890 EmitKeyedSuperPropertyLoad(expr);
2893 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2894 context()->Plug(v0);
2898 void FullCodeGenerator::CallIC(Handle<Code> code,
2899 TypeFeedbackId id) {
2901 __ Call(code, RelocInfo::CODE_TARGET, id);
2905 // Code common for calls using the IC.
2906 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2907 Expression* callee = expr->expression();
2909 CallICState::CallType call_type =
2910 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2912 // Get the target function.
2913 if (call_type == CallICState::FUNCTION) {
2914 { StackValueContext context(this);
2915 EmitVariableLoad(callee->AsVariableProxy());
2916 PrepareForBailout(callee, NO_REGISTERS);
2918 // Push undefined as receiver. This is patched in the method prologue if it
2919 // is a sloppy mode method.
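// For example, for a plain call foo(1) the slot pushed here starts out as
// undefined; if foo turns out to be a sloppy-mode function, its prologue
// replaces the undefined receiver with the global receiver.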
2920 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2923 // Load the function from the receiver.
2924 DCHECK(callee->IsProperty());
2925 DCHECK(!callee->AsProperty()->IsSuperAccess());
2926 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2927 EmitNamedPropertyLoad(callee->AsProperty());
2928 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2929 // Push the target function under the receiver.
2930 __ ld(at, MemOperand(sp, 0));
2932 __ sd(v0, MemOperand(sp, kPointerSize));
2935 EmitCall(expr, call_type);
2939 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2940 Expression* callee = expr->expression();
2941 DCHECK(callee->IsProperty());
2942 Property* prop = callee->AsProperty();
2943 DCHECK(prop->IsSuperAccess());
2945 SetSourcePosition(prop->position());
2946 Literal* key = prop->key()->AsLiteral();
2947 DCHECK(!key->value()->IsSmi());
2948 // Load the function from the receiver.
2949 const Register scratch = a1;
2950 SuperReference* super_ref = prop->obj()->AsSuperReference();
2951 EmitLoadHomeObject(super_ref);
2952 __ mov(scratch, v0);
2953 VisitForAccumulatorValue(super_ref->this_var());
2954 __ Push(scratch, v0, v0, scratch);
2955 __ Push(key->value());
2959 // - this (receiver)
2960 // - this (receiver) <-- LoadFromSuper will pop here and below.
2963 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2965 // Replace home_object with target function.
2966 __ sd(v0, MemOperand(sp, kPointerSize));
2969 // - target function
2970 // - this (receiver)
2971 EmitCall(expr, CallICState::METHOD);
2975 // Code common for calls using the IC.
2976 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2979 VisitForAccumulatorValue(key);
2981 Expression* callee = expr->expression();
2983 // Load the function from the receiver.
2984 DCHECK(callee->IsProperty());
2985 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2986 __ Move(LoadDescriptor::NameRegister(), v0);
2987 EmitKeyedPropertyLoad(callee->AsProperty());
2988 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2990 // Push the target function under the receiver.
2991 __ ld(at, MemOperand(sp, 0));
2993 __ sd(v0, MemOperand(sp, kPointerSize));
2995 EmitCall(expr, CallICState::METHOD);
2999 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3000 Expression* callee = expr->expression();
3001 DCHECK(callee->IsProperty());
3002 Property* prop = callee->AsProperty();
3003 DCHECK(prop->IsSuperAccess());
3005 SetSourcePosition(prop->position());
3006 // Load the function from the receiver.
3007 const Register scratch = a1;
3008 SuperReference* super_ref = prop->obj()->AsSuperReference();
3009 EmitLoadHomeObject(super_ref);
3010 __ Move(scratch, v0);
3011 VisitForAccumulatorValue(super_ref->this_var());
3012 __ Push(scratch, v0, v0, scratch);
3013 VisitForStackValue(prop->key());
3017 // - this (receiver)
3018 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3021 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3023 // Replace home_object with target function.
3024 __ sd(v0, MemOperand(sp, kPointerSize));
3027 // - target function
3028 // - this (receiver)
3029 EmitCall(expr, CallICState::METHOD);
3033 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3034 // Load the arguments.
3035 ZoneList<Expression*>* args = expr->arguments();
3036 int arg_count = args->length();
3037 { PreservePositionScope scope(masm()->positions_recorder());
3038 for (int i = 0; i < arg_count; i++) {
3039 VisitForStackValue(args->at(i));
3043 // Record source position of the IC call.
3044 SetSourcePosition(expr->position());
3045 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3046 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3047 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3048 // Don't assign a type feedback id to the IC, since type feedback is provided
3049 // by the vector above.
3051 RecordJSReturnSite(expr);
3052 // Restore context register.
3053 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3054 context()->DropAndPlug(1, v0);
3058 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3059 // a7: copy of the first argument or undefined if it doesn't exist.
3060 if (arg_count > 0) {
3061 __ ld(a7, MemOperand(sp, arg_count * kPointerSize));
3063 __ LoadRoot(a7, Heap::kUndefinedValueRootIndex);
3066 // a6: the JSFunction of the enclosing function.
3067 __ ld(a6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3069 // a5: the receiver of the enclosing function.
3070 int receiver_offset = 2 + info_->scope()->num_parameters();
3071 __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize));
3073 // a4: the language mode.
3074 __ li(a4, Operand(Smi::FromInt(language_mode())));
3076 // a1: the start position of the scope the call resides in.
3077 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3079 // Do the runtime call.
3081 __ Push(a6, a5, a4, a1);
3082 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3086 void FullCodeGenerator::EmitLoadSuperConstructor() {
3087 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3089 __ CallRuntime(Runtime::kGetPrototype, 1);
3093 void FullCodeGenerator::VisitCall(Call* expr) {
3095 // We want to verify that RecordJSReturnSite gets called on all paths
3096 // through this function. Avoid early returns.
3097 expr->return_is_recorded_ = false;
3100 Comment cmnt(masm_, "[ Call");
3101 Expression* callee = expr->expression();
3102 Call::CallType call_type = expr->GetCallType(isolate());
3104 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3105 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3106 // to resolve the function we need to call and the receiver of the
3107 // call. Then we call the resolved function using the given arguments.
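// For example, a call written as eval("x + 1") is only a direct eval if
// "eval" still resolves to the original global eval function; if it has been
// shadowed or reassigned, the resolved function is simply called with the
// string argument. The runtime call below makes that decision and hands back
// the function to call together with the receiver to use.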
3109 ZoneList<Expression*>* args = expr->arguments();
3110 int arg_count = args->length();
3112 { PreservePositionScope pos_scope(masm()->positions_recorder());
3113 VisitForStackValue(callee);
3114 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3115 __ push(a2); // Reserved receiver slot.
3117 // Push the arguments.
3118 for (int i = 0; i < arg_count; i++) {
3119 VisitForStackValue(args->at(i));
3122 // Push a copy of the function (found below the arguments) and
3124 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3126 EmitResolvePossiblyDirectEval(arg_count);
3128 // The runtime call returns a pair of values in v0 (function) and
3129 // v1 (receiver). Touch up the stack with the right values.
3130 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3131 __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
3133 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3135 // Record source position for debugger.
3136 SetSourcePosition(expr->position());
3137 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3138 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3140 RecordJSReturnSite(expr);
3141 // Restore context register.
3142 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3143 context()->DropAndPlug(1, v0);
3144 } else if (call_type == Call::GLOBAL_CALL) {
3145 EmitCallWithLoadIC(expr);
3146 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3147 // Call to a lookup slot (dynamically introduced variable).
3148 VariableProxy* proxy = callee->AsVariableProxy();
3151 { PreservePositionScope scope(masm()->positions_recorder());
3152 // Generate code for loading from variables potentially shadowed
3153 // by eval-introduced variables.
3154 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3158 // Call the runtime to find the function to call (returned in v0)
3159 // and the object holding it (returned in v1).
3160 DCHECK(!context_register().is(a2));
3161 __ li(a2, Operand(proxy->name()));
3162 __ Push(context_register(), a2);
3163 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3164 __ Push(v0, v1); // Function, receiver.
3165 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3167 // If fast case code has been generated, emit code to push the
3168 // function and receiver and have the slow path jump around this
3170 if (done.is_linked()) {
3176 // The receiver is implicitly the global receiver. Indicate this
3177 // by passing undefined to the call function stub.
3178 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3183 // The receiver is either the global receiver or an object found
3184 // by LoadContextSlot.
3186 } else if (call_type == Call::PROPERTY_CALL) {
3187 Property* property = callee->AsProperty();
3188 bool is_named_call = property->key()->IsPropertyName();
3189 if (property->IsSuperAccess()) {
3190 if (is_named_call) {
3191 EmitSuperCallWithLoadIC(expr);
3193 EmitKeyedSuperCallWithLoadIC(expr);
3197 PreservePositionScope scope(masm()->positions_recorder());
3198 VisitForStackValue(property->obj());
3200 if (is_named_call) {
3201 EmitCallWithLoadIC(expr);
3203 EmitKeyedCallWithLoadIC(expr, property->key());
3206 } else if (call_type == Call::SUPER_CALL) {
3207 EmitSuperConstructorCall(expr);
3209 DCHECK(call_type == Call::OTHER_CALL);
3210 // Call to an arbitrary expression not handled specially above.
3211 { PreservePositionScope scope(masm()->positions_recorder());
3212 VisitForStackValue(callee);
3214 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3216 // Emit function call.
3221 // RecordJSReturnSite should have been called.
3222 DCHECK(expr->return_is_recorded_);
3227 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3228 Comment cmnt(masm_, "[ CallNew");
3229 // According to ECMA-262, section 11.2.2, page 44, the function
3230 // expression in new calls must be evaluated before the arguments.
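// For example, in new f(g()) the expression f is evaluated to a value before
// the argument expression g() runs.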
3233 // Push constructor on the stack. If it's not a function it's used as
3234 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3235 // used as receiver.
3236 DCHECK(!expr->expression()->IsSuperReference());
3237 VisitForStackValue(expr->expression());
3239 // Push the arguments ("left-to-right") on the stack.
3240 ZoneList<Expression*>* args = expr->arguments();
3241 int arg_count = args->length();
3242 for (int i = 0; i < arg_count; i++) {
3243 VisitForStackValue(args->at(i));
3246 // Call the construct call builtin that handles allocation and
3247 // constructor invocation.
3248 SetSourcePosition(expr->position());
3250 // Load function and argument count into a1 and a0.
3251 __ li(a0, Operand(arg_count));
3252 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3254 // Record call targets in unoptimized code.
3255 if (FLAG_pretenuring_call_new) {
3256 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3257 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3258 expr->CallNewFeedbackSlot().ToInt() + 1);
3261 __ li(a2, FeedbackVector());
3262 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3264 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3265 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3266 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3267 context()->Plug(v0);
3271 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3272 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3273 GetVar(result_register(), new_target_var);
3274 __ Push(result_register());
3276 EmitLoadSuperConstructor();
3277 __ push(result_register());
3279 // Push the arguments ("left-to-right") on the stack.
3280 ZoneList<Expression*>* args = expr->arguments();
3281 int arg_count = args->length();
3282 for (int i = 0; i < arg_count; i++) {
3283 VisitForStackValue(args->at(i));
3286 // Call the construct call builtin that handles allocation and
3287 // constructor invocation.
3288 SetSourcePosition(expr->position());
3290 // Load function and argument count into a1 and a0.
3291 __ li(a0, Operand(arg_count));
3292 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3294 // Record call targets in unoptimized code.
3295 if (FLAG_pretenuring_call_new) {
3297 /* TODO(dslomov): support pretenuring.
3298 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3299 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3300 expr->CallNewFeedbackSlot().ToInt() + 1);
3304 __ li(a2, FeedbackVector());
3305 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3307 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3308 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3312 RecordJSReturnSite(expr);
3314 SuperReference* super_ref = expr->expression()->AsSuperReference();
3315 Variable* this_var = super_ref->this_var()->var();
3316 GetVar(a1, this_var);
3317 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3318 Label uninitialized_this;
3319 __ Branch(&uninitialized_this, eq, a1, Operand(at));
3320 __ li(a0, Operand(this_var->name()));
3322 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3323 __ bind(&uninitialized_this);
3325 EmitVariableAssignment(this_var, Token::INIT_CONST);
3326 context()->Plug(v0);
3330 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3331 ZoneList<Expression*>* args = expr->arguments();
3332 DCHECK(args->length() == 1);
3334 VisitForAccumulatorValue(args->at(0));
3336 Label materialize_true, materialize_false;
3337 Label* if_true = NULL;
3338 Label* if_false = NULL;
3339 Label* fall_through = NULL;
3340 context()->PrepareTest(&materialize_true, &materialize_false,
3341 &if_true, &if_false, &fall_through);
3343 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3344 __ SmiTst(v0, a4);
3345 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3347 context()->Plug(if_true, if_false);
3351 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3352 ZoneList<Expression*>* args = expr->arguments();
3353 DCHECK(args->length() == 1);
3355 VisitForAccumulatorValue(args->at(0));
3357 Label materialize_true, materialize_false;
3358 Label* if_true = NULL;
3359 Label* if_false = NULL;
3360 Label* fall_through = NULL;
3361 context()->PrepareTest(&materialize_true, &materialize_false,
3362 &if_true, &if_false, &fall_through);
3364 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3365 __ NonNegativeSmiTst(v0, at);
3366 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3368 context()->Plug(if_true, if_false);
3372 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3373 ZoneList<Expression*>* args = expr->arguments();
3374 DCHECK(args->length() == 1);
3376 VisitForAccumulatorValue(args->at(0));
3378 Label materialize_true, materialize_false;
3379 Label* if_true = NULL;
3380 Label* if_false = NULL;
3381 Label* fall_through = NULL;
3382 context()->PrepareTest(&materialize_true, &materialize_false,
3383 &if_true, &if_false, &fall_through);
3385 __ JumpIfSmi(v0, if_false);
3386 __ LoadRoot(at, Heap::kNullValueRootIndex);
3387 __ Branch(if_true, eq, v0, Operand(at));
3388 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3389 // Undetectable objects behave like undefined when tested with typeof.
3390 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3391 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3392 __ Branch(if_false, ne, at, Operand(zero_reg));
3393 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3394 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3395 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3396 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3397 if_true, if_false, fall_through);
3399 context()->Plug(if_true, if_false);
3403 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3404 ZoneList<Expression*>* args = expr->arguments();
3405 DCHECK(args->length() == 1);
3407 VisitForAccumulatorValue(args->at(0));
3409 Label materialize_true, materialize_false;
3410 Label* if_true = NULL;
3411 Label* if_false = NULL;
3412 Label* fall_through = NULL;
3413 context()->PrepareTest(&materialize_true, &materialize_false,
3414 &if_true, &if_false, &fall_through);
3416 __ JumpIfSmi(v0, if_false);
3417 __ GetObjectType(v0, a1, a1);
3418 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3419 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3420 if_true, if_false, fall_through);
3422 context()->Plug(if_true, if_false);
3426 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3427 ZoneList<Expression*>* args = expr->arguments();
3428 DCHECK(args->length() == 1);
3430 VisitForAccumulatorValue(args->at(0));
3432 Label materialize_true, materialize_false;
3433 Label* if_true = NULL;
3434 Label* if_false = NULL;
3435 Label* fall_through = NULL;
3436 context()->PrepareTest(&materialize_true, &materialize_false,
3437 &if_true, &if_false, &fall_through);
3439 __ JumpIfSmi(v0, if_false);
3440 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3441 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3442 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3443 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3444 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3446 context()->Plug(if_true, if_false);
3450 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3451 CallRuntime* expr) {
3452 ZoneList<Expression*>* args = expr->arguments();
3453 DCHECK(args->length() == 1);
3455 VisitForAccumulatorValue(args->at(0));
3457 Label materialize_true, materialize_false, skip_lookup;
3458 Label* if_true = NULL;
3459 Label* if_false = NULL;
3460 Label* fall_through = NULL;
3461 context()->PrepareTest(&materialize_true, &materialize_false,
3462 &if_true, &if_false, &fall_through);
3464 __ AssertNotSmi(v0);
3466 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3467 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3468 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3469 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3471 // Check for fast case object. Generate false result for slow case object.
3472 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3473 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3474 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3475 __ Branch(if_false, eq, a2, Operand(a4));
3477 // Look for valueOf name in the descriptor array, and indicate false if
3478 // found. Since we omit an enumeration index check, if it is added via a
3479 // transition that shares its descriptor array, this is a false positive.
3480 Label entry, loop, done;
3482 // Skip loop if no descriptors are valid.
3483 __ NumberOfOwnDescriptors(a3, a1);
3484 __ Branch(&done, eq, a3, Operand(zero_reg));
3486 __ LoadInstanceDescriptors(a1, a4);
3487 // a4: descriptor array.
3488 // a3: valid entries in the descriptor array.
3489 STATIC_ASSERT(kSmiTag == 0);
3490 STATIC_ASSERT(kSmiTagSize == 1);
3492 // STATIC_ASSERT(kPointerSize == 4);
3493 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3494 __ Dmul(a3, a3, at);
3495 // Calculate location of the first key name.
3496 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3497 // Calculate the end of the descriptor array.
3499 __ dsll(a5, a3, kPointerSizeLog2);
3500 __ Daddu(a2, a2, a5);
3502 // Loop through all the keys in the descriptor array. If one of these is the
3503 // string "valueOf" the result is false.
3504 // The use of a6 to store the valueOf string assumes that it is not otherwise
3505 // used in the loop below.
3506 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3509 __ ld(a3, MemOperand(a4, 0));
3510 __ Branch(if_false, eq, a3, Operand(a6));
3511 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3513 __ Branch(&loop, ne, a4, Operand(a2));
3517 // Set the bit in the map to indicate that there is no local valueOf field.
3518 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3519 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3520 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3522 __ bind(&skip_lookup);
3524 // If a valueOf property is not found on the object, check that its
3525 // prototype is the unmodified String prototype. If not, the result is false.
3526 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3527 __ JumpIfSmi(a2, if_false);
3528 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3529 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3530 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3531 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3532 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3533 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3535 context()->Plug(if_true, if_false);
3539 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3540 ZoneList<Expression*>* args = expr->arguments();
3541 DCHECK(args->length() == 1);
3543 VisitForAccumulatorValue(args->at(0));
3545 Label materialize_true, materialize_false;
3546 Label* if_true = NULL;
3547 Label* if_false = NULL;
3548 Label* fall_through = NULL;
3549 context()->PrepareTest(&materialize_true, &materialize_false,
3550 &if_true, &if_false, &fall_through);
3552 __ JumpIfSmi(v0, if_false);
3553 __ GetObjectType(v0, a1, a2);
3554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3555 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3556 __ Branch(if_false);
3558 context()->Plug(if_true, if_false);
3562 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 DCHECK(args->length() == 1);
3566 VisitForAccumulatorValue(args->at(0));
3568 Label materialize_true, materialize_false;
3569 Label* if_true = NULL;
3570 Label* if_false = NULL;
3571 Label* fall_through = NULL;
3572 context()->PrepareTest(&materialize_true, &materialize_false,
3573 &if_true, &if_false, &fall_through);
3575 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3576 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3577 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3578 __ li(a4, 0x80000000);
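// Note: a heap number holds an IEEE-754 double, and -0.0 is the only value
// whose upper (sign/exponent) word is 0x80000000 with a zero lower
// (mantissa) word. The code below checks the upper word against 0x80000000
// and, only if it matches, the lower word against 0.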
3580 __ Branch(&not_nan, ne, a2, Operand(a4));
3581 __ mov(a4, zero_reg);
3585 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3586 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3588 context()->Plug(if_true, if_false);
3592 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3593 ZoneList<Expression*>* args = expr->arguments();
3594 DCHECK(args->length() == 1);
3596 VisitForAccumulatorValue(args->at(0));
3598 Label materialize_true, materialize_false;
3599 Label* if_true = NULL;
3600 Label* if_false = NULL;
3601 Label* fall_through = NULL;
3602 context()->PrepareTest(&materialize_true, &materialize_false,
3603 &if_true, &if_false, &fall_through);
3605 __ JumpIfSmi(v0, if_false);
3606 __ GetObjectType(v0, a1, a1);
3607 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3608 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3609 if_true, if_false, fall_through);
3611 context()->Plug(if_true, if_false);
3615 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3616 ZoneList<Expression*>* args = expr->arguments();
3617 DCHECK(args->length() == 1);
3619 VisitForAccumulatorValue(args->at(0));
3621 Label materialize_true, materialize_false;
3622 Label* if_true = NULL;
3623 Label* if_false = NULL;
3624 Label* fall_through = NULL;
3625 context()->PrepareTest(&materialize_true, &materialize_false,
3626 &if_true, &if_false, &fall_through);
3628 __ JumpIfSmi(v0, if_false);
3629 __ GetObjectType(v0, a1, a1);
3630 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3631 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3633 context()->Plug(if_true, if_false);
3637 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3638 ZoneList<Expression*>* args = expr->arguments();
3639 DCHECK(args->length() == 1);
3641 VisitForAccumulatorValue(args->at(0));
3643 Label materialize_true, materialize_false;
3644 Label* if_true = NULL;
3645 Label* if_false = NULL;
3646 Label* fall_through = NULL;
3647 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3648 &if_false, &fall_through);
3650 __ JumpIfSmi(v0, if_false);
3652 Register type_reg = a2;
3653 __ GetObjectType(v0, map, type_reg);
3654 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
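// Note: this is the usual unsigned range-check trick. After subtracting
// FIRST_JS_PROXY_TYPE, every type in [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE]
// becomes a small unsigned value, so the single 'ls' (unsigned <=) comparison
// below covers the whole proxy range.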
3655 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3656 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3657 if_true, if_false, fall_through);
3659 context()->Plug(if_true, if_false);
3663 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3664 DCHECK(expr->arguments()->length() == 0);
3666 Label materialize_true, materialize_false;
3667 Label* if_true = NULL;
3668 Label* if_false = NULL;
3669 Label* fall_through = NULL;
3670 context()->PrepareTest(&materialize_true, &materialize_false,
3671 &if_true, &if_false, &fall_through);
3673 // Get the frame pointer for the calling frame.
3674 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3676 // Skip the arguments adaptor frame if it exists.
3677 Label check_frame_marker;
3678 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3679 __ Branch(&check_frame_marker, ne,
3680 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3681 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3683 // Check the marker in the calling frame.
3684 __ bind(&check_frame_marker);
3685 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3686 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3687 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3688 if_true, if_false, fall_through);
3690 context()->Plug(if_true, if_false);
3694 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3695 ZoneList<Expression*>* args = expr->arguments();
3696 DCHECK(args->length() == 2);
3698 // Load the two objects into registers and perform the comparison.
3699 VisitForStackValue(args->at(0));
3700 VisitForAccumulatorValue(args->at(1));
3702 Label materialize_true, materialize_false;
3703 Label* if_true = NULL;
3704 Label* if_false = NULL;
3705 Label* fall_through = NULL;
3706 context()->PrepareTest(&materialize_true, &materialize_false,
3707 &if_true, &if_false, &fall_through);
3710 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3711 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3713 context()->Plug(if_true, if_false);
3717 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3718 ZoneList<Expression*>* args = expr->arguments();
3719 DCHECK(args->length() == 1);
3721 // ArgumentsAccessStub expects the key in a1 and the formal
3722 // parameter count in a0.
3723 VisitForAccumulatorValue(args->at(0));
3725 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3726 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3728 context()->Plug(v0);
3732 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3733 DCHECK(expr->arguments()->length() == 0);
3735 // Get the number of formal parameters.
3736 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3738 // Check if the calling frame is an arguments adaptor frame.
3739 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3740 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3741 __ Branch(&exit, ne, a3,
3742 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3744 // Arguments adaptor case: Read the arguments length from the adaptor frame.
3746 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3749 context()->Plug(v0);
3753 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3754 ZoneList<Expression*>* args = expr->arguments();
3755 DCHECK(args->length() == 1);
3756 Label done, null, function, non_function_constructor;
3758 VisitForAccumulatorValue(args->at(0));
3760 // If the object is a smi, we return null.
3761 __ JumpIfSmi(v0, &null);
3763 // Check that the object is a JS object but take special care of JS
3764 // functions to make sure they have 'Function' as their class.
3765 // Assume that there are only two callable types, one at each end of the
3766 // type range for JS object types. This saves extra comparisons.
3767 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3768 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3769 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3771 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3772 FIRST_SPEC_OBJECT_TYPE + 1);
3773 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3775 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3776 LAST_SPEC_OBJECT_TYPE - 1);
3777 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3778 // Assume that there is no larger type.
3779 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3781 // Check if the constructor in the map is a JS function.
3782 Register instance_type = a2;
3783 __ GetMapConstructor(v0, v0, a1, instance_type);
3784 __ Branch(&non_function_constructor, ne, instance_type,
3785 Operand(JS_FUNCTION_TYPE));
3787 // v0 now contains the constructor function. Grab the
3788 // instance class name from there.
3789 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3790 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3793 // Functions have class 'Function'.
3795 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3798 // Objects with a non-function constructor have class 'Object'.
3799 __ bind(&non_function_constructor);
3800 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3803 // Non-JS objects have class null.
3805 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3810 context()->Plug(v0);
3814 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3815 // Load the arguments on the stack and call the stub.
3816 SubStringStub stub(isolate());
3817 ZoneList<Expression*>* args = expr->arguments();
3818 DCHECK(args->length() == 3);
3819 VisitForStackValue(args->at(0));
3820 VisitForStackValue(args->at(1));
3821 VisitForStackValue(args->at(2));
3823 context()->Plug(v0);
3827 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3828 // Load the arguments on the stack and call the stub.
3829 RegExpExecStub stub(isolate());
3830 ZoneList<Expression*>* args = expr->arguments();
3831 DCHECK(args->length() == 4);
3832 VisitForStackValue(args->at(0));
3833 VisitForStackValue(args->at(1));
3834 VisitForStackValue(args->at(2));
3835 VisitForStackValue(args->at(3));
3837 context()->Plug(v0);
3841 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3842 ZoneList<Expression*>* args = expr->arguments();
3843 DCHECK(args->length() == 1);
3845 VisitForAccumulatorValue(args->at(0)); // Load the object.
3848 // If the object is a smi return the object.
3849 __ JumpIfSmi(v0, &done);
3850 // If the object is not a value type, return the object.
3851 __ GetObjectType(v0, a1, a1);
3852 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3854 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3857 context()->Plug(v0);
3861 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3862 ZoneList<Expression*>* args = expr->arguments();
3863 DCHECK(args->length() == 2);
3864 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3865 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3867 VisitForAccumulatorValue(args->at(0)); // Load the object.
3869 Label runtime, done, not_date_object;
3870 Register object = v0;
3871 Register result = v0;
3872 Register scratch0 = t1;
3873 Register scratch1 = a1;
3875 __ JumpIfSmi(object, &not_date_object);
3876 __ GetObjectType(object, scratch1, scratch1);
3877 __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3879 if (index->value() == 0) {
3880 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3883 if (index->value() < JSDate::kFirstUncachedField) {
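// Note: JSDate caches its broken-down fields (year, month, day, ...) behind
// an isolate-wide date-cache stamp. If the stamp stored in the object still
// matches the current stamp, the cached field can be read directly;
// otherwise the C function called further below recomputes it.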
3884 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3885 __ li(scratch1, Operand(stamp));
3886 __ ld(scratch1, MemOperand(scratch1));
3887 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3888 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3889 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3890 kPointerSize * index->value()));
3894 __ PrepareCallCFunction(2, scratch1);
3895 __ li(a1, Operand(index));
3896 __ Move(a0, object);
3897 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3901 __ bind(&not_date_object);
3902 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3904 context()->Plug(v0);
3908 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3909 ZoneList<Expression*>* args = expr->arguments();
3910 DCHECK_EQ(3, args->length());
3912 Register string = v0;
3913 Register index = a1;
3914 Register value = a2;
3916 VisitForStackValue(args->at(0)); // index
3917 VisitForStackValue(args->at(1)); // value
3918 VisitForAccumulatorValue(args->at(2)); // string
3919 __ Pop(index, value);
3921 if (FLAG_debug_code) {
3922 __ SmiTst(value, at);
3923 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3924 __ SmiTst(index, at);
3925 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3926 __ SmiUntag(index, index);
3927 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3928 Register scratch = t1;
3929 __ EmitSeqStringSetCharCheck(
3930 string, index, value, scratch, one_byte_seq_type);
3931 __ SmiTag(index, index);
3934 __ SmiUntag(value, value);
3935 __ Daddu(at,
3936 string,
3937 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3938 __ SmiUntag(index);
3939 __ Daddu(at, at, index);
3940 __ sb(value, MemOperand(at));
3941 context()->Plug(string);
3945 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3946 ZoneList<Expression*>* args = expr->arguments();
3947 DCHECK_EQ(3, args->length());
3949 Register string = v0;
3950 Register index = a1;
3951 Register value = a2;
3953 VisitForStackValue(args->at(0)); // index
3954 VisitForStackValue(args->at(1)); // value
3955 VisitForAccumulatorValue(args->at(2)); // string
3956 __ Pop(index, value);
3958 if (FLAG_debug_code) {
3959 __ SmiTst(value, at);
3960 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3961 __ SmiTst(index, at);
3962 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3963 __ SmiUntag(index, index);
3964 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3965 Register scratch = t1;
3966 __ EmitSeqStringSetCharCheck(
3967 string, index, value, scratch, two_byte_seq_type);
3968 __ SmiTag(index, index);
3971 __ SmiUntag(value, value);
3972 __ Daddu(at,
3973 string,
3974 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3975 __ dsra(index, index, 32 - 1);
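// Note: on MIPS64 a smi keeps its payload in the upper 32 bits, so shifting
// right by (32 - 1) both untags the index and multiplies it by 2, i.e. it
// turns a character index into a two-byte offset in a single instruction.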
3976 __ Daddu(at, at, index);
3977 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3978 __ sh(value, MemOperand(at));
3979 context()->Plug(string);
3983 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3984 // Load the arguments on the stack and call the runtime function.
3985 ZoneList<Expression*>* args = expr->arguments();
3986 DCHECK(args->length() == 2);
3987 VisitForStackValue(args->at(0));
3988 VisitForStackValue(args->at(1));
3989 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3991 context()->Plug(v0);
3995 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3996 ZoneList<Expression*>* args = expr->arguments();
3997 DCHECK(args->length() == 2);
3999 VisitForStackValue(args->at(0)); // Load the object.
4000 VisitForAccumulatorValue(args->at(1)); // Load the value.
4001 __ pop(a1); // v0 = value. a1 = object.
4004 // If the object is a smi, return the value.
4005 __ JumpIfSmi(a1, &done);
4007 // If the object is not a value type, return the value.
4008 __ GetObjectType(a1, a2, a2);
4009 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
4012 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
4013 // Update the write barrier. Save the value as it will be
4014 // overwritten by the write barrier code and is needed afterward.
4015 __ mov(a2, v0);
4016 __ RecordWriteField(
4017 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
4020 context()->Plug(v0);
4024 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4025 ZoneList<Expression*>* args = expr->arguments();
4026 DCHECK_EQ(args->length(), 1);
4028 // Load the argument into a0 and call the stub.
4029 VisitForAccumulatorValue(args->at(0));
4030 __ mov(a0, result_register());
4032 NumberToStringStub stub(isolate());
4034 context()->Plug(v0);
4038 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4039 ZoneList<Expression*>* args = expr->arguments();
4040 DCHECK(args->length() == 1);
4042 VisitForAccumulatorValue(args->at(0));
4045 StringCharFromCodeGenerator generator(v0, a1);
4046 generator.GenerateFast(masm_);
4049 NopRuntimeCallHelper call_helper;
4050 generator.GenerateSlow(masm_, call_helper);
4053 context()->Plug(a1);
4057 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4058 ZoneList<Expression*>* args = expr->arguments();
4059 DCHECK(args->length() == 2);
4061 VisitForStackValue(args->at(0));
4062 VisitForAccumulatorValue(args->at(1));
4063 __ mov(a0, result_register());
4065 Register object = a1;
4066 Register index = a0;
4067 Register result = v0;
4071 Label need_conversion;
4072 Label index_out_of_range;
4074 StringCharCodeAtGenerator generator(object,
4075 index,
4076 result,
4077 &need_conversion,
4078 &need_conversion,
4079 &index_out_of_range,
4080 STRING_INDEX_IS_NUMBER);
4081 generator.GenerateFast(masm_);
4084 __ bind(&index_out_of_range);
4085 // When the index is out of range, the spec requires us to return NaN.
4087 __ LoadRoot(result, Heap::kNanValueRootIndex);
4090 __ bind(&need_conversion);
4091 // Load the undefined value into the result register, which will
4092 // trigger conversion.
4093 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4096 NopRuntimeCallHelper call_helper;
4097 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4100 context()->Plug(result);
4104 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4105 ZoneList<Expression*>* args = expr->arguments();
4106 DCHECK(args->length() == 2);
4108 VisitForStackValue(args->at(0));
4109 VisitForAccumulatorValue(args->at(1));
4110 __ mov(a0, result_register());
4112 Register object = a1;
4113 Register index = a0;
4114 Register scratch = a3;
4115 Register result = v0;
4119 Label need_conversion;
4120 Label index_out_of_range;
4122 StringCharAtGenerator generator(object,
4123 index,
4124 scratch,
4125 result,
4126 &need_conversion,
4127 &need_conversion,
4128 &index_out_of_range,
4129 STRING_INDEX_IS_NUMBER);
4130 generator.GenerateFast(masm_);
4133 __ bind(&index_out_of_range);
4134 // When the index is out of range, the spec requires us to return
4135 // the empty string.
4136 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4139 __ bind(&need_conversion);
4140 // Move smi zero into the result register, which will trigger conversion.
4142 __ li(result, Operand(Smi::FromInt(0)));
4145 NopRuntimeCallHelper call_helper;
4146 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4149 context()->Plug(result);
4153 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4154 ZoneList<Expression*>* args = expr->arguments();
4155 DCHECK_EQ(2, args->length());
4156 VisitForStackValue(args->at(0));
4157 VisitForAccumulatorValue(args->at(1));
4160 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4161 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4163 context()->Plug(v0);
4167 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4168 ZoneList<Expression*>* args = expr->arguments();
4169 DCHECK_EQ(2, args->length());
4171 VisitForStackValue(args->at(0));
4172 VisitForStackValue(args->at(1));
4174 StringCompareStub stub(isolate());
4176 context()->Plug(v0);
4180 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4181 ZoneList<Expression*>* args = expr->arguments();
4182 DCHECK(args->length() >= 2);
4184 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4185 for (int i = 0; i < arg_count + 1; i++) {
4186 VisitForStackValue(args->at(i));
4188 VisitForAccumulatorValue(args->last()); // Function.
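// Note: the intrinsic's arguments are laid out as
// (receiver, arg1, ..., argN, function); the loop above pushed the receiver
// and the explicit arguments, and v0 now holds the function to invoke.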
4190 Label runtime, done;
4191 // Check for non-function argument (including proxy).
4192 __ JumpIfSmi(v0, &runtime);
4193 __ GetObjectType(v0, a1, a1);
4194 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4196 // InvokeFunction requires the function in a1. Move it in there.
4197 __ mov(a1, result_register());
4198 ParameterCount count(arg_count);
4199 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4200 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4205 __ CallRuntime(Runtime::kCall, args->length());
4208 context()->Plug(v0);
4212 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4213 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4214 GetVar(result_register(), new_target_var);
4215 __ Push(result_register());
4217 EmitLoadSuperConstructor();
4218 __ Push(result_register());
4220 // Check if the calling frame is an arguments adaptor frame.
4221 Label adaptor_frame, args_set_up, runtime;
4222 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4223 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4224 __ Branch(&adaptor_frame, eq, a3,
4225 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4226 // default constructor has no arguments, so no adaptor frame means no args.
4227 __ mov(a0, zero_reg);
4228 __ Branch(&args_set_up);
4230 // Copy arguments from adaptor frame.
4232 __ bind(&adaptor_frame);
4233 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4234 __ SmiUntag(a1, a1);
4236 // Subtract 1 from arguments count, for new.target.
4237 __ Daddu(a1, a1, Operand(-1));
4240 // Get arguments pointer in a2.
4241 __ dsll(at, a1, kPointerSizeLog2);
4242 __ Daddu(a2, a2, Operand(at));
4243 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4246 // Pre-decrement a2 with kPointerSize on each iteration.
4247 // Pre-decrement in order to skip receiver.
4248 __ Daddu(a2, a2, Operand(-kPointerSize));
4249 __ ld(a3, MemOperand(a2));
4251 __ Daddu(a1, a1, Operand(-1));
4252 __ Branch(&loop, ne, a1, Operand(zero_reg));
4255 __ bind(&args_set_up);
4256 __ dsll(at, a0, kPointerSizeLog2);
4257 __ Daddu(at, at, Operand(sp));
4258 __ ld(a1, MemOperand(at, 0));
4259 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4261 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4262 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4266 context()->Plug(result_register());
4270 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4271 RegExpConstructResultStub stub(isolate());
4272 ZoneList<Expression*>* args = expr->arguments();
4273 DCHECK(args->length() == 3);
4274 VisitForStackValue(args->at(0));
4275 VisitForStackValue(args->at(1));
4276 VisitForAccumulatorValue(args->at(2));
4277 __ mov(a0, result_register());
4281 context()->Plug(v0);
4285 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4286 ZoneList<Expression*>* args = expr->arguments();
4287 DCHECK_EQ(2, args->length());
4289 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4290 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4292 Handle<FixedArray> jsfunction_result_caches(
4293 isolate()->native_context()->jsfunction_result_caches());
4294 if (jsfunction_result_caches->length() <= cache_id) {
4295 __ Abort(kAttemptToUseUndefinedCache);
4296 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4297 context()->Plug(v0);
4301 VisitForAccumulatorValue(args->at(1));
4303 Register key = v0;
4304 Register cache = a1;
4305 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4306 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4307 __ ld(cache,
4308 ContextOperand(
4309 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4310 __ ld(cache,
4311 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4314 Label done, not_found;
4315 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4316 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4317 // a2 now holds finger offset as a smi.
4318 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4319 // a3 now points to the start of fixed array elements.
4320 __ SmiScale(at, a2, kPointerSizeLog2);
4321 __ daddu(a3, a3, at);
4322 // a3 now points to key of indexed element of cache.
4323 __ ld(a2, MemOperand(a3));
4324 __ Branch(&not_found, ne, key, Operand(a2));
4326 __ ld(v0, MemOperand(a3, kPointerSize));
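// Note: cache entries are stored as adjacent (key, value) pairs, so on a hit
// the cached result sits one pointer past the matching key.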
4329 __ bind(&not_found);
4330 // Call runtime to perform the lookup.
4331 __ Push(cache, key);
4332 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4335 context()->Plug(v0);
4339 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4340 ZoneList<Expression*>* args = expr->arguments();
4341 VisitForAccumulatorValue(args->at(0));
4343 Label materialize_true, materialize_false;
4344 Label* if_true = NULL;
4345 Label* if_false = NULL;
4346 Label* fall_through = NULL;
4347 context()->PrepareTest(&materialize_true, &materialize_false,
4348 &if_true, &if_false, &fall_through);
4350 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4351 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
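// Note: a string's hash field can cache a small array index; the masked bits
// are zero exactly when such a cached index is present, which is the
// condition this intrinsic reports.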
4353 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4354 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4356 context()->Plug(if_true, if_false);
4360 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4361 ZoneList<Expression*>* args = expr->arguments();
4362 DCHECK(args->length() == 1);
4363 VisitForAccumulatorValue(args->at(0));
4365 __ AssertString(v0);
4367 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4368 __ IndexFromHash(v0, v0);
4370 context()->Plug(v0);
4374 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4375 Label bailout, done, one_char_separator, long_separator,
4376 non_trivial_array, not_size_one_array, loop,
4377 empty_separator_loop, one_char_separator_loop,
4378 one_char_separator_loop_entry, long_separator_loop;
4379 ZoneList<Expression*>* args = expr->arguments();
4380 DCHECK(args->length() == 2);
4381 VisitForStackValue(args->at(1));
4382 VisitForAccumulatorValue(args->at(0));
4384 // All aliases of the same register have disjoint lifetimes.
4385 Register array = v0;
4386 Register elements = no_reg; // Will be v0.
4387 Register result = no_reg; // Will be v0.
4388 Register separator = a1;
4389 Register array_length = a2;
4390 Register result_pos = no_reg; // Will be a2.
4391 Register string_length = a3;
4392 Register string = a4;
4393 Register element = a5;
4394 Register elements_end = a6;
4395 Register scratch1 = a7;
4396 Register scratch2 = t1;
4397 Register scratch3 = t0;
4399 // Separator operand is on the stack.
4402 // Check that the array is a JSArray.
4403 __ JumpIfSmi(array, &bailout);
4404 __ GetObjectType(array, scratch1, scratch2);
4405 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4407 // Check that the array has fast elements.
4408 __ CheckFastElements(scratch1, scratch2, &bailout);
4410 // If the array has length zero, return the empty string.
4411 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4412 __ SmiUntag(array_length);
4413 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4414 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4417 __ bind(&non_trivial_array);
4419 // Get the FixedArray containing array's elements.
4421 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4422 array = no_reg; // End of array's live range.
4424 // Check that all array elements are sequential one-byte strings, and
4425 // accumulate the sum of their lengths, as a smi-encoded value.
4426 __ mov(string_length, zero_reg);
4427 __ Daddu(element,
4428 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4429 __ dsll(elements_end, array_length, kPointerSizeLog2);
4430 __ Daddu(elements_end, element, elements_end);
4431 // Loop condition: while (element < elements_end).
4432 // Live values in registers:
4433 // elements: Fixed array of strings.
4434 // array_length: Length of the fixed array of strings (not smi)
4435 // separator: Separator string
4436 // string_length: Accumulated sum of string lengths (smi).
4437 // element: Current array element.
4438 // elements_end: Array end.
4439 if (generate_debug_code_) {
4440 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4441 Operand(zero_reg));
4442 }
4443 __ bind(&loop);
4444 __ ld(string, MemOperand(element));
4445 __ Daddu(element, element, kPointerSize);
4446 __ JumpIfSmi(string, &bailout);
4447 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4448 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4449 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4450 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4451 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4452 __ BranchOnOverflow(&bailout, scratch3);
4453 __ Branch(&loop, lt, element, Operand(elements_end));
4455 // If array_length is 1, return elements[0], a string.
4456 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4457 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4460 __ bind(&not_size_one_array);
4462 // Live values in registers:
4463 // separator: Separator string
4464 // array_length: Length of the array.
4465 // string_length: Sum of string lengths (smi).
4466 // elements: FixedArray of strings.
4468 // Check that the separator is a flat one-byte string.
4469 __ JumpIfSmi(separator, &bailout);
4470 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4471 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4472 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4474 // Add (separator length times array_length) - separator length to the
4475 // string_length to get the length of the result string. array_length is not
4476 // smi but the other values are, so the result is a smi.
4477 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4478 __ Dsubu(string_length, string_length, Operand(scratch1));
4479 __ SmiUntag(scratch1);
4480 __ Dmul(scratch2, array_length, scratch1);
4481 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are zero.
4483 __ dsra32(scratch1, scratch2, 0);
4484 __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4485 __ SmiUntag(string_length);
4486 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4487 __ BranchOnOverflow(&bailout, scratch3);
4489 // Get first element in the array to free up the elements register to be used
4490 // for the result.
4491 __ Daddu(element,
4492 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4493 result = elements; // End of live range for elements.
4495 // Live values in registers:
4496 // element: First array element
4497 // separator: Separator string
4498 // string_length: Length of result string (not smi)
4499 // array_length: Length of the array.
4500 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4501 elements_end, &bailout);
4502 // Prepare for looping. Set up elements_end to end of the array. Set
4503 // result_pos to the position of the result where to write the first character.
4505 __ dsll(elements_end, array_length, kPointerSizeLog2);
4506 __ Daddu(elements_end, element, elements_end);
4507 result_pos = array_length; // End of live range for array_length.
4508 array_length = no_reg;
4509 __ Daddu(result_pos,
4510 result,
4511 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4513 // Check the length of the separator.
4514 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4515 __ li(at, Operand(Smi::FromInt(1)));
4516 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4517 __ Branch(&long_separator, gt, scratch1, Operand(at));
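// Note: three specialized copy loops follow: empty separator, single
// one-byte-character separator, and general (long) separator. Falling
// through here means the separator is empty.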
4519 // Empty separator case.
4520 __ bind(&empty_separator_loop);
4521 // Live values in registers:
4522 // result_pos: the position to which we are currently copying characters.
4523 // element: Current array element.
4524 // elements_end: Array end.
4526 // Copy next array element to the result.
4527 __ ld(string, MemOperand(element));
4528 __ Daddu(element, element, kPointerSize);
4529 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4530 __ SmiUntag(string_length);
4531 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4532 __ CopyBytes(string, result_pos, string_length, scratch1);
4533 // End while (element < elements_end).
4534 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4535 DCHECK(result.is(v0));
4538 // One-character separator case.
4539 __ bind(&one_char_separator);
4540 // Replace separator with its one-byte character value.
4541 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4542 // Jump into the loop after the code that copies the separator, so the first
4543 // element is not preceded by a separator.
4544 __ jmp(&one_char_separator_loop_entry);
4546 __ bind(&one_char_separator_loop);
4547 // Live values in registers:
4548 // result_pos: the position to which we are currently copying characters.
4549 // element: Current array element.
4550 // elements_end: Array end.
4551 // separator: Single separator one-byte char (in lower byte).
4553 // Copy the separator character to the result.
4554 __ sb(separator, MemOperand(result_pos));
4555 __ Daddu(result_pos, result_pos, 1);
4557 // Copy next array element to the result.
4558 __ bind(&one_char_separator_loop_entry);
4559 __ ld(string, MemOperand(element));
4560 __ Daddu(element, element, kPointerSize);
4561 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4562 __ SmiUntag(string_length);
4563 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4564 __ CopyBytes(string, result_pos, string_length, scratch1);
4565 // End while (element < elements_end).
4566 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4567 DCHECK(result.is(v0));
4570 // Long separator case (separator is more than one character). Entry is at the
4571 // label long_separator below.
4572 __ bind(&long_separator_loop);
4573 // Live values in registers:
4574 // result_pos: the position to which we are currently copying characters.
4575 // element: Current array element.
4576 // elements_end: Array end.
4577 // separator: Separator string.
4579 // Copy the separator to the result.
4580 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4581 __ SmiUntag(string_length);
4582 __ Daddu(string,
4583 separator,
4584 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4585 __ CopyBytes(string, result_pos, string_length, scratch1);
4587 __ bind(&long_separator);
4588 __ ld(string, MemOperand(element));
4589 __ Daddu(element, element, kPointerSize);
4590 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4591 __ SmiUntag(string_length);
4592 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4593 __ CopyBytes(string, result_pos, string_length, scratch1);
4594 // End while (element < elements_end).
4595 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4596 DCHECK(result.is(v0));
4600 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4602 context()->Plug(v0);
4606 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4607 DCHECK(expr->arguments()->length() == 0);
4608 ExternalReference debug_is_active =
4609 ExternalReference::debug_is_active_address(isolate());
4610 __ li(at, Operand(debug_is_active));
4611 __ lbu(v0, MemOperand(at));
4613 context()->Plug(v0);
4617 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4618 ZoneList<Expression*>* args = expr->arguments();
4619 int arg_count = args->length();
4621 if (expr->is_jsruntime()) {
4622 Comment cmnt(masm_, "[ CallRuntime");
4623 // Push the builtins object as the receiver.
4624 Register receiver = LoadDescriptor::ReceiverRegister();
4625 __ ld(receiver, GlobalObjectOperand());
4626 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4629 // Load the function from the receiver.
4630 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4631 if (FLAG_vector_ics) {
4632 __ li(VectorLoadICDescriptor::SlotRegister(),
4633 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4634 CallLoadIC(NOT_CONTEXTUAL);
4636 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4639 // Push the target function under the receiver.
4640 __ ld(at, MemOperand(sp, 0));
4642 __ sd(v0, MemOperand(sp, kPointerSize));
4644 // Push the arguments ("left-to-right").
4645 for (int i = 0; i < arg_count; i++) {
4646 VisitForStackValue(args->at(i));
4649 // Record source position of the IC call.
4650 SetSourcePosition(expr->position());
4651 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4652 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4655 // Restore context register.
4656 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4658 context()->DropAndPlug(1, v0);
4660 const Runtime::Function* function = expr->function();
4661 switch (function->function_id) {
4662 #define CALL_INTRINSIC_GENERATOR(Name) \
4663 case Runtime::kInline##Name: { \
4664 Comment cmnt(masm_, "[ Inline" #Name); \
4665 return Emit##Name(expr); \
4667 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4668 #undef CALL_INTRINSIC_GENERATOR
4670 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4671 // Push the arguments ("left-to-right").
4672 for (int i = 0; i < arg_count; i++) {
4673 VisitForStackValue(args->at(i));
4676 // Call the C runtime function.
4677 __ CallRuntime(expr->function(), arg_count);
4678 context()->Plug(v0);
4685 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4686 switch (expr->op()) {
4687 case Token::DELETE: {
4688 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4689 Property* property = expr->expression()->AsProperty();
4690 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4692 if (property != NULL) {
4693 VisitForStackValue(property->obj());
4694 VisitForStackValue(property->key());
4695 __ li(a1, Operand(Smi::FromInt(language_mode())));
4697 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4698 context()->Plug(v0);
4699 } else if (proxy != NULL) {
4700 Variable* var = proxy->var();
4701 // Delete of an unqualified identifier is disallowed in strict mode
4702 // but "delete this" is allowed.
4703 DCHECK(is_sloppy(language_mode()) || var->is_this());
4704 if (var->IsUnallocated()) {
4705 __ ld(a2, GlobalObjectOperand());
4706 __ li(a1, Operand(var->name()));
4707 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4708 __ Push(a2, a1, a0);
4709 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4710 context()->Plug(v0);
4711 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4712 // Result of deleting non-global, non-dynamic variables is false.
4713 // The subexpression does not have side effects.
4714 context()->Plug(var->is_this());
4716 // Non-global variable. Call the runtime to try to delete from the
4717 // context where the variable was introduced.
4718 DCHECK(!context_register().is(a2));
4719 __ li(a2, Operand(var->name()));
4720 __ Push(context_register(), a2);
4721 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4722 context()->Plug(v0);
4725 // Result of deleting non-property, non-variable reference is true.
4726 // The subexpression may have side effects.
4727 VisitForEffect(expr->expression());
4728 context()->Plug(true);
4734 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4735 VisitForEffect(expr->expression());
4736 context()->Plug(Heap::kUndefinedValueRootIndex);
4741 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4742 if (context()->IsEffect()) {
4743 // Unary NOT has no side effects so it's only necessary to visit the
4744 // subexpression. Match the optimizing compiler by not branching.
4745 VisitForEffect(expr->expression());
4746 } else if (context()->IsTest()) {
4747 const TestContext* test = TestContext::cast(context());
4748 // The labels are swapped for the recursive call.
4749 VisitForControl(expr->expression(),
4750 test->false_label(),
4751 test->true_label(),
4752 test->fall_through());
4753 context()->Plug(test->true_label(), test->false_label());
4755 // We handle value contexts explicitly rather than simply visiting
4756 // for control and plugging the control flow into the context,
4757 // because we need to prepare a pair of extra administrative AST ids
4758 // for the optimizing compiler.
4759 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4760 Label materialize_true, materialize_false, done;
4761 VisitForControl(expr->expression(),
4762 &materialize_false,
4763 &materialize_true,
4764 &materialize_true);
4765 __ bind(&materialize_true);
4766 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4767 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4768 if (context()->IsStackValue()) __ push(v0);
4770 __ bind(&materialize_false);
4771 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4772 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4773 if (context()->IsStackValue()) __ push(v0);
4779 case Token::TYPEOF: {
4780 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4781 { StackValueContext context(this);
4782 VisitForTypeofValue(expr->expression());
4784 __ CallRuntime(Runtime::kTypeof, 1);
4785 context()->Plug(v0);
4795 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4796 DCHECK(expr->expression()->IsValidReferenceExpression());
4798 Comment cmnt(masm_, "[ CountOperation");
4799 SetSourcePosition(expr->position());
4801 Property* prop = expr->expression()->AsProperty();
4802 LhsKind assign_type = GetAssignType(prop);
4804 // Evaluate expression and get value.
4805 if (assign_type == VARIABLE) {
4806 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4807 AccumulatorValueContext context(this);
4808 EmitVariableLoad(expr->expression()->AsVariableProxy());
4810 // Reserve space for result of postfix operation.
4811 if (expr->is_postfix() && !context()->IsEffect()) {
4812 __ li(at, Operand(Smi::FromInt(0)));
4815 switch (assign_type) {
4816 case NAMED_PROPERTY: {
4817 // Put the object both on the stack and in the register.
4818 VisitForStackValue(prop->obj());
4819 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4820 EmitNamedPropertyLoad(prop);
4824 case NAMED_SUPER_PROPERTY: {
4825 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4826 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4827 __ Push(result_register());
4828 const Register scratch = a1;
4829 __ ld(scratch, MemOperand(sp, kPointerSize));
4830 __ Push(scratch, result_register());
4831 EmitNamedSuperPropertyLoad(prop);
4835 case KEYED_SUPER_PROPERTY: {
4836 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4837 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4838 const Register scratch = a1;
4839 const Register scratch1 = a4;
4840 __ Move(scratch, result_register());
4841 VisitForAccumulatorValue(prop->key());
4842 __ Push(scratch, result_register());
4843 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
4844 __ Push(scratch1, scratch, result_register());
4845 EmitKeyedSuperPropertyLoad(prop);
4849 case KEYED_PROPERTY: {
4850 VisitForStackValue(prop->obj());
4851 VisitForStackValue(prop->key());
4852 __ ld(LoadDescriptor::ReceiverRegister(),
4853 MemOperand(sp, 1 * kPointerSize));
4854 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4855 EmitKeyedPropertyLoad(prop);
4864 // We need a second deoptimization point after loading the value
4865 // in case evaluating the property load may have a side effect.
4866 if (assign_type == VARIABLE) {
4867 PrepareForBailout(expr->expression(), TOS_REG);
4869 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4872 // Inline smi case if we are in a loop.
4873 Label stub_call, done;
4874 JumpPatchSite patch_site(masm_);
4876 int count_value = expr->op() == Token::INC ? 1 : -1;
4878 if (ShouldInlineSmiCase(expr->op())) {
4880 patch_site.EmitJumpIfNotSmi(v0, &slow);
4882 // Save result for postfix expressions.
4883 if (expr->is_postfix()) {
4884 if (!context()->IsEffect()) {
4885 // Save the result on the stack. If we have a named or keyed property
4886 // we store the result under the receiver that is currently on top of the stack.
4888 switch (assign_type) {
4892 case NAMED_PROPERTY:
4893 __ sd(v0, MemOperand(sp, kPointerSize));
4895 case NAMED_SUPER_PROPERTY:
4896 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4898 case KEYED_PROPERTY:
4899 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4901 case KEYED_SUPER_PROPERTY:
4902 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4908 Register scratch1 = a1;
4909 Register scratch2 = a4;
4910 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4911 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4912 __ BranchOnNoOverflow(&done, scratch2);
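// Note: on the fast path the smi delta is added directly to the tagged value
// (smi arithmetic works on the tagged representation). On overflow we fall
// through, undo the addition and take the BinaryOpIC stub path, as the
// comment below describes.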
4913 // Call stub. Undo operation first.
4918 ToNumberStub convert_stub(isolate());
4919 __ CallStub(&convert_stub);
4920 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4922 // Save result for postfix expressions.
4923 if (expr->is_postfix()) {
4924 if (!context()->IsEffect()) {
4925 // Save the result on the stack. If we have a named or keyed property
4926 // we store the result under the receiver that is currently on top of the stack.
4928 switch (assign_type) {
4932 case NAMED_PROPERTY:
4933 __ sd(v0, MemOperand(sp, kPointerSize));
4935 case NAMED_SUPER_PROPERTY:
4936 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4938 case KEYED_PROPERTY:
4939 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4941 case KEYED_SUPER_PROPERTY:
4942 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4948 __ bind(&stub_call);
4950 __ li(a0, Operand(Smi::FromInt(count_value)));
4952 // Record position before stub call.
4953 SetSourcePosition(expr->position());
4955 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4956 CallIC(code, expr->CountBinOpFeedbackId());
4957 patch_site.EmitPatchInfo();
4960 // Store the value returned in v0.
4961 switch (assign_type) {
4963 if (expr->is_postfix()) {
4964 { EffectContext context(this);
4965 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4966 Token::ASSIGN);
4967 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4970 // For all contexts except EffectContext we have the result on
4971 // top of the stack.
4972 if (!context()->IsEffect()) {
4973 context()->PlugTOS();
4976 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4977 Token::ASSIGN);
4978 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4979 context()->Plug(v0);
4982 case NAMED_PROPERTY: {
4983 __ mov(StoreDescriptor::ValueRegister(), result_register());
4984 __ li(StoreDescriptor::NameRegister(),
4985 Operand(prop->key()->AsLiteral()->value()));
4986 __ pop(StoreDescriptor::ReceiverRegister());
4987 CallStoreIC(expr->CountStoreFeedbackId());
4988 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4989 if (expr->is_postfix()) {
4990 if (!context()->IsEffect()) {
4991 context()->PlugTOS();
4994 context()->Plug(v0);
4998 case NAMED_SUPER_PROPERTY: {
4999 EmitNamedSuperPropertyStore(prop);
5000 if (expr->is_postfix()) {
5001 if (!context()->IsEffect()) {
5002 context()->PlugTOS();
5005 context()->Plug(v0);
5009 case KEYED_SUPER_PROPERTY: {
5010 EmitKeyedSuperPropertyStore(prop);
5011 if (expr->is_postfix()) {
5012 if (!context()->IsEffect()) {
5013 context()->PlugTOS();
5016 context()->Plug(v0);
5020 case KEYED_PROPERTY: {
5021 __ mov(StoreDescriptor::ValueRegister(), result_register());
5022 __ Pop(StoreDescriptor::ReceiverRegister(),
5023 StoreDescriptor::NameRegister());
5024 Handle<Code> ic =
5025 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5026 CallIC(ic, expr->CountStoreFeedbackId());
5027 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5028 if (expr->is_postfix()) {
5029 if (!context()->IsEffect()) {
5030 context()->PlugTOS();
5033 context()->Plug(v0);
5041 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5042 DCHECK(!context()->IsEffect());
5043 DCHECK(!context()->IsTest());
5044 VariableProxy* proxy = expr->AsVariableProxy();
5045 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5046 Comment cmnt(masm_, "[ Global variable");
5047 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5048 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5049 if (FLAG_vector_ics) {
5050 __ li(VectorLoadICDescriptor::SlotRegister(),
5051 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5053 // Use a regular load, not a contextual load, to avoid a reference error.
5055 CallLoadIC(NOT_CONTEXTUAL);
5056 PrepareForBailout(expr, TOS_REG);
5057 context()->Plug(v0);
5058 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5059 Comment cmnt(masm_, "[ Lookup slot");
5062 // Generate code for loading from variables potentially shadowed
5063 // by eval-introduced variables.
5064 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5067 __ li(a0, Operand(proxy->name()));
5069 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5070 PrepareForBailout(expr, TOS_REG);
5073 context()->Plug(v0);
5075 // This expression cannot throw a reference error at the top level.
5076 VisitInDuplicateContext(expr);
5080 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5081 Expression* sub_expr,
5082 Handle<String> check) {
5083 Label materialize_true, materialize_false;
5084 Label* if_true = NULL;
5085 Label* if_false = NULL;
5086 Label* fall_through = NULL;
5087 context()->PrepareTest(&materialize_true, &materialize_false,
5088 &if_true, &if_false, &fall_through);
5090 { AccumulatorValueContext context(this);
5091 VisitForTypeofValue(sub_expr);
5093 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5095 Factory* factory = isolate()->factory();
5096 if (String::Equals(check, factory->number_string())) {
5097 __ JumpIfSmi(v0, if_true);
5098 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5099 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5100 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5101 } else if (String::Equals(check, factory->string_string())) {
5102 __ JumpIfSmi(v0, if_false);
5103 // Check for undetectable objects => false.
5104 __ GetObjectType(v0, v0, a1);
5105 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5106 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5107 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5108 Split(eq, a1, Operand(zero_reg),
5109 if_true, if_false, fall_through);
5110 } else if (String::Equals(check, factory->symbol_string())) {
5111 __ JumpIfSmi(v0, if_false);
5112 __ GetObjectType(v0, v0, a1);
5113 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5114 } else if (String::Equals(check, factory->boolean_string())) {
5115 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5116 __ Branch(if_true, eq, v0, Operand(at));
5117 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5118 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5119 } else if (String::Equals(check, factory->undefined_string())) {
5120 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5121 __ Branch(if_true, eq, v0, Operand(at));
5122 __ JumpIfSmi(v0, if_false);
5123 // Check for undetectable objects => true.
5124 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5125 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5126 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5127 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
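// Note: typeof yields "undefined" both for the undefined value and for
// undetectable objects (the classic example being document.all), which is
// what the undetectable-bit test above encodes.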
5128 } else if (String::Equals(check, factory->function_string())) {
5129 __ JumpIfSmi(v0, if_false);
5130 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5131 __ GetObjectType(v0, v0, a1);
5132 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5133 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5134 if_true, if_false, fall_through);
5135 } else if (String::Equals(check, factory->object_string())) {
5136 __ JumpIfSmi(v0, if_false);
5137 __ LoadRoot(at, Heap::kNullValueRootIndex);
5138 __ Branch(if_true, eq, v0, Operand(at));
5139 // Check for JS objects => true.
5140 __ GetObjectType(v0, v0, a1);
5141 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5142 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5143 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5144 // Check for undetectable objects => false.
5145 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5146 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5147 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5149 if (if_false != fall_through) __ jmp(if_false);
5151 context()->Plug(if_true, if_false);
5155 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5156 Comment cmnt(masm_, "[ CompareOperation");
5157 SetSourcePosition(expr->position());
5159 // First we try a fast inlined version of the compare when one of
5160 // the operands is a literal.
5161 if (TryLiteralCompare(expr)) return;
5163 // Always perform the comparison for its control flow. Pack the result
5164 // into the expression's context after the comparison is performed.
5165 Label materialize_true, materialize_false;
5166 Label* if_true = NULL;
5167 Label* if_false = NULL;
5168 Label* fall_through = NULL;
5169 context()->PrepareTest(&materialize_true, &materialize_false,
5170 &if_true, &if_false, &fall_through);
5172 Token::Value op = expr->op();
5173 VisitForStackValue(expr->left());
5176 VisitForStackValue(expr->right());
5177 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5178 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5179 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5180 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5183 case Token::INSTANCEOF: {
5184 VisitForStackValue(expr->right());
5185 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5187 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5188 // The stub returns 0 for true.
5189 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5194 VisitForAccumulatorValue(expr->right());
5195 Condition cc = CompareIC::ComputeCondition(op);
5196 __ mov(a0, result_register());
5199 bool inline_smi_code = ShouldInlineSmiCase(op);
5200 JumpPatchSite patch_site(masm_);
5201 if (inline_smi_code) {
5203 __ Or(a2, a0, Operand(a1));
5204 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5205 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
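// Note: when both operands are smis the comparison can be done directly on
// the tagged values, since smi tagging preserves ordering; otherwise the
// patchable jump above routes us to the CompareIC stub at slow_case.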
5206 __ bind(&slow_case);
5208 // Record position and call the compare IC.
5209 SetSourcePosition(expr->position());
5210 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5211 CallIC(ic, expr->CompareOperationFeedbackId());
5212 patch_site.EmitPatchInfo();
5213 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5214 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5218 // Convert the result of the comparison into one expected for this
5219 // expression's context.
5220 context()->Plug(if_true, if_false);
5224 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5225 Expression* sub_expr,
5227 Label materialize_true, materialize_false;
5228 Label* if_true = NULL;
5229 Label* if_false = NULL;
5230 Label* fall_through = NULL;
5231 context()->PrepareTest(&materialize_true, &materialize_false,
5232 &if_true, &if_false, &fall_through);
5234 VisitForAccumulatorValue(sub_expr);
5235 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5236 __ mov(a0, result_register());
5237 if (expr->op() == Token::EQ_STRICT) {
5238 Heap::RootListIndex nil_value = nil == kNullValue ?
5239 Heap::kNullValueRootIndex :
5240 Heap::kUndefinedValueRootIndex;
5241 __ LoadRoot(a1, nil_value);
5242 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5244 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5245 CallIC(ic, expr->CompareOperationFeedbackId());
5246 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5248 context()->Plug(if_true, if_false);
5252 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5253 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5254 context()->Plug(v0);
5258 Register FullCodeGenerator::result_register() {
5263 Register FullCodeGenerator::context_register() {
5268 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5269 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5270 DCHECK(IsAligned(frame_offset, kPointerSize));
5271 // __ sw(value, MemOperand(fp, frame_offset));
5272 __ sd(value, MemOperand(fp, frame_offset));
5276 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5277 __ ld(dst, ContextOperand(cp, context_index));
5281 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5282 Scope* declaration_scope = scope()->DeclarationScope();
5283 if (declaration_scope->is_script_scope() ||
5284 declaration_scope->is_module_scope()) {
5285 // Contexts nested in the native context have a canonical empty function
5286 // as their closure, not the anonymous closure containing the global
5287 // code. Pass a smi sentinel and let the runtime look up the empty function.
5289 __ li(at, Operand(Smi::FromInt(0)));
5290 } else if (declaration_scope->is_eval_scope()) {
5291 // Contexts created by a call to eval have the same closure as the
5292 // context calling eval, not the anonymous closure containing the eval
5293 // code. Fetch it from the context.
5294 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5296 DCHECK(declaration_scope->is_function_scope());
5297 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5303 // ----------------------------------------------------------------------------
5304 // Non-local control flow support.
5306 void FullCodeGenerator::EnterFinallyBlock() {
5307 DCHECK(!result_register().is(a1));
5308 // Store result register while executing finally block.
5309 __ push(result_register());
5310 // Cook return address in link register to stack (smi encoded Code* delta).
5311 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
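// Note: "cooking" stores the return address as an offset relative to the
// code object, so it stays valid even if the GC moves the code while the
// finally block runs; ExitFinallyBlock adds the code object back in before
// returning.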
5314 // Store result register while executing finally block.
5317 // Store pending message while executing finally block.
5318 ExternalReference pending_message_obj =
5319 ExternalReference::address_of_pending_message_obj(isolate());
5320 __ li(at, Operand(pending_message_obj));
5321 __ ld(a1, MemOperand(at));
5326 void FullCodeGenerator::ExitFinallyBlock() {
5327 DCHECK(!result_register().is(a1));
5328 // Restore pending message from stack.
5330 ExternalReference pending_message_obj =
5331 ExternalReference::address_of_pending_message_obj(isolate());
5332 __ li(at, Operand(pending_message_obj));
5333 __ sd(a1, MemOperand(at));
5335 // Restore result register from stack.
5338 // Uncook return address and return.
5339 __ pop(result_register());
5342 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5350 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5352 BackEdgeState target_state,
5353 Code* replacement_code) {
5354 static const int kInstrSize = Assembler::kInstrSize;
5355 Address branch_address = pc - 8 * kInstrSize;
5356 CodePatcher patcher(branch_address, 1);
5358 switch (target_state) {
5360 // slt at, a3, zero_reg (in case of count based interrupts)
5361 // beq at, zero_reg, ok
5362 // lui t9, <interrupt stub address> upper
5363 // ori t9, <interrupt stub address> u-middle
5365 // ori t9, <interrupt stub address> lower
5368 // ok-label ----- pc_after points here
5369 patcher.masm()->slt(at, a3, zero_reg);
5371 case ON_STACK_REPLACEMENT:
5372 case OSR_AFTER_STACK_CHECK:
5373 // addiu at, zero_reg, 1
5374 // beq at, zero_reg, ok ;; Not changed
5375 // lui t9, <on-stack replacement address> upper
5376 // ori t9, <on-stack replacement address> middle
5378 // ori t9, <on-stack replacement address> lower
5379 // jalr t9 ;; Not changed
5380 // nop ;; Not changed
5381 // ok-label ----- pc_after points here
5382 patcher.masm()->daddiu(at, zero_reg, 1);
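// Note: with at == 1 the following 'beq at, zero_reg, ok' is never taken, so
// the call to the replacement builtin always runs. The INTERRUPT case above
// instead derives 'at' from the counter in a3, so the call happens only once
// that counter has gone negative.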
5385 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5386 // Replace the stack check address in the load-immediate (6-instr sequence)
5387 // with the entry address of the replacement code.
5388 Assembler::set_target_address_at(pc_immediate_load_address,
5389 replacement_code->entry());
5391 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5392 unoptimized_code, pc_immediate_load_address, replacement_code);
5396 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5398 Code* unoptimized_code,
5400 static const int kInstrSize = Assembler::kInstrSize;
5401 Address branch_address = pc - 8 * kInstrSize;
5402 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5404 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5405 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5406 DCHECK(reinterpret_cast<uint64_t>(
5407 Assembler::target_address_at(pc_immediate_load_address)) ==
5408 reinterpret_cast<uint64_t>(
5409 isolate->builtins()->InterruptCheck()->entry()));
5413 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5415 if (reinterpret_cast<uint64_t>(
5416 Assembler::target_address_at(pc_immediate_load_address)) ==
5417 reinterpret_cast<uint64_t>(
5418 isolate->builtins()->OnStackReplacement()->entry())) {
5419 return ON_STACK_REPLACEMENT;
5422 DCHECK(reinterpret_cast<uint64_t>(
5423 Assembler::target_address_at(pc_immediate_load_address)) ==
5424 reinterpret_cast<uint64_t>(
5425 isolate->builtins()->OsrAfterStackCheck()->entry()));
5426 return OSR_AFTER_STACK_CHECK;
5430 } } // namespace v8::internal
5432 #endif // V8_TARGET_ARCH_MIPS64