1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_MIPS64
9 // Note on Mips implementation:
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
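//
// A minimal sketch of the resulting pattern (hypothetical calls, for
// illustration only):
//
//   __ CallRuntime(Runtime::kFoo, 1);  // result arrives in v0
//   __ mov(a0, v0);                    // shuffle it into the first-arg register
//   __ CallStub(&stub);                // callee expects its operand in a0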
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/isolate-inl.h"
25 #include "src/parser.h"
26 #include "src/scopes.h"
28 #include "src/mips64/code-stubs-mips64.h"
29 #include "src/mips64/macro-assembler-mips64.h"
34 #define __ ACCESS_MASM(masm_)
37 // A patch site is a location in the code that can be patched. This
38 // class has a number of methods to emit the patchable code and the
39 // method EmitPatchInfo to record a marker back to the patchable code. This
40 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
41 // (using the raw 16-bit immediate value) is the delta from the pc to the first
42 // instruction of the patchable code.
43 // The marker instruction is effectively a NOP (dest is zero_reg) and will
44 // never be emitted by normal code.
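//
// A worked example of the encoding (illustrative values): if EmitPatchInfo
// runs 6 instructions after the patch site, delta_to_patch_site == 6, so the
// marker emitted is andi(zero_reg, Register::from_code(6 / kImm16Mask),
// 6 % kImm16Mask), i.e. andi zero_reg, zero_reg, 6. The patcher recovers the
// delta as rx * 0x0000ffff + yyyy == 0 * 0xffff + 6 == 6.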
45 class JumpPatchSite BASE_EMBEDDED {
47 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
49 info_emitted_ = false;
54 DCHECK(patch_site_.is_bound() == info_emitted_);
57 // When initially emitting this code, ensure that a jump is always generated
58 // to skip the inlined smi code.
59 void EmitJumpIfNotSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
62 __ bind(&patch_site_);
64 // Always taken before patched.
65 __ BranchShort(target, eq, at, Operand(zero_reg));
68 // When initially emitting this code, ensure that a jump is never generated
69 // to skip the inlined smi code.
70 void EmitJumpIfSmi(Register reg, Label* target) {
71 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
72 DCHECK(!patch_site_.is_bound() && !info_emitted_);
73 __ bind(&patch_site_);
75 // Never taken before patched.
76 __ BranchShort(target, ne, at, Operand(zero_reg));
79 void EmitPatchInfo() {
80 if (patch_site_.is_bound()) {
81 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
82 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
83 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
88 __ nop(); // Signals no inlined smi code.
93 MacroAssembler* masm_;
101 // Generate code for a JS function. On entry to the function the receiver
102 // and arguments have been pushed on the stack left to right. The actual
103 // argument count matches the formal parameter count expected by the function.
106 // The live registers are:
107 // o a1: the JS function object being called (i.e. ourselves)
109 // o fp: our caller's frame pointer
110 // o sp: stack pointer
111 // o ra: return address
113 // The function builds a JS frame. Please see JavaScriptFrameConstants in
114 // frames-mips.h for its layout.
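//
// A rough picture of the completed frame (illustrative only; frames-mips.h
// holds the authoritative offsets), addresses growing upwards:
//
//   receiver, arguments      (pushed by the caller, higher addresses)
//   caller's ra
//   caller's fp              <- fp
//   context (cp)
//   function (a1)
//   stack locals ...         <- sp after the "Allocate locals" block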
115 void FullCodeGenerator::Generate() {
116 CompilationInfo* info = info_;
117 handler_table_ =
118     isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
120 profiling_counter_ = isolate()->factory()->NewCell(
121 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
122 SetFunctionPosition(function());
123 Comment cmnt(masm_, "[ function compiled by full code generator");
125 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
128 if (strlen(FLAG_stop_at) > 0 &&
129 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
134 // Sloppy mode functions and builtins need to replace the receiver with the
135 // global proxy when called as functions (without an explicit receiver object).
137 if (is_sloppy(info->language_mode()) && !info->is_native()) {
139 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
140 __ ld(at, MemOperand(sp, receiver_offset));
141 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
142 __ Branch(&ok, ne, a2, Operand(at));
144 __ ld(a2, GlobalObjectOperand());
145 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
147 __ sd(a2, MemOperand(sp, receiver_offset));
150 // Open a frame scope to indicate that there is a frame on the stack. The
151 // MANUAL indicates that the scope shouldn't actually generate code to set up
152 // the frame (that is done below).
153 FrameScope frame_scope(masm_, StackFrame::MANUAL);
154 info->set_prologue_offset(masm_->pc_offset());
155 __ Prologue(info->IsCodePreAgingActive());
156 info->AddNoFrameRange(0, masm_->pc_offset());
158 { Comment cmnt(masm_, "[ Allocate locals");
159 int locals_count = info->scope()->num_stack_slots();
160 // Generators allocate locals, if any, in context slots.
161 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
162 if (locals_count > 0) {
163 if (locals_count >= 128) {
165 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
166 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
167 __ Branch(&ok, hs, t1, Operand(a2));
168 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
171 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
172 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
173 if (locals_count >= kMaxPushes) {
174 int loop_iterations = locals_count / kMaxPushes;
175 __ li(a2, Operand(loop_iterations));
177 __ bind(&loop_header);
179 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
180 for (int i = 0; i < kMaxPushes; i++) {
181 __ sd(t1, MemOperand(sp, i * kPointerSize));
183 // Continue loop if not done.
184 __ Dsubu(a2, a2, Operand(1));
185 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
187 int remaining = locals_count % kMaxPushes;
188 // Emit the remaining pushes.
189 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
190 for (int i = 0; i < remaining; i++) {
191 __ sd(t1, MemOperand(sp, i * kPointerSize));
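// Worked example (illustrative): locals_count == 70 with kMaxPushes == 32
// gives loop_iterations == 2 (64 slots written in the loop above) and
// remaining == 6 slots written by the unrolled stores here.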
196 bool function_in_register = true;
198 // Possibly allocate a local context.
199 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
200 if (heap_slots > 0) {
201 Comment cmnt(masm_, "[ Allocate context");
202 // Argument to NewContext is the function, which is still in a1.
203 bool need_write_barrier = true;
204 if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
206 __ Push(info->scope()->GetScopeInfo(info->isolate()));
207 __ CallRuntime(Runtime::kNewScriptContext, 2);
208 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
209 FastNewContextStub stub(isolate(), heap_slots);
211 // Result of FastNewContextStub is always in new space.
212 need_write_barrier = false;
215 __ CallRuntime(Runtime::kNewFunctionContext, 1);
217 function_in_register = false;
218 // Context is returned in v0. It replaces the context passed to us.
219 // It's saved in the stack and kept live in cp.
221 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
222 // Copy any necessary parameters into the context.
223 int num_parameters = info->scope()->num_parameters();
224 for (int i = 0; i < num_parameters; i++) {
225 Variable* var = scope()->parameter(i);
226 if (var->IsContextSlot()) {
227 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
228 (num_parameters - 1 - i) * kPointerSize;
229 // Load parameter from stack.
230 __ ld(a0, MemOperand(fp, parameter_offset));
231 // Store it in the context.
232 MemOperand target = ContextOperand(cp, var->index());
235 // Update the write barrier.
236 if (need_write_barrier) {
237 __ RecordWriteContextSlot(
238 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
239 } else if (FLAG_debug_code) {
241 __ JumpIfInNewSpace(cp, a0, &done);
242 __ Abort(kExpectedNewSpaceObject);
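// Worked example for parameter_offset (illustrative): with num_parameters == 2,
// parameter 0 is loaded from fp + kCallerSPOffset + 1 * kPointerSize and
// parameter 1 from fp + kCallerSPOffset, reflecting the caller's left-to-right
// push order (only the receiver is deeper).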
249 // Possibly allocate RestParameters
251 Variable* rest_param = scope()->rest_parameter(&rest_index);
253 Comment cmnt(masm_, "[ Allocate rest parameter array");
255 int num_parameters = info->scope()->num_parameters();
256 int offset = num_parameters * kPointerSize;
257 __ Daddu(a3, fp,
258          Operand(StandardFrameConstants::kCallerSPOffset + offset));
259 __ li(a2, Operand(Smi::FromInt(num_parameters)));
260 __ li(a1, Operand(Smi::FromInt(rest_index)));
263 RestParamAccessStub stub(isolate());
266 SetVar(rest_param, v0, a1, a2);
269 Variable* arguments = scope()->arguments();
270 if (arguments != NULL) {
271 // Function uses arguments object.
272 Comment cmnt(masm_, "[ Allocate arguments object");
273 if (!function_in_register) {
274 // Load this again, if it's used by the local context below.
275 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
279 // Receiver is just before the parameters on the caller's stack.
280 int num_parameters = info->scope()->num_parameters();
281 int offset = num_parameters * kPointerSize;
282 __ Daddu(a2, fp,
283          Operand(StandardFrameConstants::kCallerSPOffset + offset));
284 __ li(a1, Operand(Smi::FromInt(num_parameters)));
287 // Arguments to ArgumentsAccessStub:
288 // function, receiver address, parameter count.
289 // The stub will rewrite receiver and parameter count if the previous
290 // stack frame was an arguments adapter frame.
291 ArgumentsAccessStub::HasNewTarget has_new_target =
292 IsSubclassConstructor(info->function()->kind())
293 ? ArgumentsAccessStub::HAS_NEW_TARGET
294 : ArgumentsAccessStub::NO_NEW_TARGET;
295 ArgumentsAccessStub::Type type;
296 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
297 type = ArgumentsAccessStub::NEW_STRICT;
298 } else if (function()->has_duplicate_parameters()) {
299 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
301 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
303 ArgumentsAccessStub stub(isolate(), type, has_new_target);
306 SetVar(arguments, v0, a1, a2);
310 __ CallRuntime(Runtime::kTraceEnter, 0);
312 // Visit the declarations and body unless there is an illegal redeclaration.
314 if (scope()->HasIllegalRedeclaration()) {
315 Comment cmnt(masm_, "[ Declarations");
316 scope()->VisitIllegalRedeclaration(this);
319 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
320 { Comment cmnt(masm_, "[ Declarations");
321 // For named function expressions, declare the function name as a constant.
323 if (scope()->is_function_scope() && scope()->function() != NULL) {
324 VariableDeclaration* function = scope()->function();
325 DCHECK(function->proxy()->var()->mode() == CONST ||
326 function->proxy()->var()->mode() == CONST_LEGACY);
327 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
328 VisitVariableDeclaration(function);
330 VisitDeclarations(scope()->declarations());
332 { Comment cmnt(masm_, "[ Stack check");
333 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
335 __ LoadRoot(at, Heap::kStackLimitRootIndex);
336 __ Branch(&ok, hs, sp, Operand(at));
337 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
338 PredictableCodeSizeScope predictable(masm_,
339 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
340 __ Call(stack_check, RelocInfo::CODE_TARGET);
344 { Comment cmnt(masm_, "[ Body");
345 DCHECK(loop_depth() == 0);
347 VisitStatements(function()->body());
349 DCHECK(loop_depth() == 0);
353 // Always emit a 'return undefined' in case control fell off the end of the body.
355 { Comment cmnt(masm_, "[ return <undefined>;");
356 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
358 EmitReturnSequence();
362 void FullCodeGenerator::ClearAccumulator() {
363 DCHECK(Smi::FromInt(0) == 0);
364 __ mov(v0, zero_reg);
368 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
369 __ li(a2, Operand(profiling_counter_));
370 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
371 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
372 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
376 void FullCodeGenerator::EmitProfilingCounterReset() {
377 int reset_value = FLAG_interrupt_budget;
378 if (info_->is_debug()) {
379 // Detect debug break requests as soon as possible.
380 reset_value = FLAG_interrupt_budget >> 4;
382 __ li(a2, Operand(profiling_counter_));
383 __ li(a3, Operand(Smi::FromInt(reset_value)));
384 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
388 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
389 Label* back_edge_target) {
390 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
391 // to make sure it is constant. Branch may emit a skip-or-jump sequence
392 // instead of the normal Branch. It seems that the "skip" part of that
393 // sequence is about as long as this Branch would be, so it is safe to ignore that.
395 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
396 Comment cmnt(masm_, "[ Back edge bookkeeping");
398 DCHECK(back_edge_target->is_bound());
399 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
400 int weight = Min(kMaxBackEdgeWeight,
401 Max(1, distance / kCodeSizeMultiplier));
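// For example (illustrative numbers): a back edge 1500 bytes away from its
// target with kCodeSizeMultiplier == 150 would decrement the counter by
// Min(kMaxBackEdgeWeight, Max(1, 1500 / 150)) == 10.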
402 EmitProfilingCounterDecrement(weight);
403 __ slt(at, a3, zero_reg);
404 __ beq(at, zero_reg, &ok);
405 // Call will emit a li t9 first, so it is safe to use the delay slot.
406 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
407 // Record a mapping of this PC offset to the OSR id. This is used to find
408 // the AST id from the unoptimized code in order to use it as a key into
409 // the deoptimization input data found in the optimized code.
410 RecordBackEdge(stmt->OsrEntryId());
411 EmitProfilingCounterReset();
414 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
415 // Record a mapping of the OSR id to this PC. This is used if the OSR
416 // entry becomes the target of a bailout. We don't expect it to be, but
417 // we want it to work if it is.
418 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
422 void FullCodeGenerator::EmitReturnSequence() {
423 Comment cmnt(masm_, "[ Return sequence");
424 if (return_label_.is_bound()) {
425 __ Branch(&return_label_);
427 __ bind(&return_label_);
429 // Push the return value on the stack as the parameter.
430 // Runtime::TraceExit returns its parameter in v0.
432 __ CallRuntime(Runtime::kTraceExit, 1);
434 // Pretend that the exit is a backwards jump to the entry.
436 if (info_->ShouldSelfOptimize()) {
437 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
439 int distance = masm_->pc_offset();
440 weight = Min(kMaxBackEdgeWeight,
441 Max(1, distance / kCodeSizeMultiplier));
443 EmitProfilingCounterDecrement(weight);
445 __ Branch(&ok, ge, a3, Operand(zero_reg));
447 __ Call(isolate()->builtins()->InterruptCheck(),
448 RelocInfo::CODE_TARGET);
450 EmitProfilingCounterReset();
454 // Add a label for checking the size of the code used for returning.
455 Label check_exit_codesize;
456 masm_->bind(&check_exit_codesize);
458 // Make sure that the constant pool is not emitted inside the return sequence.
460 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
461 // Here we use masm_-> instead of the __ macro to keep the code coverage
462 // tool from instrumenting, as we rely on the code size here.
463 int32_t arg_count = info_->scope()->num_parameters() + 1;
464 if (IsSubclassConstructor(info_->function()->kind())) {
467 int32_t sp_delta = arg_count * kPointerSize;
468 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
471 int no_frame_start = masm_->pc_offset();
472 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
473 masm_->Daddu(sp, sp, Operand(sp_delta));
475 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
479 // Check that the size of the code used for returning is large enough
480 // for the debugger's requirements.
481 DCHECK(Assembler::kJSReturnSequenceInstructions <=
482 masm_->InstructionsGeneratedSince(&check_exit_codesize));
488 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
489 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
493 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
494 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
495 codegen()->GetVar(result_register(), var);
499 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
500 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
501 codegen()->GetVar(result_register(), var);
502 __ push(result_register());
506 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
507 // For simplicity we always test the accumulator register.
508 codegen()->GetVar(result_register(), var);
509 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
510 codegen()->DoTest(this);
514 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
518 void FullCodeGenerator::AccumulatorValueContext::Plug(
519 Heap::RootListIndex index) const {
520 __ LoadRoot(result_register(), index);
524 void FullCodeGenerator::StackValueContext::Plug(
525 Heap::RootListIndex index) const {
526 __ LoadRoot(result_register(), index);
527 __ push(result_register());
531 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
532 codegen()->PrepareForBailoutBeforeSplit(condition(),
536 if (index == Heap::kUndefinedValueRootIndex ||
537 index == Heap::kNullValueRootIndex ||
538 index == Heap::kFalseValueRootIndex) {
539 if (false_label_ != fall_through_) __ Branch(false_label_);
540 } else if (index == Heap::kTrueValueRootIndex) {
541 if (true_label_ != fall_through_) __ Branch(true_label_);
543 __ LoadRoot(result_register(), index);
544 codegen()->DoTest(this);
549 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
553 void FullCodeGenerator::AccumulatorValueContext::Plug(
554 Handle<Object> lit) const {
555 __ li(result_register(), Operand(lit));
559 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
560 // Immediates cannot be pushed directly.
561 __ li(result_register(), Operand(lit));
562 __ push(result_register());
566 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
567 codegen()->PrepareForBailoutBeforeSplit(condition(),
571 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
572 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
573 if (false_label_ != fall_through_) __ Branch(false_label_);
574 } else if (lit->IsTrue() || lit->IsJSObject()) {
575 if (true_label_ != fall_through_) __ Branch(true_label_);
576 } else if (lit->IsString()) {
577 if (String::cast(*lit)->length() == 0) {
578 if (false_label_ != fall_through_) __ Branch(false_label_);
580 if (true_label_ != fall_through_) __ Branch(true_label_);
582 } else if (lit->IsSmi()) {
583 if (Smi::cast(*lit)->value() == 0) {
584 if (false_label_ != fall_through_) __ Branch(false_label_);
586 if (true_label_ != fall_through_) __ Branch(true_label_);
589 // For simplicity we always test the accumulator register.
590 __ li(result_register(), Operand(lit));
591 codegen()->DoTest(this);
596 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
597 Register reg) const {
603 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
605 Register reg) const {
608 __ Move(result_register(), reg);
612 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
613 Register reg) const {
615 if (count > 1) __ Drop(count - 1);
616 __ sd(reg, MemOperand(sp, 0));
620 void FullCodeGenerator::TestContext::DropAndPlug(int count,
621 Register reg) const {
623 // For simplicity we always test the accumulator register.
625 __ Move(result_register(), reg);
626 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
627 codegen()->DoTest(this);
631 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
632 Label* materialize_false) const {
633 DCHECK(materialize_true == materialize_false);
634 __ bind(materialize_true);
638 void FullCodeGenerator::AccumulatorValueContext::Plug(
639 Label* materialize_true,
640 Label* materialize_false) const {
642 __ bind(materialize_true);
643 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
645 __ bind(materialize_false);
646 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
651 void FullCodeGenerator::StackValueContext::Plug(
652 Label* materialize_true,
653 Label* materialize_false) const {
655 __ bind(materialize_true);
656 __ LoadRoot(at, Heap::kTrueValueRootIndex);
657 // Push the value, as the following branch can clobber 'at' in long branch mode.
660 __ bind(materialize_false);
661 __ LoadRoot(at, Heap::kFalseValueRootIndex);
667 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
668 Label* materialize_false) const {
669 DCHECK(materialize_true == true_label_);
670 DCHECK(materialize_false == false_label_);
674 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
678 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
679 Heap::RootListIndex value_root_index =
680 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
681 __ LoadRoot(result_register(), value_root_index);
685 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
686 Heap::RootListIndex value_root_index =
687 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
688 __ LoadRoot(at, value_root_index);
693 void FullCodeGenerator::TestContext::Plug(bool flag) const {
694 codegen()->PrepareForBailoutBeforeSplit(condition(),
699 if (true_label_ != fall_through_) __ Branch(true_label_);
701 if (false_label_ != fall_through_) __ Branch(false_label_);
706 void FullCodeGenerator::DoTest(Expression* condition,
709 Label* fall_through) {
710 __ mov(a0, result_register());
711 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
712 CallIC(ic, condition->test_id());
713 __ mov(at, zero_reg);
714 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
718 void FullCodeGenerator::Split(Condition cc,
723 Label* fall_through) {
724 if (if_false == fall_through) {
725 __ Branch(if_true, cc, lhs, rhs);
726 } else if (if_true == fall_through) {
727 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
728 } else {
729 __ Branch(if_true, cc, lhs, rhs);
730 __ Branch(if_false);
735 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
736 DCHECK(var->IsStackAllocated());
737 // Offset is negative because higher indexes are at lower addresses.
738 int offset = -var->index() * kPointerSize;
739 // Adjust by a (parameter or local) base offset.
740 if (var->IsParameter()) {
741 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
743 offset += JavaScriptFrameConstants::kLocal0Offset;
745 return MemOperand(fp, offset);
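// Worked example (illustrative; assumes the usual layout where
// kCallerSPOffset == 2 * kPointerSize): for parameter index 0 of a
// two-parameter function, offset == -0 * kPointerSize + (2 + 1) * kPointerSize,
// so the operand is MemOperand(fp, 3 * kPointerSize), the parameter slot
// adjacent to the receiver.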
749 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
750 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
751 if (var->IsContextSlot()) {
752 int context_chain_length = scope()->ContextChainLength(var->scope());
753 __ LoadContext(scratch, context_chain_length);
754 return ContextOperand(scratch, var->index());
756 return StackOperand(var);
761 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
762 // Use destination as scratch.
763 MemOperand location = VarOperand(var, dest);
764 __ ld(dest, location);
768 void FullCodeGenerator::SetVar(Variable* var,
772 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
773 DCHECK(!scratch0.is(src));
774 DCHECK(!scratch0.is(scratch1));
775 DCHECK(!scratch1.is(src));
776 MemOperand location = VarOperand(var, scratch0);
777 __ sd(src, location);
778 // Emit the write barrier code if the location is in the heap.
779 if (var->IsContextSlot()) {
780 __ RecordWriteContextSlot(scratch0,
790 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
791 bool should_normalize,
794 // Only prepare for bailouts before splits if we're in a test
795 // context. Otherwise, we let the Visit function deal with the
796 // preparation to avoid preparing with the same AST id twice.
797 if (!context()->IsTest() || !info_->IsOptimizable()) return;
800 if (should_normalize) __ Branch(&skip);
801 PrepareForBailout(expr, TOS_REG);
802 if (should_normalize) {
803 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
804 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
810 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
811 // The variable in the declaration always resides in the current function context.
813 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
814 if (generate_debug_code_) {
815 // Check that we're not inside a with or catch context.
816 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
817 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
818 __ Check(ne, kDeclarationInWithContext,
820 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
821 __ Check(ne, kDeclarationInCatchContext,
827 void FullCodeGenerator::VisitVariableDeclaration(
828 VariableDeclaration* declaration) {
829 // If it was not possible to allocate the variable at compile time, we
830 // need to "declare" it at runtime to make sure it actually exists in the dynamic scope chain.
832 VariableProxy* proxy = declaration->proxy();
833 VariableMode mode = declaration->mode();
834 Variable* variable = proxy->var();
835 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
836 switch (variable->location()) {
837 case Variable::UNALLOCATED:
838 globals_->Add(variable->name(), zone());
839 globals_->Add(variable->binding_needs_init()
840 ? isolate()->factory()->the_hole_value()
841 : isolate()->factory()->undefined_value(),
845 case Variable::PARAMETER:
846 case Variable::LOCAL:
848 Comment cmnt(masm_, "[ VariableDeclaration");
849 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
850 __ sd(a4, StackOperand(variable));
854 case Variable::CONTEXT:
856 Comment cmnt(masm_, "[ VariableDeclaration");
857 EmitDebugCheckDeclarationContext(variable);
858 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
859 __ sd(at, ContextOperand(cp, variable->index()));
860 // No write barrier since the_hole_value is in old space.
861 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
865 case Variable::LOOKUP: {
866 Comment cmnt(masm_, "[ VariableDeclaration");
867 __ li(a2, Operand(variable->name()));
868 // Declaration nodes are always introduced in one of four modes.
869 DCHECK(IsDeclaredVariableMode(mode));
870 PropertyAttributes attr =
871 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
872 __ li(a1, Operand(Smi::FromInt(attr)));
873 // Push initial value, if any.
874 // Note: For variables we must not push an initial value (such as
875 // 'undefined') because we may have a (legal) redeclaration and we
876 // must not destroy the current value.
878 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
879 __ Push(cp, a2, a1, a0);
881 DCHECK(Smi::FromInt(0) == 0);
882 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
883 __ Push(cp, a2, a1, a0);
885 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
892 void FullCodeGenerator::VisitFunctionDeclaration(
893 FunctionDeclaration* declaration) {
894 VariableProxy* proxy = declaration->proxy();
895 Variable* variable = proxy->var();
896 switch (variable->location()) {
897 case Variable::UNALLOCATED: {
898 globals_->Add(variable->name(), zone());
899 Handle<SharedFunctionInfo> function =
900 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
901 // Check for stack-overflow exception.
902 if (function.is_null()) return SetStackOverflow();
903 globals_->Add(function, zone());
907 case Variable::PARAMETER:
908 case Variable::LOCAL: {
909 Comment cmnt(masm_, "[ FunctionDeclaration");
910 VisitForAccumulatorValue(declaration->fun());
911 __ sd(result_register(), StackOperand(variable));
915 case Variable::CONTEXT: {
916 Comment cmnt(masm_, "[ FunctionDeclaration");
917 EmitDebugCheckDeclarationContext(variable);
918 VisitForAccumulatorValue(declaration->fun());
919 __ sd(result_register(), ContextOperand(cp, variable->index()));
920 int offset = Context::SlotOffset(variable->index());
921 // We know that we have written a function, which is not a smi.
922 __ RecordWriteContextSlot(cp,
930 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
934 case Variable::LOOKUP: {
935 Comment cmnt(masm_, "[ FunctionDeclaration");
936 __ li(a2, Operand(variable->name()));
937 __ li(a1, Operand(Smi::FromInt(NONE)));
939 // Push initial value for function declaration.
940 VisitForStackValue(declaration->fun());
941 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
948 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
949 Variable* variable = declaration->proxy()->var();
950 ModuleDescriptor* descriptor = declaration->module()->descriptor();
951 DCHECK(variable->location() == Variable::CONTEXT);
952 DCHECK(descriptor->IsFrozen());
953 Comment cmnt(masm_, "[ ModuleDeclaration");
954 EmitDebugCheckDeclarationContext(variable);
956 // Load instance object.
957 __ LoadContext(a1, scope_->ContextChainLength(scope_->ScriptScope()));
958 __ ld(a1, ContextOperand(a1, descriptor->Index()));
959 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
962 __ sd(a1, ContextOperand(cp, variable->index()));
963 // We know that we have written a module, which is not a smi.
964 __ RecordWriteContextSlot(cp,
965 Context::SlotOffset(variable->index()),
972 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
974 // Traverse into body.
975 Visit(declaration->module());
979 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
980 VariableProxy* proxy = declaration->proxy();
981 Variable* variable = proxy->var();
982 switch (variable->location()) {
983 case Variable::UNALLOCATED:
987 case Variable::CONTEXT: {
988 Comment cmnt(masm_, "[ ImportDeclaration");
989 EmitDebugCheckDeclarationContext(variable);
994 case Variable::PARAMETER:
995 case Variable::LOCAL:
996 case Variable::LOOKUP:
1002 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1007 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1008 // Call the runtime to declare the globals.
1009 // The context is the first argument.
1010 __ li(a1, Operand(pairs));
1011 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
1012 __ Push(cp, a1, a0);
1013 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1014 // Return value is ignored.
1018 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1019 // Call the runtime to declare the modules.
1020 __ Push(descriptions);
1021 __ CallRuntime(Runtime::kDeclareModules, 1);
1022 // Return value is ignored.
1026 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1027 Comment cmnt(masm_, "[ SwitchStatement");
1028 Breakable nested_statement(this, stmt);
1029 SetStatementPosition(stmt);
1031 // Keep the switch value on the stack until a case matches.
1032 VisitForStackValue(stmt->tag());
1033 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1035 ZoneList<CaseClause*>* clauses = stmt->cases();
1036 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1038 Label next_test; // Recycled for each test.
1039 // Compile all the tests with branches to their bodies.
1040 for (int i = 0; i < clauses->length(); i++) {
1041 CaseClause* clause = clauses->at(i);
1042 clause->body_target()->Unuse();
1044 // The default is not a test, but remember it as the final fall-through.
1045 if (clause->is_default()) {
1046 default_clause = clause;
1050 Comment cmnt(masm_, "[ Case comparison");
1051 __ bind(&next_test);
1054 // Compile the label expression.
1055 VisitForAccumulatorValue(clause->label());
1056 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1058 // Perform the comparison as if via '==='.
1059 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1060 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1061 JumpPatchSite patch_site(masm_);
1062 if (inline_smi_code) {
1065 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1067 __ Branch(&next_test, ne, a1, Operand(a0));
1068 __ Drop(1); // Switch value is no longer needed.
1069 __ Branch(clause->body_target());
1071 __ bind(&slow_case);
1074 // Record position before stub call for type feedback.
1075 SetSourcePosition(clause->position());
1076 Handle<Code> ic =
1077     CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1078 CallIC(ic, clause->CompareId());
1079 patch_site.EmitPatchInfo();
1083 PrepareForBailout(clause, TOS_REG);
1084 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1085 __ Branch(&next_test, ne, v0, Operand(at));
1087 __ Branch(clause->body_target());
1090 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1091 __ Drop(1); // Switch value is no longer needed.
1092 __ Branch(clause->body_target());
1095 // Discard the test value and jump to the default if present, otherwise to
1096 // the end of the statement.
1097 __ bind(&next_test);
1098 __ Drop(1); // Switch value is no longer needed.
1099 if (default_clause == NULL) {
1100 __ Branch(nested_statement.break_label());
1102 __ Branch(default_clause->body_target());
1105 // Compile all the case bodies.
1106 for (int i = 0; i < clauses->length(); i++) {
1107 Comment cmnt(masm_, "[ Case body");
1108 CaseClause* clause = clauses->at(i);
1109 __ bind(clause->body_target());
1110 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1111 VisitStatements(clause->statements());
1114 __ bind(nested_statement.break_label());
1115 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
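// Illustrative emission order for "switch (x) { case a: A; default: D; case b: B; }":
// the comparison code for 'a' and 'b' is emitted first (the default clause is
// skipped during the test pass), then a jump to the default body, and finally
// the bodies A, D, B in source order so that fall-through works as expected.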
1119 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1120 Comment cmnt(masm_, "[ ForInStatement");
1121 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1122 SetStatementPosition(stmt);
1125 ForIn loop_statement(this, stmt);
1126 increment_loop_depth();
1128 // Get the object to enumerate over. If the object is null or undefined, skip
1129 // over the loop. See ECMA-262 version 5, section 12.6.4.
1130 SetExpressionPosition(stmt->enumerable());
1131 VisitForAccumulatorValue(stmt->enumerable());
1132 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1133 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1134 __ Branch(&exit, eq, a0, Operand(at));
1135 Register null_value = a5;
1136 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1137 __ Branch(&exit, eq, a0, Operand(null_value));
1138 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1140 // Convert the object to a JS object.
1141 Label convert, done_convert;
1142 __ JumpIfSmi(a0, &convert);
1143 __ GetObjectType(a0, a1, a1);
1144 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1147 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1149 __ bind(&done_convert);
1150 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1153 // Check for proxies.
1155 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1156 __ GetObjectType(a0, a1, a1);
1157 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1159 // Check cache validity in generated code. This is a fast case for
1160 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1161 // guarantee cache validity, call the runtime system to check cache
1162 // validity or get the property names in a fixed array.
1163 __ CheckEnumCache(null_value, &call_runtime);
1165 // The enum cache is valid. Load the map of the object being
1166 // iterated over and use the cache for the iteration.
1168 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1169 __ Branch(&use_cache);
1171 // Get the set of properties to enumerate.
1172 __ bind(&call_runtime);
1173 __ push(a0); // Duplicate the enumerable object on the stack.
1174 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1175 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1177 // If we got a map from the runtime call, we can do a fast
1178 // modification check. Otherwise, we got a fixed array, and we have
1179 // to do a slow check.
1181 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1182 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1183 __ Branch(&fixed_array, ne, a2, Operand(at));
1185 // We got a map in register v0. Get the enumeration cache from it.
1186 Label no_descriptors;
1187 __ bind(&use_cache);
1189 __ EnumLength(a1, v0);
1190 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1192 __ LoadInstanceDescriptors(v0, a2);
1193 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1194 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1196 // Set up the four remaining stack slots.
1197 __ li(a0, Operand(Smi::FromInt(0)));
1198 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1199 __ Push(v0, a2, a1, a0);
1202 __ bind(&no_descriptors);
1206 // We got a fixed array in register v0. Iterate through that.
1208 __ bind(&fixed_array);
1210 __ li(a1, FeedbackVector());
1211 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1212 int vector_index = FeedbackVector()->GetIndex(slot);
1213 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1215 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1216 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1217 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1218 __ GetObjectType(a2, a3, a3);
1219 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1220 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1221 __ bind(&non_proxy);
1222 __ Push(a1, v0); // Smi and array
1223 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1224 __ li(a0, Operand(Smi::FromInt(0)));
1225 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
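// The loop state now occupies five stack slots (as read back below):
//   [sp + 0 * kPointerSize]  current index (smi)
//   [sp + 1 * kPointerSize]  array length (smi)
//   [sp + 2 * kPointerSize]  fixed array of keys / enum cache
//   [sp + 3 * kPointerSize]  expected map, or smi 1 (slow) / 0 (proxy)
//   [sp + 4 * kPointerSize]  the enumerable object itself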
1227 // Generate code for doing the condition check.
1228 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1230 SetExpressionPosition(stmt->each());
1232 // Load the current count to a0, load the length to a1.
1233 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1234 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1235 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1237 // Get the current entry of the array into register a3.
1238 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1239 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1240 __ SmiScale(a4, a0, kPointerSizeLog2);
1241 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1242 __ ld(a3, MemOperand(a4)); // Current entry.
1244 // Get the expected map from the stack or a smi in the
1245 // permanent slow case into register a2.
1246 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1248 // Check if the expected map still matches that of the enumerable.
1249 // If not, we may have to filter the key.
1251 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1252 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1253 __ Branch(&update_each, eq, a4, Operand(a2));
1255 // For proxies, no filtering is done.
1256 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1257 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1258 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1260 // Convert the entry to a string or (smi) 0 if it isn't a property
1261 // any more. If the property has been removed while iterating, we just skip it.
1263 __ Push(a1, a3); // Enumerable and current entry.
1264 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1265 __ mov(a3, result_register());
1266 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1268 // Update the 'each' property or variable from the possibly filtered
1269 // entry in register a3.
1270 __ bind(&update_each);
1271 __ mov(result_register(), a3);
1272 // Perform the assignment as if via '='.
1273 { EffectContext context(this);
1274 EmitAssignment(stmt->each());
1275 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1278 // Generate code for the body of the loop.
1279 Visit(stmt->body());
1281 // Generate code for going to the next element by incrementing
1282 // the index (smi) stored on top of the stack.
1283 __ bind(loop_statement.continue_label());
1285 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1288 EmitBackEdgeBookkeeping(stmt, &loop);
1291 // Remove the pointers stored on the stack.
1292 __ bind(loop_statement.break_label());
1295 // Exit and decrement the loop depth.
1296 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1298 decrement_loop_depth();
1302 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1304 // Use the fast case closure allocation code that allocates in new
1305 // space for nested functions that don't need literals cloning. If
1306 // we're running with the --always-opt or the --prepare-always-opt
1307 // flag, we need to use the runtime function so that the new function
1308 // we are creating here gets a chance to have its code optimized and
1309 // doesn't just get a copy of the existing unoptimized code.
1310 if (!FLAG_always_opt &&
1311 !FLAG_prepare_always_opt &&
1313 scope()->is_function_scope() &&
1314 info->num_literals() == 0) {
1315 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1316 __ li(a2, Operand(info));
1319 __ li(a0, Operand(info));
1320 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1321 : Heap::kFalseValueRootIndex);
1322 __ Push(cp, a0, a1);
1323 __ CallRuntime(Runtime::kNewClosure, 3);
1325 context()->Plug(v0);
1329 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1330 Comment cmnt(masm_, "[ VariableProxy");
1331 EmitVariableLoad(expr);
1335 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1336 Comment cmnt(masm_, "[ SuperReference ");
1338 __ ld(LoadDescriptor::ReceiverRegister(),
1339 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1341 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1342 __ li(LoadDescriptor::NameRegister(), home_object_symbol);
1344 if (FLAG_vector_ics) {
1345 __ li(VectorLoadICDescriptor::SlotRegister(),
1346 Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
1347 CallLoadIC(NOT_CONTEXTUAL);
1349 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1353 __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
1354 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1359 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1361 if (NeedsHomeObject(initializer)) {
1362 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1363 __ li(StoreDescriptor::NameRegister(),
1364 Operand(isolate()->factory()->home_object_symbol()));
1365 __ ld(StoreDescriptor::ValueRegister(),
1366 MemOperand(sp, offset * kPointerSize));
1372 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1373 TypeofState typeof_state,
1375 Register current = cp;
1381 if (s->num_heap_slots() > 0) {
1382 if (s->calls_sloppy_eval()) {
1383 // Check that extension is NULL.
1384 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1385 __ Branch(slow, ne, temp, Operand(zero_reg));
1387 // Load next context in chain.
1388 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1389 // Walk the rest of the chain without clobbering cp.
1392 // If no outer scope calls eval, we do not need to check more
1393 // context extensions.
1394 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1395 s = s->outer_scope();
1398 if (s->is_eval_scope()) {
1400 if (!current.is(next)) {
1401 __ Move(next, current);
1404 // Terminate at native context.
1405 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1406 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1407 __ Branch(&fast, eq, temp, Operand(a4));
1408 // Check that extension is NULL.
1409 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1410 __ Branch(slow, ne, temp, Operand(zero_reg));
1411 // Load next context in chain.
1412 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1417 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1418 __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1419 if (FLAG_vector_ics) {
1420 __ li(VectorLoadICDescriptor::SlotRegister(),
1421 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1424 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1431 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1433 DCHECK(var->IsContextSlot());
1434 Register context = cp;
1438 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1439 if (s->num_heap_slots() > 0) {
1440 if (s->calls_sloppy_eval()) {
1441 // Check that extension is NULL.
1442 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1443 __ Branch(slow, ne, temp, Operand(zero_reg));
1445 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1446 // Walk the rest of the chain without clobbering cp.
1450 // Check that last extension is NULL.
1451 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1452 __ Branch(slow, ne, temp, Operand(zero_reg));
1454 // This function is used only for loads, not stores, so it's safe to
1455 // return a cp-based operand (the write barrier cannot be allowed to
1456 // destroy the cp register).
1457 return ContextOperand(context, var->index());
1461 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1462 TypeofState typeof_state,
1465 // Generate fast-case code for variables that might be shadowed by
1466 // eval-introduced variables. Eval is used a lot without
1467 // introducing variables. In those cases, we do not want to
1468 // perform a runtime call for all variables in the scope
1469 // containing the eval.
1470 Variable* var = proxy->var();
1471 if (var->mode() == DYNAMIC_GLOBAL) {
1472 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1474 } else if (var->mode() == DYNAMIC_LOCAL) {
1475 Variable* local = var->local_if_not_shadowed();
1476 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1477 if (local->mode() == LET || local->mode() == CONST ||
1478 local->mode() == CONST_LEGACY) {
1479 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1480 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1481 if (local->mode() == CONST_LEGACY) {
1482 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1483 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1484 } else { // LET || CONST
1485 __ Branch(done, ne, at, Operand(zero_reg));
1486 __ li(a0, Operand(var->name()));
1488 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1496 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1497 // Record position before possible IC call.
1498 SetSourcePosition(proxy->position());
1499 Variable* var = proxy->var();
1501 // Three cases: global variables, lookup variables, and all other types of variables.
1503 switch (var->location()) {
1504 case Variable::UNALLOCATED: {
1505 Comment cmnt(masm_, "[ Global variable");
1506 // Use inline caching. Variable name is passed in a2 and the global
1507 // object (receiver) in a0.
1508 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1509 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1510 if (FLAG_vector_ics) {
1511 __ li(VectorLoadICDescriptor::SlotRegister(),
1512 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1514 CallLoadIC(CONTEXTUAL);
1515 context()->Plug(v0);
1519 case Variable::PARAMETER:
1520 case Variable::LOCAL:
1521 case Variable::CONTEXT: {
1522 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1523 : "[ Stack variable");
1524 if (var->binding_needs_init()) {
1525 // var->scope() may be NULL when the proxy is located in eval code and
1526 // refers to a potential outside binding. Currently those bindings are
1527 // always looked up dynamically, i.e. in that case
1528 // var->location() == LOOKUP.
1530 DCHECK(var->scope() != NULL);
1532 // Check if the binding really needs an initialization check. The check
1533 // can be skipped in the following situation: we have a LET or CONST
1534 // binding in harmony mode, both the Variable and the VariableProxy have
1535 // the same declaration scope (i.e. they are both in global code, in the
1536 // same function or in the same eval code) and the VariableProxy is in
1537 // the source physically located after the initializer of the variable.
1539 // We cannot skip any initialization checks for CONST in non-harmony
1540 // mode because const variables may be declared but never initialized:
1541 // if (false) { const x; }; var y = x;
1543 // The condition on the declaration scopes is a conservative check for
1544 // nested functions that access a binding and are called before the
1545 // binding is initialized:
1546 // function() { f(); let x = 1; function f() { x = 2; } }
1548 bool skip_init_check;
1549 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1550 skip_init_check = false;
1551 } else if (var->is_this()) {
1552 CHECK(info_->function() != nullptr &&
1553 (info_->function()->kind() & kSubclassConstructor) != 0);
1554 // TODO(dslomov): implement 'this' hole check elimination.
1555 skip_init_check = false;
1557 // Check that we always have a valid source position.
1558 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1559 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1560 skip_init_check = var->mode() != CONST_LEGACY &&
1561 var->initializer_position() < proxy->position();
1564 if (!skip_init_check) {
1565 // Let and const need a read barrier.
1567 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1568 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1569 if (var->mode() == LET || var->mode() == CONST) {
1570 // Throw a reference error when using an uninitialized let/const
1571 // binding in harmony mode.
1573 __ Branch(&done, ne, at, Operand(zero_reg));
1574 __ li(a0, Operand(var->name()));
1576 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1579 // Uninitialized const bindings outside of harmony mode are unholed.
1580 DCHECK(var->mode() == CONST_LEGACY);
1581 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1582 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1584 context()->Plug(v0);
1588 context()->Plug(var);
1592 case Variable::LOOKUP: {
1593 Comment cmnt(masm_, "[ Lookup variable");
1595 // Generate code for loading from variables potentially shadowed
1596 // by eval-introduced variables.
1597 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1599 __ li(a1, Operand(var->name()));
1600 __ Push(cp, a1); // Context and name.
1601 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1603 context()->Plug(v0);
1609 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1610 Comment cmnt(masm_, "[ RegExpLiteral");
1612 // Registers will be used as follows:
1613 // a5 = materialized value (RegExp literal)
1614 // a4 = JS function, literals array
1615 // a3 = literal index
1616 // a2 = RegExp pattern
1617 // a1 = RegExp flags
1618 // a0 = RegExp literal clone
1619 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1620 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1621 int literal_offset =
1622 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1623 __ ld(a5, FieldMemOperand(a4, literal_offset));
1624 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1625 __ Branch(&materialized, ne, a5, Operand(at));
1627 // Create regexp literal using runtime function.
1628 // Result will be in v0.
1629 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1630 __ li(a2, Operand(expr->pattern()));
1631 __ li(a1, Operand(expr->flags()));
1632 __ Push(a4, a3, a2, a1);
1633 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1636 __ bind(&materialized);
1637 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1638 Label allocated, runtime_allocate;
1639 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1642 __ bind(&runtime_allocate);
1643 __ li(a0, Operand(Smi::FromInt(size)));
1645 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1648 __ bind(&allocated);
1650 // After this, registers are used as follows:
1651 // v0: Newly allocated regexp.
1652 // a5: Materialized regexp.
1654 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1655 context()->Plug(v0);
1659 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1660 if (expression == NULL) {
1661 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1664 VisitForStackValue(expression);
1669 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1670 Comment cmnt(masm_, "[ ObjectLiteral");
1672 expr->BuildConstantProperties(isolate());
1673 Handle<FixedArray> constant_properties = expr->constant_properties();
1674 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1675 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1676 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1677 __ li(a1, Operand(constant_properties));
1678 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1679 if (MustCreateObjectLiteralWithRuntime(expr)) {
1680 __ Push(a3, a2, a1, a0);
1681 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1683 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1686 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1688 // If result_saved is true the result is on top of the stack. If
1689 // result_saved is false the result is in v0.
1690 bool result_saved = false;
1692 // Mark all computed expressions that are bound to a key that
1693 // is shadowed by a later occurrence of the same key. For the
1694 // marked expressions, no store code is emitted.
1695 expr->CalculateEmitStore(zone());
1697 AccessorTable accessor_table(zone());
1698 int property_index = 0;
1699 for (; property_index < expr->properties()->length(); property_index++) {
1700 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1701 if (property->is_computed_name()) break;
1702 if (property->IsCompileTimeValue()) continue;
1704 Literal* key = property->key()->AsLiteral();
1705 Expression* value = property->value();
1706 if (!result_saved) {
1707 __ push(v0); // Save result on stack.
1708 result_saved = true;
1710 switch (property->kind()) {
1711 case ObjectLiteral::Property::CONSTANT:
1713 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1714 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1716 case ObjectLiteral::Property::COMPUTED:
1717 // It is safe to use [[Put]] here because the boilerplate already
1718 // contains computed properties with an uninitialized value.
1719 if (key->value()->IsInternalizedString()) {
1720 if (property->emit_store()) {
1721 VisitForAccumulatorValue(value);
1722 __ mov(StoreDescriptor::ValueRegister(), result_register());
1723 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1724 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1725 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1726 CallStoreIC(key->LiteralFeedbackId());
1727 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1729 if (NeedsHomeObject(value)) {
1730 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1731 __ li(StoreDescriptor::NameRegister(),
1732 Operand(isolate()->factory()->home_object_symbol()));
1733 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1737 VisitForEffect(value);
1741 // Duplicate receiver on stack.
1742 __ ld(a0, MemOperand(sp));
1744 VisitForStackValue(key);
1745 VisitForStackValue(value);
1746 if (property->emit_store()) {
1747 EmitSetHomeObjectIfNeeded(value, 2);
1748 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1750 __ CallRuntime(Runtime::kSetProperty, 4);
1755 case ObjectLiteral::Property::PROTOTYPE:
1756 // Duplicate receiver on stack.
1757 __ ld(a0, MemOperand(sp));
1759 VisitForStackValue(value);
1760 DCHECK(property->emit_store());
1761 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1763 case ObjectLiteral::Property::GETTER:
1764 if (property->emit_store()) {
1765 accessor_table.lookup(key)->second->getter = value;
1768 case ObjectLiteral::Property::SETTER:
1769 if (property->emit_store()) {
1770 accessor_table.lookup(key)->second->setter = value;
1776 // Emit code to define accessors, using only a single call to the runtime for
1777 // each pair of corresponding getters and setters.
1778 for (AccessorTable::Iterator it = accessor_table.begin();
1779 it != accessor_table.end();
1781 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1783 VisitForStackValue(it->first);
1784 EmitAccessor(it->second->getter);
1785 EmitSetHomeObjectIfNeeded(it->second->getter, 2);
1786 EmitAccessor(it->second->setter);
1787 EmitSetHomeObjectIfNeeded(it->second->setter, 3);
1788 __ li(a0, Operand(Smi::FromInt(NONE)));
1790 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1793 // Object literals have two parts. The "static" part on the left contains no
1794 // computed property names, and so we can compute its map ahead of time; see
1795 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1796 // starts with the first computed property name, and continues with all
1797 // properties to its right. All the code from above initializes the static
1798 // component of the object literal, and arranges for the map of the result to
1799 // reflect the static order in which the keys appear. For the dynamic
1800 // properties, we compile them into a series of "SetOwnProperty" runtime
1801 // calls. This will preserve insertion order.
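  // Illustrative example (not from the original source): for
  //   var o = { a: 1, [k]: 2, b: 3 };
  // 'a' is part of the static part and lands in the boilerplate map, while
  // everything from '[k]' on -- including the plain name 'b' -- is defined
  // through the runtime calls below, preserving insertion order.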
1802 for (; property_index < expr->properties()->length(); property_index++) {
1803 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1805 Expression* value = property->value();
1806 if (!result_saved) {
1807 __ push(v0); // Save result on the stack
1808 result_saved = true;
    __ ld(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
1814 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1815 DCHECK(!property->is_computed_name());
1816 VisitForStackValue(value);
1817 DCHECK(property->emit_store());
1818 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1821 VisitForStackValue(value);
1822 EmitSetHomeObjectIfNeeded(value, 2);
1824 switch (property->kind()) {
1825 case ObjectLiteral::Property::CONSTANT:
1826 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1827 case ObjectLiteral::Property::COMPUTED:
1828 if (property->emit_store()) {
1829 __ li(a0, Operand(Smi::FromInt(NONE)));
            __ push(a0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;
        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;
1841 case ObjectLiteral::Property::GETTER:
1842 __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;
1847 case ObjectLiteral::Property::SETTER:
1848 __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }
1856 if (expr->has_function()) {
1857 DCHECK(result_saved);
    __ ld(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
1871 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1872 Comment cmnt(masm_, "[ ArrayLiteral");
1874 expr->BuildConstantElements(isolate());
1876 Handle<FixedArray> constant_elements = expr->constant_elements();
1877 bool has_fast_elements =
1878 IsFastObjectElementsKind(expr->constant_elements_kind());
1880 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1881 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1882 // If the only customer of allocation sites is transitioning, then
1883 // we can turn it off if we don't have anywhere else to transition to.
1884 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1887 __ mov(a0, result_register());
1888 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1889 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1890 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1891 __ li(a1, Operand(constant_elements));
1892 if (MustCreateArrayLiteralWithRuntime(expr)) {
1893 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1894 __ Push(a3, a2, a1, a0);
1895 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
1900 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1902 bool result_saved = false; // Is the result saved to the stack?
1903 ZoneList<Expression*>* subexprs = expr->values();
1904 int length = subexprs->length();
1906 // Emit code to evaluate all the non-constant subexpressions and to store
1907 // them into the newly cloned array.
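  // Illustrative example (not from the original source): for [1, f(), 3]
  // only f() reaches the loop body below; the compile-time values 1 and 3
  // are already present in the cloned boilerplate and are skipped.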
1908 for (int i = 0; i < length; i++) {
1909 Expression* subexpr = subexprs->at(i);
1910 // If the subexpression is a literal or a simple materialized literal it
1911 // is already set in the cloned array.
1912 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1914 if (!result_saved) {
1915 __ push(v0); // array literal
1916 __ Push(Smi::FromInt(expr->literal_index()));
1917 result_saved = true;
1920 VisitForAccumulatorValue(subexpr);
1922 if (has_fast_elements) {
1923 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1924 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1925 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1926 __ sd(result_register(), FieldMemOperand(a1, offset));
1927 // Update the write barrier for the array store.
1928 __ RecordWriteField(a1, offset, result_register(), a2,
1929 kRAHasBeenSaved, kDontSaveFPRegs,
1930 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ li(a3, Operand(Smi::FromInt(i)));
1933 __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }
1938 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  if (result_saved) {
    __ Pop();  // literal index
1942 context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
1949 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1950 DCHECK(expr->target()->IsValidReferenceExpression());
1952 Comment cmnt(masm_, "[ Assignment");
1954 Property* property = expr->target()->AsProperty();
1955 LhsKind assign_type = GetAssignType(property);
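  // The assignment targets handled below correspond roughly to the following
  // JavaScript forms (an illustrative sketch, not from the original source):
  //   VARIABLE:              x = v
  //   NAMED_PROPERTY:        o.x = v
  //   NAMED_SUPER_PROPERTY:  super.x = v    (inside a method)
  //   KEYED_PROPERTY:        o[k] = v
  //   KEYED_SUPER_PROPERTY:  super[k] = v   (inside a method)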
1957 // Evaluate LHS expression.
1958 switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
1962 case NAMED_PROPERTY:
1963 if (expr->is_compound()) {
1964 // We need the receiver both on the stack and in the register.
1965 VisitForStackValue(property->obj());
1966 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
1971 case NAMED_SUPER_PROPERTY:
1972 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1973 EmitLoadHomeObject(property->obj()->AsSuperReference());
1974 __ Push(result_register());
1975 if (expr->is_compound()) {
1976 const Register scratch = a1;
1977 __ ld(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
      }
      break;
1981 case KEYED_SUPER_PROPERTY: {
1982 const Register scratch = a1;
1983 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1984 EmitLoadHomeObject(property->obj()->AsSuperReference());
1985 __ Move(scratch, result_register());
1986 VisitForAccumulatorValue(property->key());
1987 __ Push(scratch, result_register());
1988 if (expr->is_compound()) {
1989 const Register scratch1 = a4;
1990 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
      }
      break;
    }
1995 case KEYED_PROPERTY:
1996 // We need the key and receiver on both the stack and in v0 and a1.
1997 if (expr->is_compound()) {
1998 VisitForStackValue(property->obj());
1999 VisitForStackValue(property->key());
2000 __ ld(LoadDescriptor::ReceiverRegister(),
2001 MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
2004 VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }
2010 // For compound assignments we need another deoptimization point after the
2011 // variable/property load.
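  // For example (illustrative only): in the compound assignment 'o.x += 1'
  // the property o.x is loaded here, the addition is emitted below, and only
  // then is the result stored back; the extra bailout point allows optimized
  // code to deopt between the load and the store.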
2012 if (expr->is_compound()) {
2013 { AccumulatorValueContext context(this);
2014 switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
2017 PrepareForBailout(expr->target(), TOS_REG);
2019 case NAMED_PROPERTY:
2020 EmitNamedPropertyLoad(property);
2021 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2023 case NAMED_SUPER_PROPERTY:
2024 EmitNamedSuperPropertyLoad(property);
2025 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2027 case KEYED_SUPER_PROPERTY:
2028 EmitKeyedSuperPropertyLoad(property);
2029 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2031 case KEYED_PROPERTY:
2032 EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }
2038 Token::Value op = expr->binary_op();
2039 __ push(v0); // Left operand goes on the stack.
2040 VisitForAccumulatorValue(expr->value());
2042 SetSourcePosition(expr->position() + 1);
2043 AccumulatorValueContext context(this);
2044 if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }
2053 // Deoptimization point in case the binary operation may have side effects.
2054 PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
2059 // Record source position before possible IC call.
2060 SetSourcePosition(expr->position());
2063 switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
2070 case NAMED_PROPERTY:
2071 EmitNamedPropertyAssignment(expr);
2073 case NAMED_SUPER_PROPERTY:
2074 EmitNamedSuperPropertyStore(property);
2075 context()->Plug(v0);
2077 case KEYED_SUPER_PROPERTY:
2078 EmitKeyedSuperPropertyStore(property);
2079 context()->Plug(v0);
2081 case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
2088 void FullCodeGenerator::VisitYield(Yield* expr) {
2089 Comment cmnt(masm_, "[ Yield");
2090 // Evaluate yielded value first; the initial iterator definition depends on
2091 // this. It stays on the stack while we update the iterator.
2092 VisitForStackValue(expr->expression());
2094 switch (expr->yield_kind()) {
2095 case Yield::kSuspend:
2096 // Pop value from top-of-stack slot; box result into result register.
2097 EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
2100 case Yield::kInitial: {
2101 Label suspend, continuation, post_runtime, resume;
      __ jmp(&suspend);
      __ bind(&continuation);
      __ jmp(&resume);
      __ bind(&suspend);
2109 VisitForAccumulatorValue(expr->generator_object());
2110 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2111 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2112 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2113 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
2117 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2118 __ Branch(&post_runtime, eq, sp, Operand(a1));
2119 __ push(v0); // generator object
2120 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2121 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2122 __ bind(&post_runtime);
2123 __ pop(result_register());
2124 EmitReturnSequence();
      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }
2131 case Yield::kFinal: {
2132 VisitForAccumulatorValue(expr->generator_object());
2133 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2134 __ sd(a1, FieldMemOperand(result_register(),
2135 JSGeneratorObject::kContinuationOffset));
2136 // Pop value from top-of-stack slot, box result into result register.
2137 EmitCreateIteratorResult(true);
2138 EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
2143 case Yield::kDelegating: {
2144 VisitForStackValue(expr->generator_object());
2146 // Initial stack layout is as follows:
2147 // [sp + 1 * kPointerSize] iter
2148 // [sp + 0 * kPointerSize] g
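      // Rough desugaring of the delegating yield compiled below (an
      // illustrative sketch only; the precise protocol is spelled out at the
      // labels):
      //   var received = undefined;
      //   while (true) {
      //     var result = iter[f](received);  // f is "next", or "throw" after
      //                                      // an exception was thrown in
      //     if (result.done) break;          // final value is result.value
      //     received = yield result;         // l_try ... l_suspend below
      //   }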
2150 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2151 Label l_next, l_call;
2152 Register load_receiver = LoadDescriptor::ReceiverRegister();
2153 Register load_name = LoadDescriptor::NameRegister();
2154 // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);
2158 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2162 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2163 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2164 __ Push(a2, a3, a0); // "throw", iter, except
2167 // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
2171 __ pop(a0); // result
2172 __ PushTryHandler(StackHandler::CATCH, expr->index());
2173 const int handler_size = StackHandlerConstants::kSize;
2174 __ push(a0); // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
2180 const int generator_object_depth = kPointerSize + handler_size;
      __ ld(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);
2183 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2184 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2185 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2186 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
2190 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2191 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2192 __ pop(v0); // result
2193 EmitReturnSequence();
      __ bind(&l_resume);  // received in a0
      __ PopTryHandler();
2198 // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
2201 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2202 __ Push(load_name, a3, a0); // "next", iter, received
2204 // result = receiver[f](arg);
      __ bind(&l_call);
      __ ld(load_receiver, MemOperand(sp, kPointerSize));
2207 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2208 if (FLAG_vector_ics) {
2209 __ li(VectorLoadICDescriptor::SlotRegister(),
2210 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2212 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2213 CallIC(ic, TypeFeedbackId::None());
      __ mov(a1, v0);
      __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2217 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2221 __ Drop(1); // The function is still on the stack; drop it.
2223 // if (!result.done) goto l_try;
2224 __ Move(load_receiver, v0);
2226 __ push(load_receiver); // save result
2227 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2228 if (FLAG_vector_ics) {
2229 __ li(VectorLoadICDescriptor::SlotRegister(),
2230 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2232 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2234 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));
2239 __ pop(load_receiver); // result
2240 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2241 if (FLAG_vector_ics) {
2242 __ li(VectorLoadICDescriptor::SlotRegister(),
2243 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2245 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
      context()->DropAndPlug(2, v0);  // drop iter and g
      break;
    }
  }
}
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
2256 // The value stays in a0, and is ultimately read by the resumed generator, as
2257 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2258 // is read to throw the value when the resumed generator is already closed.
2259 // a1 will hold the generator object until the activation has been resumed.
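  // Summary of the resume sequence emitted below (for orientation): push the
  // receiver and hole-filled argument slots, rebuild a JS frame from the
  // generator's function and context, then either jump straight to the saved
  // continuation offset (no operand stack) or let the runtime restore the
  // operand stack and handlers.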
2260 VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);
2264 // Load suspended function and context.
2265 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2266 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2268 // Load receiver and store as the first argument.
  __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);
2272 // Push holes for the rest of the arguments to the generator function.
2273 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2274 // The argument count is stored as int32_t on 64-bit platforms.
2275 // TODO(plind): Smi on 32-bit platforms.
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2278 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2279 Label push_argument_holes, push_frame;
2280 __ bind(&push_argument_holes);
2281 __ Dsubu(a3, a3, Operand(1));
2282 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);
2286 // Enter a new JavaScript frame, and initialize its slots as they were when
2287 // the generator was suspended.
2288 Label resume_frame, done;
2289 __ bind(&push_frame);
2290 __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
2293 // ra = return address.
2294 // fp = caller's frame pointer.
2295 // cp = callee's context,
2296 // a4 = callee's JS function.
2297 __ Push(ra, fp, cp, a4);
2298 // Adjust FP to point to saved FP.
2299 __ Daddu(fp, sp, 2 * kPointerSize);
2301 // Load the operand stack size.
2302 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);
  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2311 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2312 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Daddu(a3, a3, Operand(a2));
2315 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2316 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }
2321 // Otherwise, we push holes for the operand stack and call the runtime to fix
2322 // up the stack and the handlers.
2323 Label push_operand_holes, call_resume;
2324 __ bind(&push_operand_holes);
2325 __ Dsubu(a3, a3, Operand(1));
2326 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
2329 __ bind(&call_resume);
2330 DCHECK(!result_register().is(a1));
2331 __ Push(a1, result_register());
2332 __ Push(Smi::FromInt(resume_mode));
2333 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2334 // Not reached: the runtime call returns elsewhere.
2335 __ stop("not-reached");
  __ bind(&done);
  context()->Plug(result_register());
}
2342 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
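  // Conceptually this builds the JS object '{ value: <popped value>,
  // done: <done> }' (sketch of intent only); the code below allocates it
  // directly with the native context's iterator result map.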
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);
  __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);
2353 __ bind(&gc_required);
2354 __ Push(Smi::FromInt(instance_size));
2355 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2356 __ ld(context_register(),
2357 MemOperand(fp, StandardFrameConstants::kContextOffset));
2359 __ bind(&allocated);
2360 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2361 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2362 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2365 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2366 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2367 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2368 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sd(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sd(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
  // Only the value field needs a write barrier, as the other values are in the
  // root set.
2376 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  context()->Plug(v0);
}
2381 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2382 SetSourcePosition(prop->position());
2383 Literal* key = prop->key()->AsLiteral();
2384 DCHECK(!prop->IsSuperAccess());
2386 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2387 if (FLAG_vector_ics) {
2388 __ li(VectorLoadICDescriptor::SlotRegister(),
2389 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2390 CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
2397 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2398 // Stack: receiver, home_object.
2399 SetSourcePosition(prop->position());
2400 Literal* key = prop->key()->AsLiteral();
2401 DCHECK(!key->value()->IsSmi());
2402 DCHECK(prop->IsSuperAccess());
2404 __ Push(key->value());
2405 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2409 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2410 SetSourcePosition(prop->position());
2411 // Call keyed load IC. It has register arguments receiver and key.
2412 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2413 if (FLAG_vector_ics) {
2414 __ li(VectorLoadICDescriptor::SlotRegister(),
2415 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
2423 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2424 // Stack: receiver, home_object, key.
2425 SetSourcePosition(prop->position());
2427 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
2435 Label done, smi_case, stub_call;
2437 Register scratch1 = a2;
2438 Register scratch2 = a3;
  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());
2446 // Perform combined smi check on both operands.
2447 __ Or(scratch1, left, Operand(right));
2448 STATIC_ASSERT(kSmiTag == 0);
2449 JumpPatchSite patch_site(masm_);
2450 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
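  // The combined check works because kSmiTag == 0 (asserted above): OR-ing
  // the operands leaves the low tag bit clear only if both are smis. For
  // example (illustrative), two tagged smis OR to a word with a clear low
  // bit, while any heap object pointer contributes a set tag bit.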
2452 __ bind(&stub_call);
2453 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2454 CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub.
  switch (op) {
    case Token::SAR:
2463 __ GetLeastBitsFromSmi(scratch1, right, 5);
2464 __ dsrav(right, left, scratch1);
2465 __ And(v0, right, Operand(0xffffffff00000000L));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
2469 __ GetLeastBitsFromSmi(scratch2, right, 5);
2470 __ dsllv(scratch1, scratch1, scratch2);
2471 __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
2476 __ GetLeastBitsFromSmi(scratch2, right, 5);
2477 __ dsrlv(scratch1, scratch1, scratch2);
2478 __ And(scratch2, scratch1, 0x80000000);
2479 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2480 __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2485 __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2489 __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ Dmulh(v0, left, right);
2493 __ dsra32(scratch2, v0, 0);
2494 __ sra(scratch1, v0, 31);
2495 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2497 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2498 __ Daddu(scratch2, right, left);
2499 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2500 DCHECK(Smi::FromInt(0) == 0);
2501 __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
2507 case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
2510 case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
2518 context()->Plug(v0);
2522 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2523 // Constructor is in v0.
  DCHECK(lit != NULL);
  __ push(v0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = a1;
  __ ld(scratch,
        FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
  __ push(scratch);
2534 for (int i = 0; i < lit->properties()->length(); i++) {
2535 ObjectLiteral::Property* property = lit->properties()->at(i);
2536 Expression* value = property->value();
2538 if (property->is_static()) {
2539 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
    } else {
      __ ld(scratch, MemOperand(sp, 0));  // prototype
    }
    __ push(scratch);
2544 EmitPropertyKey(property, lit->GetIdForProperty(i));
2545 VisitForStackValue(value);
2546 EmitSetHomeObjectIfNeeded(value, 2);
2548 switch (property->kind()) {
2549 case ObjectLiteral::Property::CONSTANT:
2550 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
2553 case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;
2557 case ObjectLiteral::Property::GETTER:
2558 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;
2563 case ObjectLiteral::Property::SETTER:
2564 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;
    }
  }
  // prototype
  __ CallRuntime(Runtime::kToFastProperties, 1);
  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);
}
2582 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  __ pop(a1);
2585 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2586 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2587 CallIC(code, expr->BinaryOperationFeedbackId());
2588 patch_site.EmitPatchInfo();
2589 context()->Plug(v0);
2593 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2594 DCHECK(expr->IsValidReferenceExpression());
2596 Property* prop = expr->AsProperty();
2597 LhsKind assign_type = GetAssignType(prop);
2599 switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
2602 EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
2606 case NAMED_PROPERTY: {
2607 __ push(result_register()); // Preserve value.
2608 VisitForAccumulatorValue(prop->obj());
2609 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2610 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2611 __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
2616 case NAMED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2619 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2620 // stack: value, this; v0: home_object
2621 Register scratch = a2;
2622 Register scratch2 = a3;
2623 __ mov(scratch, result_register()); // home_object
2624 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2625 __ ld(scratch2, MemOperand(sp, 0)); // this
2626 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2627 __ sd(scratch, MemOperand(sp, 0)); // home_object
2628 // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
2632 case KEYED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2635 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2636 __ Push(result_register());
2637 VisitForAccumulatorValue(prop->key());
2638 Register scratch = a2;
2639 Register scratch2 = a3;
2640 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2641 // stack: value, this, home_object; v0: key, a3: value
2642 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2643 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2644 __ ld(scratch, MemOperand(sp, 0)); // home_object
2645 __ sd(scratch, MemOperand(sp, kPointerSize));
2646 __ sd(v0, MemOperand(sp, 0));
2647 __ Move(v0, scratch2);
2648 // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
2652 case KEYED_PROPERTY: {
2653 __ push(result_register()); // Preserve value.
2654 VisitForStackValue(prop->obj());
2655 VisitForAccumulatorValue(prop->key());
2656 __ Move(StoreDescriptor::NameRegister(), result_register());
2657 __ Pop(StoreDescriptor::ValueRegister(),
2658 StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}
2669 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2670 Variable* var, MemOperand location) {
2671 __ sd(result_register(), location);
2672 if (var->IsContextSlot()) {
2673 // RecordWrite may destroy all its register arguments.
2674 __ Move(a3, result_register());
2675 int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}
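// Why the barrier above is needed (a general sketch, assuming V8's
// generational GC): a context lives on the heap, so storing a new pointer
// into one of its slots may create an old-to-new reference that must be
// recorded; stack slots need no such bookkeeping, hence the IsContextSlot
// check.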
2682 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2683 if (var->IsUnallocated()) {
2684 // Global var, const, or let.
2685 __ mov(StoreDescriptor::ValueRegister(), result_register());
2686 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();
2689 } else if (op == Token::INIT_CONST_LEGACY) {
2690 // Const initializers need a write barrier.
2691 DCHECK(!var->IsParameter()); // No const parameters.
2692 if (var->IsLookupSlot()) {
2693 __ li(a0, Operand(var->name()));
2694 __ Push(v0, cp, a0); // Context and name.
2695 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, a1);
2700 __ ld(a2, location);
2701 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2702 __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }
2707 } else if (var->mode() == LET && op != Token::INIT_LET) {
2708 // Non-initializing assignment to let variable needs a write barrier.
2709 DCHECK(!var->IsLookupSlot());
2710 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, a1);
2713 __ ld(a3, location);
2714 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2715 __ Branch(&assign, ne, a3, Operand(a4));
2716 __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
2719 // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
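    // Illustrative TDZ example for the hole check above (not from the
    // original source):
    //   x = 1;   // throws ReferenceError: the slot still holds the hole
    //   let x;   // because this declaration has not been reached yet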
2722 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2723 if (var->IsLookupSlot()) {
2724 // Assignment to var.
2725 __ li(a4, Operand(var->name()));
2726 __ li(a3, Operand(Smi::FromInt(language_mode())));
      // jssp[0]  : language mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
2731 __ Push(v0, cp, a4, a3);
2732 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
2736 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2737 MemOperand location = VarOperand(var, a1);
2738 if (generate_debug_code_ && op == Token::INIT_LET) {
2739 // Check for an uninitialized let binding.
2740 __ ld(a2, location);
2741 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2742 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2744 EmitStoreToStackLocalOrContextSlot(var, location);
2746 } else if (IsSignallingAssignmentToConst(var, op, language_mode())) {
2747 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2752 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2753 // Assignment to a property, using a named store IC.
2754 Property* prop = expr->target()->AsProperty();
2755 DCHECK(prop != NULL);
2756 DCHECK(prop->key()->IsLiteral());
2758 // Record source code position before IC call.
2759 SetSourcePosition(expr->position());
2760 __ mov(StoreDescriptor::ValueRegister(), result_register());
2761 __ li(StoreDescriptor::NameRegister(),
2762 Operand(prop->key()->AsLiteral()->value()));
2763 __ pop(StoreDescriptor::ReceiverRegister());
2764 CallStoreIC(expr->AssignmentFeedbackId());
2766 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2767 context()->Plug(v0);
2771 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2772 // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
2775 DCHECK(prop != NULL);
2776 Literal* key = prop->key()->AsLiteral();
2777 DCHECK(key != NULL);
2779 __ Push(key->value());
  __ Push(v0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}
2787 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
2790 // stack : receiver ('this'), home_object, key
2791 DCHECK(prop != NULL);
  __ Push(v0);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}
2801 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2802 // Assignment to a property, using a keyed store IC.
2804 // Record source code position before IC call.
2805 SetSourcePosition(expr->position());
2806 // Call keyed store IC.
2807 // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
2811 __ mov(StoreDescriptor::ValueRegister(), result_register());
2812 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2813 DCHECK(StoreDescriptor::ValueRegister().is(a0));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2817 CallIC(ic, expr->AssignmentFeedbackId());
2819 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2820 context()->Plug(v0);
2824 void FullCodeGenerator::VisitProperty(Property* expr) {
2825 Comment cmnt(masm_, "[ Property");
2826 Expression* key = expr->key();
2828 if (key->IsPropertyName()) {
2829 if (!expr->IsSuperAccess()) {
2830 VisitForAccumulatorValue(expr->obj());
2831 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2832 EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2835 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2836 __ Push(result_register());
2837 EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
2841 VisitForStackValue(expr->obj());
2842 VisitForAccumulatorValue(expr->key());
2843 __ Move(LoadDescriptor::NameRegister(), v0);
2844 __ pop(LoadDescriptor::ReceiverRegister());
2845 EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2848 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2849 __ Push(result_register());
2850 VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
2854 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2855 context()->Plug(v0);
2859 void FullCodeGenerator::CallIC(Handle<Code> code,
2860 TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}
2866 // Code common for calls using the IC.
2867 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2868 Expression* callee = expr->expression();
2870 CallICState::CallType call_type =
2871 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2873 // Get the target function.
2874 if (call_type == CallICState::FUNCTION) {
2875 { StackValueContext context(this);
2876 EmitVariableLoad(callee->AsVariableProxy());
2877 PrepareForBailout(callee, NO_REGISTERS);
2879 // Push undefined as receiver. This is patched in the method prologue if it
2880 // is a sloppy mode method.
2881 __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
2884 DCHECK(callee->IsProperty());
2885 DCHECK(!callee->AsProperty()->IsSuperAccess());
2886 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2887 EmitNamedPropertyLoad(callee->AsProperty());
2888 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2889 // Push the target function under the receiver.
2890 __ ld(at, MemOperand(sp, 0));
    __ push(at);
    __ sd(v0, MemOperand(sp, kPointerSize));
  }
2895 EmitCall(expr, call_type);
2899 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2900 Expression* callee = expr->expression();
2901 DCHECK(callee->IsProperty());
2902 Property* prop = callee->AsProperty();
2903 DCHECK(prop->IsSuperAccess());
2905 SetSourcePosition(prop->position());
2906 Literal* key = prop->key()->AsLiteral();
2907 DCHECK(!key->value()->IsSmi());
2908 // Load the function from the receiver.
2909 const Register scratch = a1;
2910 SuperReference* super_ref = prop->obj()->AsSuperReference();
2911 EmitLoadHomeObject(super_ref);
2912 __ mov(scratch, v0);
2913 VisitForAccumulatorValue(super_ref->this_var());
2914 __ Push(scratch, v0, v0, scratch);
2915 __ Push(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
2923 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2925 // Replace home_object with target function.
2926 __ sd(v0, MemOperand(sp, kPointerSize));
  // Stack here:
  //  - target function
  //  - this (receiver)
2930 // - this (receiver)
2931 EmitCall(expr, CallICState::METHOD);
2935 // Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
2939 VisitForAccumulatorValue(key);
2941 Expression* callee = expr->expression();
2943 // Load the function from the receiver.
2944 DCHECK(callee->IsProperty());
2945 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2946 __ Move(LoadDescriptor::NameRegister(), v0);
2947 EmitKeyedPropertyLoad(callee->AsProperty());
2948 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2950 // Push the target function under the receiver.
2951 __ ld(at, MemOperand(sp, 0));
  __ push(at);
  __ sd(v0, MemOperand(sp, kPointerSize));
2955 EmitCall(expr, CallICState::METHOD);
2959 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2960 Expression* callee = expr->expression();
2961 DCHECK(callee->IsProperty());
2962 Property* prop = callee->AsProperty();
2963 DCHECK(prop->IsSuperAccess());
2965 SetSourcePosition(prop->position());
2966 // Load the function from the receiver.
2967 const Register scratch = a1;
2968 SuperReference* super_ref = prop->obj()->AsSuperReference();
2969 EmitLoadHomeObject(super_ref);
2970 __ Move(scratch, v0);
2971 VisitForAccumulatorValue(super_ref->this_var());
2972 __ Push(scratch, v0, v0, scratch);
2973 VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
2981 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2983 // Replace home_object with target function.
2984 __ sd(v0, MemOperand(sp, kPointerSize));
  // Stack here:
  //  - target function
  //  - this (receiver)
2988 // - this (receiver)
2989 EmitCall(expr, CallICState::METHOD);
2993 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2994 // Load the arguments.
2995 ZoneList<Expression*>* args = expr->arguments();
2996 int arg_count = args->length();
2997 { PreservePositionScope scope(masm()->positions_recorder());
2998 for (int i = 0; i < arg_count; i++) {
2999 VisitForStackValue(args->at(i));
3003 // Record source position of the IC call.
3004 SetSourcePosition(expr->position());
3005 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3006 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3007 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3008 // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
3011 RecordJSReturnSite(expr);
3012 // Restore context register.
3013 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3014 context()->DropAndPlug(1, v0);
3018 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3019 // a7: copy of the first argument or undefined if it doesn't exist.
3020 if (arg_count > 0) {
3021 __ ld(a7, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(a7, Heap::kUndefinedValueRootIndex);
  }
  // a6: the enclosing function.
3027 __ ld(a6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3029 // a5: the receiver of the enclosing function.
3030 int receiver_offset = 2 + info_->scope()->num_parameters();
3031 __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize));
3033 // a4: the language mode.
3034 __ li(a4, Operand(Smi::FromInt(language_mode())));
  // a1: the start position of the scope the call resides in.
3037 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3039 // Do the runtime call.
  __ Push(a7);
  __ Push(a6, a5, a4, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}
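// Illustrative case (not from the original source): for
//   function f(s) { return eval(s); }
// the resolver receives the callee, a copy of the first argument, the
// enclosing function, its receiver, the language mode and the scope start
// position, and decides whether this is a direct eval in the caller's scope.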
3046 void FullCodeGenerator::EmitLoadSuperConstructor() {
3047 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(a0);
  __ CallRuntime(Runtime::kGetPrototype, 1);
}
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
3055 // We want to verify that RecordJSReturnSite gets called on all paths
3056 // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
3060 Comment cmnt(masm_, "[ Call");
3061 Expression* callee = expr->expression();
3062 Call::CallType call_type = expr->GetCallType(isolate());
3064 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3065 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3066 // to resolve the function we need to call and the receiver of the
3067 // call. Then we call the resolved function using the given
3069 ZoneList<Expression*>* args = expr->arguments();
3070 int arg_count = args->length();
3072 { PreservePositionScope pos_scope(masm()->positions_recorder());
3073 VisitForStackValue(callee);
3074 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3075 __ push(a2); // Reserved receiver slot.
3077 // Push the arguments.
3078 for (int i = 0; i < arg_count; i++) {
3079 VisitForStackValue(args->at(i));
      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);
3088 // The runtime call returns a pair of values in v0 (function) and
3089 // v1 (receiver). Touch up the stack with the right values.
3090 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3091 __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
3093 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3095 // Record source position for debugger.
3096 SetSourcePosition(expr->position());
3097 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
3100 RecordJSReturnSite(expr);
3101 // Restore context register.
3102 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3103 context()->DropAndPlug(1, v0);
3104 } else if (call_type == Call::GLOBAL_CALL) {
3105 EmitCallWithLoadIC(expr);
3106 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3107 // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
3111 { PreservePositionScope scope(masm()->positions_recorder());
3112 // Generate code for loading from variables potentially shadowed
3113 // by eval-introduced variables.
3114 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
3119 // and the object holding it (returned in v1).
3120 DCHECK(!context_register().is(a2));
3121 __ li(a2, Operand(proxy->name()));
3122 __ Push(context_register(), a2);
3123 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3124 __ Push(v0, v1); // Function, receiver.
3125 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3127 // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
3143 // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
3146 } else if (call_type == Call::PROPERTY_CALL) {
3147 Property* property = callee->AsProperty();
3148 bool is_named_call = property->key()->IsPropertyName();
3149 if (property->IsSuperAccess()) {
3150 if (is_named_call) {
3151 EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
3157 PreservePositionScope scope(masm()->positions_recorder());
3158 VisitForStackValue(property->obj());
3160 if (is_named_call) {
3161 EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
3166 } else if (call_type == Call::SUPER_CALL) {
3167 EmitSuperConstructorCall(expr);
3169 DCHECK(call_type == Call::OTHER_CALL);
3170 // Call to an arbitrary expression not handled specially above.
3171 { PreservePositionScope scope(masm()->positions_recorder());
3172 VisitForStackValue(callee);
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }
#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
3187 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3188 Comment cmnt(masm_, "[ CallNew");
3189 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
3193 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
3196 DCHECK(!expr->expression()->IsSuperReference());
3197 VisitForStackValue(expr->expression());
3199 // Push the arguments ("left-to-right") on the stack.
3200 ZoneList<Expression*>* args = expr->arguments();
3201 int arg_count = args->length();
3202 for (int i = 0; i < arg_count; i++) {
3203 VisitForStackValue(args->at(i));
3206 // Call the construct call builtin that handles allocation and
3207 // constructor invocation.
3208 SetSourcePosition(expr->position());
3210 // Load function and argument count into a1 and a0.
3211 __ li(a0, Operand(arg_count));
3212 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3214 // Record call targets in unoptimized code.
3215 if (FLAG_pretenuring_call_new) {
3216 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3217 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3218 expr->CallNewFeedbackSlot().ToInt() + 1);
3221 __ li(a2, FeedbackVector());
3222 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3224 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3225 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3226 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3227 context()->Plug(v0);
3231 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3232 if (!ValidateSuperCall(expr)) return;
3233 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3234 GetVar(result_register(), new_target_var);
3235 __ Push(result_register());
3237 EmitLoadSuperConstructor();
3238 __ push(result_register());
3240 // Push the arguments ("left-to-right") on the stack.
3241 ZoneList<Expression*>* args = expr->arguments();
3242 int arg_count = args->length();
3243 for (int i = 0; i < arg_count; i++) {
3244 VisitForStackValue(args->at(i));
3247 // Call the construct call builtin that handles allocation and
3248 // constructor invocation.
3249 SetSourcePosition(expr->position());
3251 // Load function and argument count into a1 and a0.
3252 __ li(a0, Operand(arg_count));
3253 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3255 // Record call targets in unoptimized code.
3256 if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
3259 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3260 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
    expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }
3265 __ li(a2, FeedbackVector());
3266 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3268 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  RecordJSReturnSite(expr);
3275 SuperReference* super_ref = expr->expression()->AsSuperReference();
3276 Variable* this_var = super_ref->this_var()->var();
3277 GetVar(a1, this_var);
3278 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3279 Label uninitialized_this;
3280 __ Branch(&uninitialized_this, eq, a1, Operand(at));
3281 __ li(a0, Operand(this_var->name()));
  __ push(a0);
  __ CallRuntime(Runtime::kThrowReferenceError, 1);
3284 __ bind(&uninitialized_this);
3286 EmitVariableAssignment(this_var, Token::INIT_CONST);
3287 context()->Plug(v0);
3291 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3292 ZoneList<Expression*>* args = expr->arguments();
3293 DCHECK(args->length() == 1);
3295 VisitForAccumulatorValue(args->at(0));
3297 Label materialize_true, materialize_false;
3298 Label* if_true = NULL;
3299 Label* if_false = NULL;
3300 Label* fall_through = NULL;
3301 context()->PrepareTest(&materialize_true, &materialize_false,
3302 &if_true, &if_false, &fall_through);
3304 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, a4);
  Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3308 context()->Plug(if_true, if_false);
3312 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3313 ZoneList<Expression*>* args = expr->arguments();
3314 DCHECK(args->length() == 1);
3316 VisitForAccumulatorValue(args->at(0));
3318 Label materialize_true, materialize_false;
3319 Label* if_true = NULL;
3320 Label* if_false = NULL;
3321 Label* fall_through = NULL;
3322 context()->PrepareTest(&materialize_true, &materialize_false,
3323 &if_true, &if_false, &fall_through);
3325 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3326 __ NonNegativeSmiTst(v0, at);
3327 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3329 context()->Plug(if_true, if_false);
3333 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3334 ZoneList<Expression*>* args = expr->arguments();
3335 DCHECK(args->length() == 1);
3337 VisitForAccumulatorValue(args->at(0));
3339 Label materialize_true, materialize_false;
3340 Label* if_true = NULL;
3341 Label* if_false = NULL;
3342 Label* fall_through = NULL;
3343 context()->PrepareTest(&materialize_true, &materialize_false,
3344 &if_true, &if_false, &fall_through);
3346 __ JumpIfSmi(v0, if_false);
3347 __ LoadRoot(at, Heap::kNullValueRootIndex);
3348 __ Branch(if_true, eq, v0, Operand(at));
3349 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3350 // Undetectable objects behave like undefined when tested with typeof.
3351 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3352 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3353 __ Branch(if_false, ne, at, Operand(zero_reg));
3354 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3355 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3356 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3357 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3358 if_true, if_false, fall_through);
3360 context()->Plug(if_true, if_false);
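  // Semantics of the test above (illustrative): null counts as an object,
  // undetectable objects (document.all-style) do not, and otherwise the
  // instance type must fall in the non-callable spec-object range.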
3364 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3365 ZoneList<Expression*>* args = expr->arguments();
3366 DCHECK(args->length() == 1);
3368 VisitForAccumulatorValue(args->at(0));
3370 Label materialize_true, materialize_false;
3371 Label* if_true = NULL;
3372 Label* if_false = NULL;
3373 Label* fall_through = NULL;
3374 context()->PrepareTest(&materialize_true, &materialize_false,
3375 &if_true, &if_false, &fall_through);
3377 __ JumpIfSmi(v0, if_false);
3378 __ GetObjectType(v0, a1, a1);
3379 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3380 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3381 if_true, if_false, fall_through);
3383 context()->Plug(if_true, if_false);
3387 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3388 ZoneList<Expression*>* args = expr->arguments();
3389 DCHECK(args->length() == 1);
3391 VisitForAccumulatorValue(args->at(0));
3393 Label materialize_true, materialize_false;
3394 Label* if_true = NULL;
3395 Label* if_false = NULL;
3396 Label* fall_through = NULL;
3397 context()->PrepareTest(&materialize_true, &materialize_false,
3398 &if_true, &if_false, &fall_through);
3400 __ JumpIfSmi(v0, if_false);
3401 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3402 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3403 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3404 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3405 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3407 context()->Plug(if_true, if_false);
3411 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3412 CallRuntime* expr) {
3413 ZoneList<Expression*>* args = expr->arguments();
3414 DCHECK(args->length() == 1);
3416 VisitForAccumulatorValue(args->at(0));
3418 Label materialize_true, materialize_false, skip_lookup;
3419 Label* if_true = NULL;
3420 Label* if_false = NULL;
3421 Label* fall_through = NULL;
3422 context()->PrepareTest(&materialize_true, &materialize_false,
3423 &if_true, &if_false, &fall_through);
3425 __ AssertNotSmi(v0);
3427 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3428 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3429 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3430 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3432 // Check for fast case object. Generate false result for slow case object.
3433 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3434 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3435 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3436 __ Branch(if_false, eq, a2, Operand(a4));
3438 // Look for valueOf name in the descriptor array, and indicate false if
3439 // found. Since we omit an enumeration index check, if it is added via a
3440 // transition that shares its descriptor array, this is a false positive.
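  // In JS terms the fast path asks (rough sketch): 'does this object still
  // inherit the unmodified String.prototype.valueOf?'. An own 'valueOf'
  // property, found via the descriptor scan below, forces the answer false.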
3441 Label entry, loop, done;
3443 // Skip loop if no descriptors are valid.
3444 __ NumberOfOwnDescriptors(a3, a1);
3445 __ Branch(&done, eq, a3, Operand(zero_reg));
3447 __ LoadInstanceDescriptors(a1, a4);
3448 // a4: descriptor array.
3449 // a3: valid entries in the descriptor array.
3450 STATIC_ASSERT(kSmiTag == 0);
3451 STATIC_ASSERT(kSmiTagSize == 1);
3453 // STATIC_ASSERT(kPointerSize == 4);
3454 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3455 __ Dmul(a3, a3, at);
3456 // Calculate location of the first key name.
3457 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3458 // Calculate the end of the descriptor array.
  __ mov(a2, a4);
  __ dsll(a5, a3, kPointerSizeLog2);
3461 __ Daddu(a2, a2, a5);
3463 // Loop through all the keys in the descriptor array. If one of these is the
3464 // string "valueOf" the result is false.
3465 // The use of a6 to store the valueOf string assumes that it is not otherwise
3466 // used in the loop below.
3467 __ li(a6, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ ld(a3, MemOperand(a4, 0));
3471 __ Branch(if_false, eq, a3, Operand(a6));
3472 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, a4, Operand(a2));

  __ bind(&done);
3478 // Set the bit in the map to indicate that there is no local valueOf field.
3479 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3480 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3481 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3483 __ bind(&skip_lookup);
3485 // If a valueOf property is not found on the object check that its
3486 // prototype is the un-modified String prototype. If not result is false.
3487 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3488 __ JumpIfSmi(a2, if_false);
3489 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3490 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3491 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3492 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3493 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3494 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3496 context()->Plug(if_true, if_false);
3500 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3501 ZoneList<Expression*>* args = expr->arguments();
3502 DCHECK(args->length() == 1);
3504 VisitForAccumulatorValue(args->at(0));
3506 Label materialize_true, materialize_false;
3507 Label* if_true = NULL;
3508 Label* if_false = NULL;
3509 Label* fall_through = NULL;
3510 context()->PrepareTest(&materialize_true, &materialize_false,
3511 &if_true, &if_false, &fall_through);
3513 __ JumpIfSmi(v0, if_false);
3514 __ GetObjectType(v0, a1, a2);
3515 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3516 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3517 __ Branch(if_false);
3519 context()->Plug(if_true, if_false);
3523 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3524 ZoneList<Expression*>* args = expr->arguments();
3525 DCHECK(args->length() == 1);
3527 VisitForAccumulatorValue(args->at(0));
3529 Label materialize_true, materialize_false;
3530 Label* if_true = NULL;
3531 Label* if_false = NULL;
3532 Label* fall_through = NULL;
3533 context()->PrepareTest(&materialize_true, &materialize_false,
3534 &if_true, &if_false, &fall_through);
3536 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3537 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3538 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3539 __ li(a4, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(a4));
  __ mov(a4, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);
3546 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3547 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3549 context()->Plug(if_true, if_false);
3553 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3554 ZoneList<Expression*>* args = expr->arguments();
3555 DCHECK(args->length() == 1);
3557 VisitForAccumulatorValue(args->at(0));
3559 Label materialize_true, materialize_false;
3560 Label* if_true = NULL;
3561 Label* if_false = NULL;
3562 Label* fall_through = NULL;
3563 context()->PrepareTest(&materialize_true, &materialize_false,
3564 &if_true, &if_false, &fall_through);
3566 __ JumpIfSmi(v0, if_false);
3567 __ GetObjectType(v0, a1, a1);
3568 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3569 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3570 if_true, if_false, fall_through);
3572 context()->Plug(if_true, if_false);
3576 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3577 ZoneList<Expression*>* args = expr->arguments();
3578 DCHECK(args->length() == 1);
3580 VisitForAccumulatorValue(args->at(0));
3582 Label materialize_true, materialize_false;
3583 Label* if_true = NULL;
3584 Label* if_false = NULL;
3585 Label* fall_through = NULL;
3586 context()->PrepareTest(&materialize_true, &materialize_false,
3587 &if_true, &if_false, &fall_through);
3589 __ JumpIfSmi(v0, if_false);
3590 __ GetObjectType(v0, a1, a1);
3591 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3592 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3594 context()->Plug(if_true, if_false);
3598 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3599 ZoneList<Expression*>* args = expr->arguments();
3600 DCHECK(args->length() == 1);
3602 VisitForAccumulatorValue(args->at(0));
3604 Label materialize_true, materialize_false;
3605 Label* if_true = NULL;
3606 Label* if_false = NULL;
3607 Label* fall_through = NULL;
3608 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3609 &if_false, &fall_through);
3611 __ JumpIfSmi(v0, if_false);
3612 Register map = a1;
3613 Register type_reg = a2;
3614 __ GetObjectType(v0, map, type_reg);
3615 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
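// Biasing the instance type by FIRST_JS_PROXY_TYPE turns the two-sided range
// test into a single unsigned (ls) comparison against the range width.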
3616 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3617 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3618 if_true, if_false, fall_through);
3620 context()->Plug(if_true, if_false);
3624 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3625 DCHECK(expr->arguments()->length() == 0);
3627 Label materialize_true, materialize_false;
3628 Label* if_true = NULL;
3629 Label* if_false = NULL;
3630 Label* fall_through = NULL;
3631 context()->PrepareTest(&materialize_true, &materialize_false,
3632 &if_true, &if_false, &fall_through);
3634 // Get the frame pointer for the calling frame.
3635 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3637 // Skip the arguments adaptor frame if it exists.
3638 Label check_frame_marker;
3639 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3640 __ Branch(&check_frame_marker, ne,
3641 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3642 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3644 // Check the marker in the calling frame.
3645 __ bind(&check_frame_marker);
3646 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3647 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3648 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3649 if_true, if_false, fall_through);
3651 context()->Plug(if_true, if_false);
3655 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3656 ZoneList<Expression*>* args = expr->arguments();
3657 DCHECK(args->length() == 2);
3659 // Load the two objects into registers and perform the comparison.
3660 VisitForStackValue(args->at(0));
3661 VisitForAccumulatorValue(args->at(1));
3663 Label materialize_true, materialize_false;
3664 Label* if_true = NULL;
3665 Label* if_false = NULL;
3666 Label* fall_through = NULL;
3667 context()->PrepareTest(&materialize_true, &materialize_false,
3668 &if_true, &if_false, &fall_through);
3670 __ pop(a1);
3671 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3672 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3674 context()->Plug(if_true, if_false);
3678 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3679 ZoneList<Expression*>* args = expr->arguments();
3680 DCHECK(args->length() == 1);
3682 // ArgumentsAccessStub expects the key in a1 and the formal
3683 // parameter count in a0.
3684 VisitForAccumulatorValue(args->at(0));
3685 __ mov(a1, v0);
3686 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3687 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3688 __ CallStub(&stub);
3689 context()->Plug(v0);
3693 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3694 DCHECK(expr->arguments()->length() == 0);
3695 Label exit;
3696 // Get the number of formal parameters.
3697 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3699 // Check if the calling frame is an arguments adaptor frame.
3700 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3701 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3702 __ Branch(&exit, ne, a3,
3703 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3705 // Arguments adaptor case: Read the arguments length from the
3706 // adaptor frame.
3707 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3709 __ bind(&exit);
3710 context()->Plug(v0);
3714 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3715 ZoneList<Expression*>* args = expr->arguments();
3716 DCHECK(args->length() == 1);
3717 Label done, null, function, non_function_constructor;
3719 VisitForAccumulatorValue(args->at(0));
3721 // If the object is a smi, we return null.
3722 __ JumpIfSmi(v0, &null);
3724 // Check that the object is a JS object but take special care of JS
3725 // functions to make sure they have 'Function' as their class.
3726 // Assume that there are only two callable types, and one of them is at
3727 // either end of the type range for JS object types. Saves extra comparisons.
3728 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3729 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3730 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3732 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3733 FIRST_SPEC_OBJECT_TYPE + 1);
3734 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3736 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3737 LAST_SPEC_OBJECT_TYPE - 1);
3738 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3739 // Assume that there is no larger type.
3740 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3742 // Check if the constructor in the map is a JS function.
3743 __ ld(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3744 __ GetObjectType(v0, a1, a1);
3745 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3747 // v0 now contains the constructor function. Grab the
3748 // instance class name from there.
3749 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3750 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3751 __ Branch(&done);
3753 // Functions have class 'Function'.
3754 __ bind(&function);
3755 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3756 __ jmp(&done);
3758 // Objects with a non-function constructor have class 'Object'.
3759 __ bind(&non_function_constructor);
3760 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3761 __ jmp(&done);
3763 // Non-JS objects have class null.
3764 __ bind(&null);
3765 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3767 // All done.
3768 __ bind(&done);
3770 context()->Plug(v0);
3774 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3775 // Load the arguments on the stack and call the stub.
3776 SubStringStub stub(isolate());
3777 ZoneList<Expression*>* args = expr->arguments();
3778 DCHECK(args->length() == 3);
3779 VisitForStackValue(args->at(0));
3780 VisitForStackValue(args->at(1));
3781 VisitForStackValue(args->at(2));
3782 __ CallStub(&stub);
3783 context()->Plug(v0);
3787 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3788 // Load the arguments on the stack and call the stub.
3789 RegExpExecStub stub(isolate());
3790 ZoneList<Expression*>* args = expr->arguments();
3791 DCHECK(args->length() == 4);
3792 VisitForStackValue(args->at(0));
3793 VisitForStackValue(args->at(1));
3794 VisitForStackValue(args->at(2));
3795 VisitForStackValue(args->at(3));
3796 __ CallStub(&stub);
3797 context()->Plug(v0);
3801 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3802 ZoneList<Expression*>* args = expr->arguments();
3803 DCHECK(args->length() == 1);
3805 VisitForAccumulatorValue(args->at(0)); // Load the object.
3807 Label done;
3808 // If the object is a smi, return the object.
3809 __ JumpIfSmi(v0, &done);
3810 // If the object is not a value type, return the object.
3811 __ GetObjectType(v0, a1, a1);
3812 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3814 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3816 __ bind(&done);
3817 context()->Plug(v0);
3821 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3822 ZoneList<Expression*>* args = expr->arguments();
3823 DCHECK(args->length() == 2);
3824 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3825 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3827 VisitForAccumulatorValue(args->at(0)); // Load the object.
3829 Label runtime, done, not_date_object;
3830 Register object = v0;
3831 Register result = v0;
3832 Register scratch0 = t1;
3833 Register scratch1 = a1;
3835 __ JumpIfSmi(object, &not_date_object);
3836 __ GetObjectType(object, scratch1, scratch1);
3837 __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3839 if (index->value() == 0) {
3840 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3841 } else {
3843 if (index->value() < JSDate::kFirstUncachedField) {
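// A cached field is only trusted while the object's cache stamp matches the
// isolate's date cache stamp; on a mismatch the value is recomputed in the
// runtime.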
3844 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3845 __ li(scratch1, Operand(stamp));
3846 __ ld(scratch1, MemOperand(scratch1));
3847 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3848 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3849 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3850 kPointerSize * index->value()));
3851 __ jmp(&done);
3853 __ bind(&runtime);
3854 __ PrepareCallCFunction(2, scratch1);
3855 __ li(a1, Operand(index));
3856 __ Move(a0, object);
3857 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3858 __ jmp(&done);
3861 __ bind(&not_date_object);
3862 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3863 __ bind(&done);
3864 context()->Plug(v0);
3868 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3869 ZoneList<Expression*>* args = expr->arguments();
3870 DCHECK_EQ(3, args->length());
3872 Register string = v0;
3873 Register index = a1;
3874 Register value = a2;
3876 VisitForStackValue(args->at(0)); // index
3877 VisitForStackValue(args->at(1)); // value
3878 VisitForAccumulatorValue(args->at(2)); // string
3879 __ Pop(index, value);
3881 if (FLAG_debug_code) {
3882 __ SmiTst(value, at);
3883 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3884 __ SmiTst(index, at);
3885 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3886 __ SmiUntag(index, index);
3887 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3888 Register scratch = t1;
3889 __ EmitSeqStringSetCharCheck(
3890 string, index, value, scratch, one_byte_seq_type);
3891 __ SmiTag(index, index);
3894 __ SmiUntag(value, value);
3895 __ Daddu(at,
3896 string,
3897 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3898 __ SmiUntag(index);
3899 __ Daddu(at, at, index);
3900 __ sb(value, MemOperand(at));
3901 context()->Plug(string);
3905 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3906 ZoneList<Expression*>* args = expr->arguments();
3907 DCHECK_EQ(3, args->length());
3909 Register string = v0;
3910 Register index = a1;
3911 Register value = a2;
3913 VisitForStackValue(args->at(0)); // index
3914 VisitForStackValue(args->at(1)); // value
3915 VisitForAccumulatorValue(args->at(2)); // string
3916 __ Pop(index, value);
3918 if (FLAG_debug_code) {
3919 __ SmiTst(value, at);
3920 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3921 __ SmiTst(index, at);
3922 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3923 __ SmiUntag(index, index);
3924 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3925 Register scratch = t1;
3926 __ EmitSeqStringSetCharCheck(
3927 string, index, value, scratch, two_byte_seq_type);
3928 __ SmiTag(index, index);
3931 __ SmiUntag(value, value);
3932 __ Daddu(at,
3933 string,
3934 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
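// On MIPS64 a smi keeps its payload in the upper 32 bits, so one arithmetic
// shift right by 31 both untags the index and scales it by 2, the size of a
// two-byte character.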
3935 __ dsra(index, index, 32 - 1);
3936 __ Daddu(at, at, index);
3937 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3938 __ sh(value, MemOperand(at));
3939 context()->Plug(string);
3943 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3944 // Load the arguments on the stack and call the runtime function.
3945 ZoneList<Expression*>* args = expr->arguments();
3946 DCHECK(args->length() == 2);
3947 VisitForStackValue(args->at(0));
3948 VisitForStackValue(args->at(1));
3949 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3950 __ CallStub(&stub);
3951 context()->Plug(v0);
3955 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3956 ZoneList<Expression*>* args = expr->arguments();
3957 DCHECK(args->length() == 2);
3959 VisitForStackValue(args->at(0)); // Load the object.
3960 VisitForAccumulatorValue(args->at(1)); // Load the value.
3961 __ pop(a1); // v0 = value. a1 = object.
3963 Label done;
3964 // If the object is a smi, return the value.
3965 __ JumpIfSmi(a1, &done);
3967 // If the object is not a value type, return the value.
3968 __ GetObjectType(a1, a2, a2);
3969 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3971 // Store the value.
3972 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3973 // Update the write barrier. Save the value as it will be
3974 // overwritten by the write barrier code and is needed afterward.
3975 __ mov(a2, v0);
3976 __ RecordWriteField(
3977 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3979 __ bind(&done);
3980 context()->Plug(v0);
3984 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3985 ZoneList<Expression*>* args = expr->arguments();
3986 DCHECK_EQ(args->length(), 1);
3988 // Load the argument into a0 and call the stub.
3989 VisitForAccumulatorValue(args->at(0));
3990 __ mov(a0, result_register());
3992 NumberToStringStub stub(isolate());
3993 __ CallStub(&stub);
3994 context()->Plug(v0);
3998 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3999 ZoneList<Expression*>* args = expr->arguments();
4000 DCHECK(args->length() == 1);
4002 VisitForAccumulatorValue(args->at(0));
4004 Label done;
4005 StringCharFromCodeGenerator generator(v0, a1);
4006 generator.GenerateFast(masm_);
4007 __ jmp(&done);
4009 NopRuntimeCallHelper call_helper;
4010 generator.GenerateSlow(masm_, call_helper);
4012 __ bind(&done);
4013 context()->Plug(a1);
4017 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4018 ZoneList<Expression*>* args = expr->arguments();
4019 DCHECK(args->length() == 2);
4021 VisitForStackValue(args->at(0));
4022 VisitForAccumulatorValue(args->at(1));
4023 __ mov(a0, result_register());
4025 Register object = a1;
4026 Register index = a0;
4027 Register result = v0;
4029 __ pop(object);
4031 Label need_conversion;
4032 Label index_out_of_range;
4033 Label done;
4034 StringCharCodeAtGenerator generator(object,
4035 index,
4036 result,
4037 &need_conversion,
4038 &need_conversion,
4039 &index_out_of_range,
4040 STRING_INDEX_IS_NUMBER);
4041 generator.GenerateFast(masm_);
4042 __ jmp(&done);
4044 __ bind(&index_out_of_range);
4045 // When the index is out of range, the spec requires us to return
4046 // NaN.
4047 __ LoadRoot(result, Heap::kNanValueRootIndex);
4048 __ jmp(&done);
4050 __ bind(&need_conversion);
4051 // Load the undefined value into the result register, which will
4052 // trigger conversion.
4053 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4054 __ jmp(&done);
4056 NopRuntimeCallHelper call_helper;
4057 generator.GenerateSlow(masm_, call_helper);
4059 __ bind(&done);
4060 context()->Plug(result);
4064 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4065 ZoneList<Expression*>* args = expr->arguments();
4066 DCHECK(args->length() == 2);
4068 VisitForStackValue(args->at(0));
4069 VisitForAccumulatorValue(args->at(1));
4070 __ mov(a0, result_register());
4072 Register object = a1;
4073 Register index = a0;
4074 Register scratch = a3;
4075 Register result = v0;
4077 __ pop(object);
4079 Label need_conversion;
4080 Label index_out_of_range;
4081 Label done;
4082 StringCharAtGenerator generator(object,
4083 index,
4084 scratch,
4085 result,
4086 &need_conversion,
4087 &need_conversion,
4088 &index_out_of_range,
4089 STRING_INDEX_IS_NUMBER);
4090 generator.GenerateFast(masm_);
4091 __ jmp(&done);
4093 __ bind(&index_out_of_range);
4094 // When the index is out of range, the spec requires us to return
4095 // the empty string.
4096 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4097 __ jmp(&done);
4099 __ bind(&need_conversion);
4100 // Move smi zero into the result register, which will trigger
4101 // conversion.
4102 __ li(result, Operand(Smi::FromInt(0)));
4103 __ jmp(&done);
4105 NopRuntimeCallHelper call_helper;
4106 generator.GenerateSlow(masm_, call_helper);
4108 __ bind(&done);
4109 context()->Plug(result);
4113 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4114 ZoneList<Expression*>* args = expr->arguments();
4115 DCHECK_EQ(2, args->length());
4116 VisitForStackValue(args->at(0));
4117 VisitForAccumulatorValue(args->at(1));
4119 __ pop(a1);
4120 __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
4121 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4122 __ CallStub(&stub);
4123 context()->Plug(v0);
4127 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4128 ZoneList<Expression*>* args = expr->arguments();
4129 DCHECK_EQ(2, args->length());
4131 VisitForStackValue(args->at(0));
4132 VisitForStackValue(args->at(1));
4134 StringCompareStub stub(isolate());
4135 __ CallStub(&stub);
4136 context()->Plug(v0);
4140 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4141 ZoneList<Expression*>* args = expr->arguments();
4142 DCHECK(args->length() >= 2);
4144 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4145 for (int i = 0; i < arg_count + 1; i++) {
4146 VisitForStackValue(args->at(i));
4148 VisitForAccumulatorValue(args->last()); // Function.
4150 Label runtime, done;
4151 // Check for non-function argument (including proxy).
4152 __ JumpIfSmi(v0, &runtime);
4153 __ GetObjectType(v0, a1, a1);
4154 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4156 // InvokeFunction requires the function in a1. Move it in there.
4157 __ mov(a1, result_register());
4158 ParameterCount count(arg_count);
4159 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4160 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4161 __ jmp(&done);
4163 __ bind(&runtime);
4164 __ push(v0);
4165 __ CallRuntime(Runtime::kCall, args->length());
4166 __ bind(&done);
4168 context()->Plug(v0);
4172 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4173 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4174 GetVar(result_register(), new_target_var);
4175 __ Push(result_register());
4177 EmitLoadSuperConstructor();
4178 __ Push(result_register());
4180 // Check if the calling frame is an arguments adaptor frame.
4181 Label adaptor_frame, args_set_up, runtime;
4182 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4183 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4184 __ Branch(&adaptor_frame, eq, a3,
4185 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4186 // The default constructor has no arguments, so no adaptor frame means no args.
4187 __ mov(a0, zero_reg);
4188 __ Branch(&args_set_up);
4190 // Copy arguments from adaptor frame.
4192 __ bind(&adaptor_frame);
4193 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4194 __ SmiUntag(a1, a1);
4196 // Subtract 1 from arguments count, for new.target.
4197 __ Daddu(a1, a1, Operand(-1));
4198 __ mov(a0, a1);
4200 // Get arguments pointer in a2.
4201 __ dsll(at, a1, kPointerSizeLog2);
4202 __ Daddu(a2, a2, Operand(at));
4203 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4204 Label loop;
4205 __ bind(&loop);
4206 // Pre-decrement a2 with kPointerSize on each iteration.
4207 // Pre-decrement in order to skip receiver.
4208 __ Daddu(a2, a2, Operand(-kPointerSize));
4209 __ ld(a3, MemOperand(a2));
4210 __ push(a3);
4211 __ Daddu(a1, a1, Operand(-1));
4212 __ Branch(&loop, ne, a1, Operand(zero_reg));
4215 __ bind(&args_set_up);
4216 __ dsll(at, a0, kPointerSizeLog2);
4217 __ Daddu(at, at, Operand(sp));
4218 __ ld(a1, MemOperand(at, 0));
4219 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4220 __ Push(a2);
4221 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4222 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4224 __ Drop(1);
4226 context()->Plug(result_register());
4230 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4231 RegExpConstructResultStub stub(isolate());
4232 ZoneList<Expression*>* args = expr->arguments();
4233 DCHECK(args->length() == 3);
4234 VisitForStackValue(args->at(0));
4235 VisitForStackValue(args->at(1));
4236 VisitForAccumulatorValue(args->at(2));
4237 __ mov(a0, result_register());
4240 __ CallStub(&stub);
4241 context()->Plug(v0);
4245 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4246 ZoneList<Expression*>* args = expr->arguments();
4247 DCHECK_EQ(2, args->length());
4249 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4250 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4252 Handle<FixedArray> jsfunction_result_caches(
4253 isolate()->native_context()->jsfunction_result_caches());
4254 if (jsfunction_result_caches->length() <= cache_id) {
4255 __ Abort(kAttemptToUseUndefinedCache);
4256 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4257 context()->Plug(v0);
4258 return;
4261 VisitForAccumulatorValue(args->at(1));
4263 Register key = v0;
4264 Register cache = a1;
4265 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4266 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4267 __ ld(cache,
4268 ContextOperand(
4269 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4270 __ ld(cache,
4271 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4274 Label done, not_found;
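// The cache is a FixedArray of (key, result) pairs; the "finger" remembers
// the last hit, and only the entry under the finger is probed inline. Any
// miss falls back to the runtime lookup below.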
4275 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4276 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4277 // a2 now holds finger offset as a smi.
4278 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4279 // a3 now points to the start of fixed array elements.
4280 __ SmiScale(at, a2, kPointerSizeLog2);
4281 __ daddu(a3, a3, at);
4282 // a3 now points to key of indexed element of cache.
4283 __ ld(a2, MemOperand(a3));
4284 __ Branch(¬_found, ne, key, Operand(a2));
4286 __ ld(v0, MemOperand(a3, kPointerSize));
4287 __ Branch(&done);
4289 __ bind(&not_found);
4290 // Call runtime to perform the lookup.
4291 __ Push(cache, key);
4292 __ CallRuntime(Runtime::kGetFromCache, 2);
4294 __ bind(&done);
4295 context()->Plug(v0);
4299 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4300 ZoneList<Expression*>* args = expr->arguments();
4301 VisitForAccumulatorValue(args->at(0));
4303 Label materialize_true, materialize_false;
4304 Label* if_true = NULL;
4305 Label* if_false = NULL;
4306 Label* fall_through = NULL;
4307 context()->PrepareTest(&materialize_true, &materialize_false,
4308 &if_true, &if_false, &fall_through);
4310 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4311 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4313 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4314 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4316 context()->Plug(if_true, if_false);
4320 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4321 ZoneList<Expression*>* args = expr->arguments();
4322 DCHECK(args->length() == 1);
4323 VisitForAccumulatorValue(args->at(0));
4325 __ AssertString(v0);
4327 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4328 __ IndexFromHash(v0, v0);
4330 context()->Plug(v0);
4334 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4335 Label bailout, done, one_char_separator, long_separator,
4336 non_trivial_array, not_size_one_array, loop,
4337 empty_separator_loop, one_char_separator_loop,
4338 one_char_separator_loop_entry, long_separator_loop;
4339 ZoneList<Expression*>* args = expr->arguments();
4340 DCHECK(args->length() == 2);
4341 VisitForStackValue(args->at(1));
4342 VisitForAccumulatorValue(args->at(0));
4344 // All aliases of the same register have disjoint lifetimes.
4345 Register array = v0;
4346 Register elements = no_reg; // Will be v0.
4347 Register result = no_reg; // Will be v0.
4348 Register separator = a1;
4349 Register array_length = a2;
4350 Register result_pos = no_reg; // Will be a2.
4351 Register string_length = a3;
4352 Register string = a4;
4353 Register element = a5;
4354 Register elements_end = a6;
4355 Register scratch1 = a7;
4356 Register scratch2 = t1;
4357 Register scratch3 = t0;
4359 // Separator operand is on the stack.
4360 __ pop(separator);
4362 // Check that the array is a JSArray.
4363 __ JumpIfSmi(array, &bailout);
4364 __ GetObjectType(array, scratch1, scratch2);
4365 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4367 // Check that the array has fast elements.
4368 __ CheckFastElements(scratch1, scratch2, &bailout);
4370 // If the array has length zero, return the empty string.
4371 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4372 __ SmiUntag(array_length);
4373 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4374 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4375 __ Branch(&done);
4377 __ bind(&non_trivial_array);
4379 // Get the FixedArray containing array's elements.
4380 elements = array;
4381 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4382 array = no_reg; // End of array's live range.
4384 // Check that all array elements are sequential one-byte strings, and
4385 // accumulate the sum of their lengths, as a smi-encoded value.
4386 __ mov(string_length, zero_reg);
4387 __ Daddu(element,
4388 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4389 __ dsll(elements_end, array_length, kPointerSizeLog2);
4390 __ Daddu(elements_end, element, elements_end);
4391 // Loop condition: while (element < elements_end).
4392 // Live values in registers:
4393 // elements: Fixed array of strings.
4394 // array_length: Length of the fixed array of strings (not smi)
4395 // separator: Separator string
4396 // string_length: Accumulated sum of string lengths (smi).
4397 // element: Current array element.
4398 // elements_end: Array end.
4399 if (generate_debug_code_) {
4400 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4401 Operand(zero_reg));
4403 __ bind(&loop);
4404 __ ld(string, MemOperand(element));
4405 __ Daddu(element, element, kPointerSize);
4406 __ JumpIfSmi(string, &bailout);
4407 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4408 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4409 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4410 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4411 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4412 __ BranchOnOverflow(&bailout, scratch3);
4413 __ Branch(&loop, lt, element, Operand(elements_end));
4415 // If array_length is 1, return elements[0], a string.
4416 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4417 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4418 __ Branch(&done);
4420 __ bind(&not_size_one_array);
4422 // Live values in registers:
4423 // separator: Separator string
4424 // array_length: Length of the array.
4425 // string_length: Sum of string lengths (smi).
4426 // elements: FixedArray of strings.
4428 // Check that the separator is a flat one-byte string.
4429 __ JumpIfSmi(separator, &bailout);
4430 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4431 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4432 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4434 // Add (separator length times array_length) - separator length to the
4435 // string_length to get the length of the result string. array_length is not
4436 // smi but the other values are, so the result is a smi.
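// I.e. result_length = string_length + separator_length * (array_length - 1).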
4437 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4438 __ Dsubu(string_length, string_length, Operand(scratch1));
4439 __ SmiUntag(scratch1);
4440 __ Dmul(scratch2, array_length, scratch1);
4441 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4442 // zero.
4443 __ dsra32(scratch1, scratch2, 0);
4444 __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4445 __ SmiUntag(string_length);
4446 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4447 __ BranchOnOverflow(&bailout, scratch3);
4449 // Get first element in the array to free up the elements register to be used
4450 // for the result.
4451 __ Daddu(element,
4452 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4453 result = elements; // End of live range for elements.
4455 // Live values in registers:
4456 // element: First array element
4457 // separator: Separator string
4458 // string_length: Length of result string (not smi)
4459 // array_length: Length of the array.
4460 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4461 elements_end, &bailout);
4462 // Prepare for looping. Set up elements_end to end of the array. Set
4463 // result_pos to the position of the result where to write the first
4464 // character.
4465 __ dsll(elements_end, array_length, kPointerSizeLog2);
4466 __ Daddu(elements_end, element, elements_end);
4467 result_pos = array_length; // End of live range for array_length.
4468 array_length = no_reg;
4469 __ Daddu(result_pos,
4470 result,
4471 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4473 // Check the length of the separator.
4474 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4475 __ li(at, Operand(Smi::FromInt(1)));
4476 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4477 __ Branch(&long_separator, gt, scratch1, Operand(at));
4479 // Empty separator case.
4480 __ bind(&empty_separator_loop);
4481 // Live values in registers:
4482 // result_pos: the position to which we are currently copying characters.
4483 // element: Current array element.
4484 // elements_end: Array end.
4486 // Copy next array element to the result.
4487 __ ld(string, MemOperand(element));
4488 __ Daddu(element, element, kPointerSize);
4489 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4490 __ SmiUntag(string_length);
4491 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4492 __ CopyBytes(string, result_pos, string_length, scratch1);
4493 // End while (element < elements_end).
4494 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4495 DCHECK(result.is(v0));
4496 __ Branch(&done);
4498 // One-character separator case.
4499 __ bind(&one_char_separator);
4500 // Replace separator with its one-byte character value.
4501 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4502 // Jump into the loop after the code that copies the separator, so the first
4503 // element is not preceded by a separator.
4504 __ jmp(&one_char_separator_loop_entry);
4506 __ bind(&one_char_separator_loop);
4507 // Live values in registers:
4508 // result_pos: the position to which we are currently copying characters.
4509 // element: Current array element.
4510 // elements_end: Array end.
4511 // separator: Single separator one-byte char (in lower byte).
4513 // Copy the separator character to the result.
4514 __ sb(separator, MemOperand(result_pos));
4515 __ Daddu(result_pos, result_pos, 1);
4517 // Copy next array element to the result.
4518 __ bind(&one_char_separator_loop_entry);
4519 __ ld(string, MemOperand(element));
4520 __ Daddu(element, element, kPointerSize);
4521 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4522 __ SmiUntag(string_length);
4523 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4524 __ CopyBytes(string, result_pos, string_length, scratch1);
4525 // End while (element < elements_end).
4526 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4527 DCHECK(result.is(v0));
4528 __ Branch(&done);
4530 // Long separator case (separator is more than one character). Entry is at the
4531 // label long_separator below.
4532 __ bind(&long_separator_loop);
4533 // Live values in registers:
4534 // result_pos: the position to which we are currently copying characters.
4535 // element: Current array element.
4536 // elements_end: Array end.
4537 // separator: Separator string.
4539 // Copy the separator to the result.
4540 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4541 __ SmiUntag(string_length);
4542 __ Daddu(string,
4543 separator,
4544 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4545 __ CopyBytes(string, result_pos, string_length, scratch1);
4547 __ bind(&long_separator);
4548 __ ld(string, MemOperand(element));
4549 __ Daddu(element, element, kPointerSize);
4550 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4551 __ SmiUntag(string_length);
4552 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4553 __ CopyBytes(string, result_pos, string_length, scratch1);
4554 // End while (element < elements_end).
4555 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4556 DCHECK(result.is(v0));
4557 __ Branch(&done);
4559 __ bind(&bailout);
4560 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4561 __ bind(&done);
4562 context()->Plug(v0);
4566 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4567 DCHECK(expr->arguments()->length() == 0);
4568 ExternalReference debug_is_active =
4569 ExternalReference::debug_is_active_address(isolate());
4570 __ li(at, Operand(debug_is_active));
4571 __ lbu(v0, MemOperand(at));
4572 __ SmiTag(v0);
4573 context()->Plug(v0);
4577 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4578 if (expr->function() != NULL &&
4579 expr->function()->intrinsic_type == Runtime::INLINE) {
4580 Comment cmnt(masm_, "[ InlineRuntimeCall");
4581 EmitInlineRuntimeCall(expr);
4582 return;
4585 Comment cmnt(masm_, "[ CallRuntime");
4586 ZoneList<Expression*>* args = expr->arguments();
4587 int arg_count = args->length();
4589 if (expr->is_jsruntime()) {
4590 // Push the builtins object as the receiver.
4591 Register receiver = LoadDescriptor::ReceiverRegister();
4592 __ ld(receiver, GlobalObjectOperand());
4593 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4594 __ push(receiver);
4596 // Load the function from the receiver.
4597 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4598 if (FLAG_vector_ics) {
4599 __ li(VectorLoadICDescriptor::SlotRegister(),
4600 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4601 CallLoadIC(NOT_CONTEXTUAL);
4602 } else {
4603 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4604 }
4606 // Push the target function under the receiver.
4607 __ ld(at, MemOperand(sp, 0));
4608 __ push(at);
4609 __ sd(v0, MemOperand(sp, kPointerSize));
4611 // Push the arguments ("left-to-right").
4612 int arg_count = args->length();
4613 for (int i = 0; i < arg_count; i++) {
4614 VisitForStackValue(args->at(i));
4617 // Record source position of the IC call.
4618 SetSourcePosition(expr->position());
4619 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4620 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4621 __ CallStub(&stub);
4623 // Restore context register.
4624 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4626 context()->DropAndPlug(1, v0);
4627 } else {
4628 // Push the arguments ("left-to-right").
4629 for (int i = 0; i < arg_count; i++) {
4630 VisitForStackValue(args->at(i));
4633 // Call the C runtime function.
4634 __ CallRuntime(expr->function(), arg_count);
4635 context()->Plug(v0);
4636 }
4640 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4641 switch (expr->op()) {
4642 case Token::DELETE: {
4643 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4644 Property* property = expr->expression()->AsProperty();
4645 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4647 if (property != NULL) {
4648 VisitForStackValue(property->obj());
4649 VisitForStackValue(property->key());
4650 __ li(a1, Operand(Smi::FromInt(language_mode())));
4651 __ push(a1);
4652 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4653 context()->Plug(v0);
4654 } else if (proxy != NULL) {
4655 Variable* var = proxy->var();
4656 // Delete of an unqualified identifier is disallowed in strict mode
4657 // but "delete this" is allowed.
4658 DCHECK(is_sloppy(language_mode()) || var->is_this());
4659 if (var->IsUnallocated()) {
4660 __ ld(a2, GlobalObjectOperand());
4661 __ li(a1, Operand(var->name()));
4662 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4663 __ Push(a2, a1, a0);
4664 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4665 context()->Plug(v0);
4666 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4667 // Result of deleting non-global, non-dynamic variables is false.
4668 // The subexpression does not have side effects.
4669 context()->Plug(var->is_this());
4670 } else {
4671 // Non-global variable. Call the runtime to try to delete from the
4672 // context where the variable was introduced.
4673 DCHECK(!context_register().is(a2));
4674 __ li(a2, Operand(var->name()));
4675 __ Push(context_register(), a2);
4676 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4677 context()->Plug(v0);
4680 // Result of deleting non-property, non-variable reference is true.
4681 // The subexpression may have side effects.
4682 VisitForEffect(expr->expression());
4683 context()->Plug(true);
4685 break;
4688 case Token::VOID: {
4689 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4690 VisitForEffect(expr->expression());
4691 context()->Plug(Heap::kUndefinedValueRootIndex);
4692 break;
4695 case Token::NOT: {
4696 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4697 if (context()->IsEffect()) {
4698 // Unary NOT has no side effects so it's only necessary to visit the
4699 // subexpression. Match the optimizing compiler by not branching.
4700 VisitForEffect(expr->expression());
4701 } else if (context()->IsTest()) {
4702 const TestContext* test = TestContext::cast(context());
4703 // The labels are swapped for the recursive call.
4704 VisitForControl(expr->expression(),
4705 test->false_label(),
4706 test->true_label(),
4707 test->fall_through());
4708 context()->Plug(test->true_label(), test->false_label());
4710 // We handle value contexts explicitly rather than simply visiting
4711 // for control and plugging the control flow into the context,
4712 // because we need to prepare a pair of extra administrative AST ids
4713 // for the optimizing compiler.
4714 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4715 Label materialize_true, materialize_false, done;
4716 VisitForControl(expr->expression(),
4717 &materialize_false,
4718 &materialize_true,
4719 &materialize_true);
4720 __ bind(&materialize_true);
4721 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4722 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4723 if (context()->IsStackValue()) __ push(v0);
4724 __ jmp(&done);
4725 __ bind(&materialize_false);
4726 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4727 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4728 if (context()->IsStackValue()) __ push(v0);
4729 __ bind(&done);
4731 break;
4734 case Token::TYPEOF: {
4735 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4736 { StackValueContext context(this);
4737 VisitForTypeofValue(expr->expression());
4738 }
4739 __ CallRuntime(Runtime::kTypeof, 1);
4740 context()->Plug(v0);
4741 break;
4744 default:
4745 UNREACHABLE();
4750 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4751 DCHECK(expr->expression()->IsValidReferenceExpression());
4753 Comment cmnt(masm_, "[ CountOperation");
4754 SetSourcePosition(expr->position());
4756 Property* prop = expr->expression()->AsProperty();
4757 LhsKind assign_type = GetAssignType(prop);
4759 // Evaluate expression and get value.
4760 if (assign_type == VARIABLE) {
4761 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4762 AccumulatorValueContext context(this);
4763 EmitVariableLoad(expr->expression()->AsVariableProxy());
4764 } else {
4765 // Reserve space for result of postfix operation.
4766 if (expr->is_postfix() && !context()->IsEffect()) {
4767 __ li(at, Operand(Smi::FromInt(0)));
4768 __ push(at);
4770 switch (assign_type) {
4771 case NAMED_PROPERTY: {
4772 // Put the object both on the stack and in the register.
4773 VisitForStackValue(prop->obj());
4774 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4775 EmitNamedPropertyLoad(prop);
4779 case NAMED_SUPER_PROPERTY: {
4780 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4781 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4782 __ Push(result_register());
4783 const Register scratch = a1;
4784 __ ld(scratch, MemOperand(sp, kPointerSize));
4785 __ Push(scratch, result_register());
4786 EmitNamedSuperPropertyLoad(prop);
4790 case KEYED_SUPER_PROPERTY: {
4791 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4792 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4793 const Register scratch = a1;
4794 const Register scratch1 = a4;
4795 __ Move(scratch, result_register());
4796 VisitForAccumulatorValue(prop->key());
4797 __ Push(scratch, result_register());
4798 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
4799 __ Push(scratch1, scratch, result_register());
4800 EmitKeyedSuperPropertyLoad(prop);
4804 case KEYED_PROPERTY: {
4805 VisitForStackValue(prop->obj());
4806 VisitForStackValue(prop->key());
4807 __ ld(LoadDescriptor::ReceiverRegister(),
4808 MemOperand(sp, 1 * kPointerSize));
4809 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4810 EmitKeyedPropertyLoad(prop);
4819 // We need a second deoptimization point after loading the value
4820 // in case evaluating the property load may have a side effect.
4821 if (assign_type == VARIABLE) {
4822 PrepareForBailout(expr->expression(), TOS_REG);
4823 } else {
4824 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4825 }
4827 // Inline smi case if we are in a loop.
4828 Label stub_call, done;
4829 JumpPatchSite patch_site(masm_);
4831 int count_value = expr->op() == Token::INC ? 1 : -1;
4833 if (ShouldInlineSmiCase(expr->op())) {
4834 Label slow;
4835 patch_site.EmitJumpIfNotSmi(v0, &slow);
4837 // Save result for postfix expressions.
4838 if (expr->is_postfix()) {
4839 if (!context()->IsEffect()) {
4840 // Save the result on the stack. If we have a named or keyed property
4841 // we store the result under the receiver that is currently on top
4842 // of the stack.
4843 switch (assign_type) {
4844 case VARIABLE:
4845 __ push(v0);
4846 break;
4847 case NAMED_PROPERTY:
4848 __ sd(v0, MemOperand(sp, kPointerSize));
4849 break;
4850 case NAMED_SUPER_PROPERTY:
4851 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4852 break;
4853 case KEYED_PROPERTY:
4854 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4855 break;
4856 case KEYED_SUPER_PROPERTY:
4857 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4858 break;
4863 Register scratch1 = a1;
4864 Register scratch2 = a4;
4865 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4866 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4867 __ BranchOnNoOverflow(&done, scratch2);
4868 // Call stub. Undo operation first.
4869 __ Dsubu(v0, v0, scratch1);
4870 __ jmp(&stub_call);
4871 __ bind(&slow);
4873 ToNumberStub convert_stub(isolate());
4874 __ CallStub(&convert_stub);
4875 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4877 // Save result for postfix expressions.
4878 if (expr->is_postfix()) {
4879 if (!context()->IsEffect()) {
4880 // Save the result on the stack. If we have a named or keyed property
4881 // we store the result under the receiver that is currently on top
4882 // of the stack.
4883 switch (assign_type) {
4884 case VARIABLE:
4885 __ push(v0);
4886 break;
4887 case NAMED_PROPERTY:
4888 __ sd(v0, MemOperand(sp, kPointerSize));
4889 break;
4890 case NAMED_SUPER_PROPERTY:
4891 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4892 break;
4893 case KEYED_PROPERTY:
4894 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4895 break;
4896 case KEYED_SUPER_PROPERTY:
4897 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4898 break;
4903 __ bind(&stub_call);
4904 __ mov(a1, v0);
4905 __ li(a0, Operand(Smi::FromInt(count_value)));
4907 // Record position before stub call.
4908 SetSourcePosition(expr->position());
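// INC and DEC are both emitted as a generic ADD of the old value and
// Smi(+1)/Smi(-1), so they can share the BinaryOpIC below.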
4910 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4911 CallIC(code, expr->CountBinOpFeedbackId());
4912 patch_site.EmitPatchInfo();
4915 // Store the value returned in v0.
4916 switch (assign_type) {
4917 case VARIABLE:
4918 if (expr->is_postfix()) {
4919 { EffectContext context(this);
4920 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4921 Token::ASSIGN);
4922 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4925 // For all contexts except EffectContext we have the result on
4926 // top of the stack.
4927 if (!context()->IsEffect()) {
4928 context()->PlugTOS();
4930 } else {
4931 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4932 Token::ASSIGN);
4933 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4934 context()->Plug(v0);
4937 case NAMED_PROPERTY: {
4938 __ mov(StoreDescriptor::ValueRegister(), result_register());
4939 __ li(StoreDescriptor::NameRegister(),
4940 Operand(prop->key()->AsLiteral()->value()));
4941 __ pop(StoreDescriptor::ReceiverRegister());
4942 CallStoreIC(expr->CountStoreFeedbackId());
4943 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4944 if (expr->is_postfix()) {
4945 if (!context()->IsEffect()) {
4946 context()->PlugTOS();
4948 } else {
4949 context()->Plug(v0);
4950 }
4951 break;
4953 case NAMED_SUPER_PROPERTY: {
4954 EmitNamedSuperPropertyStore(prop);
4955 if (expr->is_postfix()) {
4956 if (!context()->IsEffect()) {
4957 context()->PlugTOS();
4959 } else {
4960 context()->Plug(v0);
4961 }
4962 break;
4964 case KEYED_SUPER_PROPERTY: {
4965 EmitKeyedSuperPropertyStore(prop);
4966 if (expr->is_postfix()) {
4967 if (!context()->IsEffect()) {
4968 context()->PlugTOS();
4970 } else {
4971 context()->Plug(v0);
4972 }
4973 break;
4975 case KEYED_PROPERTY: {
4976 __ mov(StoreDescriptor::ValueRegister(), result_register());
4977 __ Pop(StoreDescriptor::ReceiverRegister(),
4978 StoreDescriptor::NameRegister());
4979 Handle<Code> ic =
4980 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4981 CallIC(ic, expr->CountStoreFeedbackId());
4982 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4983 if (expr->is_postfix()) {
4984 if (!context()->IsEffect()) {
4985 context()->PlugTOS();
4987 } else {
4988 context()->Plug(v0);
4989 }
4990 break;
4996 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4997 DCHECK(!context()->IsEffect());
4998 DCHECK(!context()->IsTest());
4999 VariableProxy* proxy = expr->AsVariableProxy();
5000 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5001 Comment cmnt(masm_, "[ Global variable");
5002 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5003 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5004 if (FLAG_vector_ics) {
5005 __ li(VectorLoadICDescriptor::SlotRegister(),
5006 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5008 // Use a regular load, not a contextual load, to avoid a reference
5009 // error.
5010 CallLoadIC(NOT_CONTEXTUAL);
5011 PrepareForBailout(expr, TOS_REG);
5012 context()->Plug(v0);
5013 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5014 Comment cmnt(masm_, "[ Lookup slot");
5015 Label done, slow;
5017 // Generate code for loading from variables potentially shadowed
5018 // by eval-introduced variables.
5019 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5021 __ bind(&slow);
5022 __ li(a0, Operand(proxy->name()));
5023 __ Push(cp, a0);
5024 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5025 PrepareForBailout(expr, TOS_REG);
5026 __ bind(&done);
5028 context()->Plug(v0);
5029 } else {
5030 // This expression cannot throw a reference error at the top level.
5031 VisitInDuplicateContext(expr);
5035 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5036 Expression* sub_expr,
5037 Handle<String> check) {
5038 Label materialize_true, materialize_false;
5039 Label* if_true = NULL;
5040 Label* if_false = NULL;
5041 Label* fall_through = NULL;
5042 context()->PrepareTest(&materialize_true, &materialize_false,
5043 &if_true, &if_false, &fall_through);
5045 { AccumulatorValueContext context(this);
5046 VisitForTypeofValue(sub_expr);
5048 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5050 Factory* factory = isolate()->factory();
5051 if (String::Equals(check, factory->number_string())) {
5052 __ JumpIfSmi(v0, if_true);
5053 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5054 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5055 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5056 } else if (String::Equals(check, factory->string_string())) {
5057 __ JumpIfSmi(v0, if_false);
5058 // Check for undetectable objects => false.
5059 __ GetObjectType(v0, v0, a1);
5060 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5061 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5062 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5063 Split(eq, a1, Operand(zero_reg),
5064 if_true, if_false, fall_through);
5065 } else if (String::Equals(check, factory->symbol_string())) {
5066 __ JumpIfSmi(v0, if_false);
5067 __ GetObjectType(v0, v0, a1);
5068 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5069 } else if (String::Equals(check, factory->boolean_string())) {
5070 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5071 __ Branch(if_true, eq, v0, Operand(at));
5072 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5073 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5074 } else if (String::Equals(check, factory->undefined_string())) {
5075 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5076 __ Branch(if_true, eq, v0, Operand(at));
5077 __ JumpIfSmi(v0, if_false);
5078 // Check for undetectable objects => true.
5079 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5080 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5081 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5082 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5083 } else if (String::Equals(check, factory->function_string())) {
5084 __ JumpIfSmi(v0, if_false);
5085 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5086 __ GetObjectType(v0, v0, a1);
5087 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5088 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5089 if_true, if_false, fall_through);
5090 } else if (String::Equals(check, factory->object_string())) {
5091 __ JumpIfSmi(v0, if_false);
5092 __ LoadRoot(at, Heap::kNullValueRootIndex);
5093 __ Branch(if_true, eq, v0, Operand(at));
5094 // Check for JS objects => true.
5095 __ GetObjectType(v0, v0, a1);
5096 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5097 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5098 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5099 // Check for undetectable objects => false.
5100 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5101 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5102 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5103 } else {
5104 if (if_false != fall_through) __ jmp(if_false);
5105 }
5106 context()->Plug(if_true, if_false);
5110 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5111 Comment cmnt(masm_, "[ CompareOperation");
5112 SetSourcePosition(expr->position());
5114 // First we try a fast inlined version of the compare when one of
5115 // the operands is a literal.
5116 if (TryLiteralCompare(expr)) return;
5118 // Always perform the comparison for its control flow. Pack the result
5119 // into the expression's context after the comparison is performed.
5120 Label materialize_true, materialize_false;
5121 Label* if_true = NULL;
5122 Label* if_false = NULL;
5123 Label* fall_through = NULL;
5124 context()->PrepareTest(&materialize_true, &materialize_false,
5125 &if_true, &if_false, &fall_through);
5127 Token::Value op = expr->op();
5128 VisitForStackValue(expr->left());
5129 switch (op) {
5130 case Token::IN:
5131 VisitForStackValue(expr->right());
5132 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5133 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5134 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5135 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5136 break;
5138 case Token::INSTANCEOF: {
5139 VisitForStackValue(expr->right());
5140 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5141 __ CallStub(&stub);
5142 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5143 // The stub returns 0 for true.
5144 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5145 break;
5146 }
5148 default: {
5149 VisitForAccumulatorValue(expr->right());
5150 Condition cc = CompareIC::ComputeCondition(op);
5151 __ mov(a0, result_register());
5152 __ pop(a1);
5154 bool inline_smi_code = ShouldInlineSmiCase(op);
5155 JumpPatchSite patch_site(masm_);
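// The patch site lets the CompareIC later patch this check so the inlined
// smi path is skipped once non-smi operands have been seen.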
5156 if (inline_smi_code) {
5157 Label slow_case;
5158 __ Or(a2, a0, Operand(a1));
5159 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5160 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5161 __ bind(&slow_case);
5163 // Record position and call the compare IC.
5164 SetSourcePosition(expr->position());
5165 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5166 CallIC(ic, expr->CompareOperationFeedbackId());
5167 patch_site.EmitPatchInfo();
5168 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5169 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5173 // Convert the result of the comparison into one expected for this
5174 // expression's context.
5175 context()->Plug(if_true, if_false);
5179 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5180 Expression* sub_expr,
5181 Nil nil) {
5182 Label materialize_true, materialize_false;
5183 Label* if_true = NULL;
5184 Label* if_false = NULL;
5185 Label* fall_through = NULL;
5186 context()->PrepareTest(&materialize_true, &materialize_false,
5187 &if_true, &if_false, &fall_through);
5189 VisitForAccumulatorValue(sub_expr);
5190 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5191 __ mov(a0, result_register());
5192 if (expr->op() == Token::EQ_STRICT) {
5193 Heap::RootListIndex nil_value = nil == kNullValue ?
5194 Heap::kNullValueRootIndex :
5195 Heap::kUndefinedValueRootIndex;
5196 __ LoadRoot(a1, nil_value);
5197 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
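// (For a sloppy == comparison the CompareNilIC below is used instead, since
// "== nil" must also match the other nil value and undetectable objects.)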
5199 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5200 CallIC(ic, expr->CompareOperationFeedbackId());
5201 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5203 context()->Plug(if_true, if_false);
5207 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5208 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5209 context()->Plug(v0);
5213 Register FullCodeGenerator::result_register() {
5214 return v0;
5215 }
5218 Register FullCodeGenerator::context_register() {
5219 return cp;
5220 }
5223 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5225 DCHECK(IsAligned(frame_offset, kPointerSize));
5227 __ sd(value, MemOperand(fp, frame_offset));
5231 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5232 __ ld(dst, ContextOperand(cp, context_index));
5236 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5237 Scope* declaration_scope = scope()->DeclarationScope();
5238 if (declaration_scope->is_script_scope() ||
5239 declaration_scope->is_module_scope()) {
5240 // Contexts nested in the native context have a canonical empty function
5241 // as their closure, not the anonymous closure containing the global
5242 // code. Pass a smi sentinel and let the runtime look up the empty
5243 // function.
5244 __ li(at, Operand(Smi::FromInt(0)));
5245 } else if (declaration_scope->is_eval_scope()) {
5246 // Contexts created by a call to eval have the same closure as the
5247 // context calling eval, not the anonymous closure containing the eval
5248 // code. Fetch it from the context.
5249 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5250 } else {
5251 DCHECK(declaration_scope->is_function_scope());
5252 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5253 }
5254 __ push(at);
5258 // ----------------------------------------------------------------------------
5259 // Non-local control flow support.
5261 void FullCodeGenerator::EnterFinallyBlock() {
5262 DCHECK(!result_register().is(a1));
5263 // Store result register while executing finally block.
5264 __ push(result_register());
5265 // Cook return address in link register to stack (smi encoded Code* delta).
5266 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
5267 __ SmiTag(a1);
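// Storing a code-relative delta (as a smi) instead of a raw address keeps the
// value safe if the GC moves the code object.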
5269 // Store the cooked return address while executing the finally block.
5270 __ push(a1);
5272 // Store pending message while executing finally block.
5273 ExternalReference pending_message_obj =
5274 ExternalReference::address_of_pending_message_obj(isolate());
5275 __ li(at, Operand(pending_message_obj));
5276 __ ld(a1, MemOperand(at));
5277 __ push(a1);
5279 ExternalReference has_pending_message =
5280 ExternalReference::address_of_has_pending_message(isolate());
5281 __ li(at, Operand(has_pending_message));
5282 __ ld(a1, MemOperand(at));
5283 __ SmiTag(a1);
5284 __ push(a1);
5286 ExternalReference pending_message_script =
5287 ExternalReference::address_of_pending_message_script(isolate());
5288 __ li(at, Operand(pending_message_script));
5289 __ ld(a1, MemOperand(at));
5290 __ push(a1);
5294 void FullCodeGenerator::ExitFinallyBlock() {
5295 DCHECK(!result_register().is(a1));
5296 // Restore pending message from stack.
5297 __ pop(a1);
5298 ExternalReference pending_message_script =
5299 ExternalReference::address_of_pending_message_script(isolate());
5300 __ li(at, Operand(pending_message_script));
5301 __ sd(a1, MemOperand(at));
5303 __ pop(a1);
5304 __ SmiUntag(a1);
5305 ExternalReference has_pending_message =
5306 ExternalReference::address_of_has_pending_message(isolate());
5307 __ li(at, Operand(has_pending_message));
5308 __ sd(a1, MemOperand(at));
5310 __ pop(a1);
5311 ExternalReference pending_message_obj =
5312 ExternalReference::address_of_pending_message_obj(isolate());
5313 __ li(at, Operand(pending_message_obj));
5314 __ sd(a1, MemOperand(at));
5316 // Restore the cooked return address from the stack.
5317 __ pop(a1);
5319 // Uncook return address and return.
5320 __ pop(result_register());
5322 __ SmiUntag(a1);
5323 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5324 __ Jump(at);
5326 #undef __
5330 #define __ ACCESS_MASM(masm())
5332 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5333 int* stack_depth,
5334 int* context_length) {
5335 // The macros used here must preserve the result register.
5337 // Because the handler block contains the context of the finally
5338 // code, we can restore it directly from there for the finally code
5339 // rather than iteratively unwinding contexts via their previous
5341 __ Drop(*stack_depth); // Down to the handler block.
5342 if (*context_length > 0) {
5343 // Restore the context to its dedicated register and the stack.
5344 __ ld(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
5345 __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5346 }
5347 __ PopTryHandler();
5348 __ Call(finally_entry_);
5350 *stack_depth = 0;
5351 *context_length = 0;
5352 return previous_;
5359 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5360 Address pc,
5361 BackEdgeState target_state,
5362 Code* replacement_code) {
5363 static const int kInstrSize = Assembler::kInstrSize;
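// pc points just past the back edge sequence (the ok-label in the comments
// below), so the patched slt/daddiu sits 8 instructions back and the
// immediate load of the target address starts 6 instructions back.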
5364 Address branch_address = pc - 8 * kInstrSize;
5365 CodePatcher patcher(branch_address, 1);
5367 switch (target_state) {
5368 case INTERRUPT:
5369 // slt at, a3, zero_reg (in case of count based interrupts)
5370 // beq at, zero_reg, ok
5371 // lui t9, <interrupt stub address> upper
5372 // ori t9, <interrupt stub address> u-middle
5373 // dsll t9, t9, 16
5374 // ori t9, <interrupt stub address> lower
5375 // jalr t9
5376 // nop
5377 // ok-label ----- pc_after points here
5378 patcher.masm()->slt(at, a3, zero_reg);
5379 break;
5380 case ON_STACK_REPLACEMENT:
5381 case OSR_AFTER_STACK_CHECK:
5382 // addiu at, zero_reg, 1
5383 // beq at, zero_reg, ok ;; Not changed
5384 // lui t9, <on-stack replacement address> upper
5385 // ori t9, <on-stack replacement address> middle
5386 // dsll t9, t9, 16
5387 // ori t9, <on-stack replacement address> lower
5388 // jalr t9 ;; Not changed
5389 // nop ;; Not changed
5390 // ok-label ----- pc_after points here
5391 patcher.masm()->daddiu(at, zero_reg, 1);
5392 break;
5393 }
5394 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5395 // Replace the stack check address in the load-immediate (6-instr sequence)
5396 // with the entry address of the replacement code.
5397 Assembler::set_target_address_at(pc_immediate_load_address,
5398 replacement_code->entry());
5400 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5401 unoptimized_code, pc_immediate_load_address, replacement_code);
5405 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5406 Isolate* isolate,
5407 Code* unoptimized_code,
5408 Address pc) {
5409 static const int kInstrSize = Assembler::kInstrSize;
5410 Address branch_address = pc - 8 * kInstrSize;
5411 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5413 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5414 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5415 DCHECK(reinterpret_cast<uint64_t>(
5416 Assembler::target_address_at(pc_immediate_load_address)) ==
5417 reinterpret_cast<uint64_t>(
5418 isolate->builtins()->InterruptCheck()->entry()));
5419 return INTERRUPT;
5420 }
5422 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5424 if (reinterpret_cast<uint64_t>(
5425 Assembler::target_address_at(pc_immediate_load_address)) ==
5426 reinterpret_cast<uint64_t>(
5427 isolate->builtins()->OnStackReplacement()->entry())) {
5428 return ON_STACK_REPLACEMENT;
5429 }
5431 DCHECK(reinterpret_cast<uint64_t>(
5432 Assembler::target_address_at(pc_immediate_load_address)) ==
5433 reinterpret_cast<uint64_t>(
5434 isolate->builtins()->OsrAfterStackCheck()->entry()));
5435 return OSR_AFTER_STACK_CHECK;
5439 } } // namespace v8::internal
5441 #endif // V8_TARGET_ARCH_MIPS64