1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_MIPS64
9 // Note on Mips implementation:
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/parser.h"
25 #include "src/scopes.h"
27 #include "src/mips64/code-stubs-mips64.h"
28 #include "src/mips64/macro-assembler-mips64.h"
33 #define __ ACCESS_MASM(masm_)
36 // A patch site is a location in the code which it is possible to patch. This
37 // class has a number of methods to emit the code which is patchable and the
38 // method EmitPatchInfo to record a marker back to the patchable code. This
39 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
40 // (raw 16 bit immediate value is used) is the delta from the pc to the first
41 // instruction of the patchable code.
42 // The marker instruction is effectively a NOP (dest is zero_reg) and will
43 // never be emitted by normal code.
44 class JumpPatchSite BASE_EMBEDDED {
46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
48 info_emitted_ = false;
53 DCHECK(patch_site_.is_bound() == info_emitted_);
56 // When initially emitting this ensure that a jump is always generated to skip
57 // the inlined smi code.
58 void EmitJumpIfNotSmi(Register reg, Label* target) {
59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
61 __ bind(&patch_site_);
63 // Always taken before patched.
64 __ BranchShort(target, eq, at, Operand(zero_reg));
67 // When initially emitting this ensure that a jump is never generated to skip
68 // the inlined smi code.
69 void EmitJumpIfSmi(Register reg, Label* target) {
70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
71 DCHECK(!patch_site_.is_bound() && !info_emitted_);
72 __ bind(&patch_site_);
74 // Never taken before patched.
75 __ BranchShort(target, ne, at, Operand(zero_reg));
78 void EmitPatchInfo() {
79 if (patch_site_.is_bound()) {
80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
82 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
87 __ nop(); // Signals no inlined code.
92 MacroAssembler* masm_;
100 // Generate code for a JS function. On entry to the function the receiver
101 // and arguments have been pushed on the stack left to right. The actual
102 // argument count matches the formal parameter count expected by the
105 // The live registers are:
106 // o a1: the JS function object being called (i.e. ourselves)
108 // o fp: our caller's frame pointer
109 // o sp: stack pointer
110 // o ra: return address
112 // The function builds a JS frame. Please see JavaScriptFrameConstants in
113 // frames-mips.h for its layout.
114 void FullCodeGenerator::Generate() {
115 CompilationInfo* info = info_;
116 profiling_counter_ = isolate()->factory()->NewCell(
117 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
118 SetFunctionPosition(function());
119 Comment cmnt(masm_, "[ function compiled by full code generator");
121 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
124 if (strlen(FLAG_stop_at) > 0 &&
125 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
130 // Sloppy mode functions and builtins need to replace the receiver with the
131 // global proxy when called as functions (without an explicit receiver
133 if (is_sloppy(info->language_mode()) && !info->is_native() &&
134 info->MayUseThis() && info->scope()->has_this_declaration()) {
136 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
137 __ ld(at, MemOperand(sp, receiver_offset));
138 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
139 __ Branch(&ok, ne, a2, Operand(at));
141 __ ld(a2, GlobalObjectOperand());
142 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
144 __ sd(a2, MemOperand(sp, receiver_offset));
147 // Open a frame scope to indicate that there is a frame on the stack. The
148 // MANUAL indicates that the scope shouldn't actually generate code to set up
149 // the frame (that is done below).
150 FrameScope frame_scope(masm_, StackFrame::MANUAL);
151 info->set_prologue_offset(masm_->pc_offset());
152 __ Prologue(info->IsCodePreAgingActive());
153 info->AddNoFrameRange(0, masm_->pc_offset());
155 { Comment cmnt(masm_, "[ Allocate locals");
156 int locals_count = info->scope()->num_stack_slots();
157 // Generators allocate locals, if any, in context slots.
158 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
159 if (locals_count > 0) {
160 if (locals_count >= 128) {
162 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
163 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
164 __ Branch(&ok, hs, t1, Operand(a2));
165 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
168 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
169 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ li(a2, Operand(loop_iterations));
174 __ bind(&loop_header);
176 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
177 for (int i = 0; i < kMaxPushes; i++) {
178 __ sd(t1, MemOperand(sp, i * kPointerSize));
180 // Continue loop if not done.
181 __ Dsubu(a2, a2, Operand(1));
182 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
184 int remaining = locals_count % kMaxPushes;
185 // Emit the remaining pushes.
186 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
187 for (int i = 0; i < remaining; i++) {
188 __ sd(t1, MemOperand(sp, i * kPointerSize));
193 bool function_in_register = true;
195 // Possibly allocate a local context.
196 if (info->scope()->num_heap_slots() > 0) {
197 Comment cmnt(masm_, "[ Allocate context");
198 // Argument to NewContext is the function, which is still in a1.
199 bool need_write_barrier = true;
200 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201 if (info->scope()->is_script_scope()) {
203 __ Push(info->scope()->GetScopeInfo(info->isolate()));
204 __ CallRuntime(Runtime::kNewScriptContext, 2);
205 } else if (slots <= FastNewContextStub::kMaximumSlots) {
206 FastNewContextStub stub(isolate(), slots);
208 // Result of FastNewContextStub is always in new space.
209 need_write_barrier = false;
212 __ CallRuntime(Runtime::kNewFunctionContext, 1);
214 function_in_register = false;
215 // Context is returned in v0. It replaces the context passed to us.
216 // It's saved in the stack and kept live in cp.
218 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
219 // Copy any necessary parameters into the context.
220 int num_parameters = info->scope()->num_parameters();
221 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
222 for (int i = first_parameter; i < num_parameters; i++) {
223 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
224 if (var->IsContextSlot()) {
225 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
226 (num_parameters - 1 - i) * kPointerSize;
227 // Load parameter from stack.
228 __ ld(a0, MemOperand(fp, parameter_offset));
229 // Store it in the context.
230 MemOperand target = ContextOperand(cp, var->index());
233 // Update the write barrier.
234 if (need_write_barrier) {
235 __ RecordWriteContextSlot(
236 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
237 } else if (FLAG_debug_code) {
239 __ JumpIfInNewSpace(cp, a0, &done);
240 __ Abort(kExpectedNewSpaceObject);
247 // Possibly set up a local binding to the this function which is used in
248 // derived constructors with super calls.
249 Variable* this_function_var = scope()->this_function_var();
250 if (this_function_var != nullptr) {
251 Comment cmnt(masm_, "[ This function");
252 if (!function_in_register) {
253 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
254 // The write barrier clobbers register again, keep is marked as such.
256 SetVar(this_function_var, a1, a2, a3);
259 Variable* new_target_var = scope()->new_target_var();
260 if (new_target_var != nullptr) {
261 Comment cmnt(masm_, "[ new.target");
262 // Get the frame pointer for the calling frame.
263 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
265 // Skip the arguments adaptor frame if it exists.
266 Label check_frame_marker;
267 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
268 __ Branch(&check_frame_marker, ne, a1,
269 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
270 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
272 // Check the marker in the calling frame.
273 __ bind(&check_frame_marker);
274 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
276 Label non_construct_frame, done;
277 __ Branch(&non_construct_frame, ne, a1,
278 Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
281 MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
284 __ bind(&non_construct_frame);
285 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
288 SetVar(new_target_var, v0, a2, a3);
291 // Possibly allocate RestParameters
293 Variable* rest_param = scope()->rest_parameter(&rest_index);
295 Comment cmnt(masm_, "[ Allocate rest parameter array");
297 int num_parameters = info->scope()->num_parameters();
298 int offset = num_parameters * kPointerSize;
301 Operand(StandardFrameConstants::kCallerSPOffset + offset));
302 __ li(a2, Operand(Smi::FromInt(num_parameters)));
303 __ li(a1, Operand(Smi::FromInt(rest_index)));
304 __ li(a0, Operand(Smi::FromInt(language_mode())));
305 __ Push(a3, a2, a1, a0);
307 RestParamAccessStub stub(isolate());
310 SetVar(rest_param, v0, a1, a2);
313 Variable* arguments = scope()->arguments();
314 if (arguments != NULL) {
315 // Function uses arguments object.
316 Comment cmnt(masm_, "[ Allocate arguments object");
317 if (!function_in_register) {
318 // Load this again, if it's used by the local context below.
319 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
323 // Receiver is just before the parameters on the caller's stack.
324 int num_parameters = info->scope()->num_parameters();
325 int offset = num_parameters * kPointerSize;
327 Operand(StandardFrameConstants::kCallerSPOffset + offset));
328 __ li(a1, Operand(Smi::FromInt(num_parameters)));
331 // Arguments to ArgumentsAccessStub:
332 // function, receiver address, parameter count.
333 // The stub will rewrite receiever and parameter count if the previous
334 // stack frame was an arguments adapter frame.
335 ArgumentsAccessStub::Type type;
336 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
337 type = ArgumentsAccessStub::NEW_STRICT;
338 } else if (function()->has_duplicate_parameters()) {
339 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
341 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
343 ArgumentsAccessStub stub(isolate(), type);
346 SetVar(arguments, v0, a1, a2);
350 __ CallRuntime(Runtime::kTraceEnter, 0);
352 // Visit the declarations and body unless there is an illegal
354 if (scope()->HasIllegalRedeclaration()) {
355 Comment cmnt(masm_, "[ Declarations");
356 scope()->VisitIllegalRedeclaration(this);
359 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
360 { Comment cmnt(masm_, "[ Declarations");
361 // For named function expressions, declare the function name as a
363 if (scope()->is_function_scope() && scope()->function() != NULL) {
364 VariableDeclaration* function = scope()->function();
365 DCHECK(function->proxy()->var()->mode() == CONST ||
366 function->proxy()->var()->mode() == CONST_LEGACY);
367 DCHECK(!function->proxy()->var()->IsUnallocatedOrGlobalSlot());
368 VisitVariableDeclaration(function);
370 VisitDeclarations(scope()->declarations());
372 { Comment cmnt(masm_, "[ Stack check");
373 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
375 __ LoadRoot(at, Heap::kStackLimitRootIndex);
376 __ Branch(&ok, hs, sp, Operand(at));
377 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
378 PredictableCodeSizeScope predictable(masm_,
379 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
380 __ Call(stack_check, RelocInfo::CODE_TARGET);
384 { Comment cmnt(masm_, "[ Body");
385 DCHECK(loop_depth() == 0);
387 VisitStatements(function()->body());
389 DCHECK(loop_depth() == 0);
393 // Always emit a 'return undefined' in case control fell off the end of
395 { Comment cmnt(masm_, "[ return <undefined>;");
396 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
398 EmitReturnSequence();
402 void FullCodeGenerator::ClearAccumulator() {
403 DCHECK(Smi::FromInt(0) == 0);
404 __ mov(v0, zero_reg);
408 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
409 __ li(a2, Operand(profiling_counter_));
410 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
411 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
412 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
416 void FullCodeGenerator::EmitProfilingCounterReset() {
417 int reset_value = FLAG_interrupt_budget;
418 if (info_->is_debug()) {
419 // Detect debug break requests as soon as possible.
420 reset_value = FLAG_interrupt_budget >> 4;
422 __ li(a2, Operand(profiling_counter_));
423 __ li(a3, Operand(Smi::FromInt(reset_value)));
424 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
428 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
429 Label* back_edge_target) {
430 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
431 // to make sure it is constant. Branch may emit a skip-or-jump sequence
432 // instead of the normal Branch. It seems that the "skip" part of that
433 // sequence is about as long as this Branch would be so it is safe to ignore
435 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
436 Comment cmnt(masm_, "[ Back edge bookkeeping");
438 DCHECK(back_edge_target->is_bound());
439 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
440 int weight = Min(kMaxBackEdgeWeight,
441 Max(1, distance / kCodeSizeMultiplier));
442 EmitProfilingCounterDecrement(weight);
443 __ slt(at, a3, zero_reg);
444 __ beq(at, zero_reg, &ok);
445 // Call will emit a li t9 first, so it is safe to use the delay slot.
446 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
447 // Record a mapping of this PC offset to the OSR id. This is used to find
448 // the AST id from the unoptimized code in order to use it as a key into
449 // the deoptimization input data found in the optimized code.
450 RecordBackEdge(stmt->OsrEntryId());
451 EmitProfilingCounterReset();
454 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
455 // Record a mapping of the OSR id to this PC. This is used if the OSR
456 // entry becomes the target of a bailout. We don't expect it to be, but
457 // we want it to work if it is.
458 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
462 void FullCodeGenerator::EmitReturnSequence() {
463 Comment cmnt(masm_, "[ Return sequence");
464 if (return_label_.is_bound()) {
465 __ Branch(&return_label_);
467 __ bind(&return_label_);
469 // Push the return value on the stack as the parameter.
470 // Runtime::TraceExit returns its parameter in v0.
472 __ CallRuntime(Runtime::kTraceExit, 1);
474 // Pretend that the exit is a backwards jump to the entry.
476 if (info_->ShouldSelfOptimize()) {
477 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
479 int distance = masm_->pc_offset();
480 weight = Min(kMaxBackEdgeWeight,
481 Max(1, distance / kCodeSizeMultiplier));
483 EmitProfilingCounterDecrement(weight);
485 __ Branch(&ok, ge, a3, Operand(zero_reg));
487 __ Call(isolate()->builtins()->InterruptCheck(),
488 RelocInfo::CODE_TARGET);
490 EmitProfilingCounterReset();
493 // Make sure that the constant pool is not emitted inside of the return
495 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
496 // Here we use masm_-> instead of the __ macro to avoid the code coverage
497 // tool from instrumenting as we rely on the code size here.
498 int32_t arg_count = info_->scope()->num_parameters() + 1;
499 int32_t sp_delta = arg_count * kPointerSize;
500 SetReturnPosition(function());
502 int no_frame_start = masm_->pc_offset();
503 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
504 masm_->Daddu(sp, sp, Operand(sp_delta));
506 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
512 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
513 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
517 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
518 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
519 codegen()->GetVar(result_register(), var);
523 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
524 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
525 codegen()->GetVar(result_register(), var);
526 __ push(result_register());
530 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
531 // For simplicity we always test the accumulator register.
532 codegen()->GetVar(result_register(), var);
533 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
534 codegen()->DoTest(this);
538 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
542 void FullCodeGenerator::AccumulatorValueContext::Plug(
543 Heap::RootListIndex index) const {
544 __ LoadRoot(result_register(), index);
548 void FullCodeGenerator::StackValueContext::Plug(
549 Heap::RootListIndex index) const {
550 __ LoadRoot(result_register(), index);
551 __ push(result_register());
555 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
556 codegen()->PrepareForBailoutBeforeSplit(condition(),
560 if (index == Heap::kUndefinedValueRootIndex ||
561 index == Heap::kNullValueRootIndex ||
562 index == Heap::kFalseValueRootIndex) {
563 if (false_label_ != fall_through_) __ Branch(false_label_);
564 } else if (index == Heap::kTrueValueRootIndex) {
565 if (true_label_ != fall_through_) __ Branch(true_label_);
567 __ LoadRoot(result_register(), index);
568 codegen()->DoTest(this);
573 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
577 void FullCodeGenerator::AccumulatorValueContext::Plug(
578 Handle<Object> lit) const {
579 __ li(result_register(), Operand(lit));
583 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
584 // Immediates cannot be pushed directly.
585 __ li(result_register(), Operand(lit));
586 __ push(result_register());
590 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
591 codegen()->PrepareForBailoutBeforeSplit(condition(),
595 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
596 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
597 if (false_label_ != fall_through_) __ Branch(false_label_);
598 } else if (lit->IsTrue() || lit->IsJSObject()) {
599 if (true_label_ != fall_through_) __ Branch(true_label_);
600 } else if (lit->IsString()) {
601 if (String::cast(*lit)->length() == 0) {
602 if (false_label_ != fall_through_) __ Branch(false_label_);
604 if (true_label_ != fall_through_) __ Branch(true_label_);
606 } else if (lit->IsSmi()) {
607 if (Smi::cast(*lit)->value() == 0) {
608 if (false_label_ != fall_through_) __ Branch(false_label_);
610 if (true_label_ != fall_through_) __ Branch(true_label_);
613 // For simplicity we always test the accumulator register.
614 __ li(result_register(), Operand(lit));
615 codegen()->DoTest(this);
620 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
621 Register reg) const {
627 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
629 Register reg) const {
632 __ Move(result_register(), reg);
636 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
637 Register reg) const {
639 if (count > 1) __ Drop(count - 1);
640 __ sd(reg, MemOperand(sp, 0));
644 void FullCodeGenerator::TestContext::DropAndPlug(int count,
645 Register reg) const {
647 // For simplicity we always test the accumulator register.
649 __ Move(result_register(), reg);
650 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
651 codegen()->DoTest(this);
655 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
656 Label* materialize_false) const {
657 DCHECK(materialize_true == materialize_false);
658 __ bind(materialize_true);
662 void FullCodeGenerator::AccumulatorValueContext::Plug(
663 Label* materialize_true,
664 Label* materialize_false) const {
666 __ bind(materialize_true);
667 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
669 __ bind(materialize_false);
670 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
675 void FullCodeGenerator::StackValueContext::Plug(
676 Label* materialize_true,
677 Label* materialize_false) const {
679 __ bind(materialize_true);
680 __ LoadRoot(at, Heap::kTrueValueRootIndex);
681 // Push the value as the following branch can clobber at in long branch mode.
684 __ bind(materialize_false);
685 __ LoadRoot(at, Heap::kFalseValueRootIndex);
691 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
692 Label* materialize_false) const {
693 DCHECK(materialize_true == true_label_);
694 DCHECK(materialize_false == false_label_);
698 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
702 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
703 Heap::RootListIndex value_root_index =
704 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
705 __ LoadRoot(result_register(), value_root_index);
709 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
710 Heap::RootListIndex value_root_index =
711 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
712 __ LoadRoot(at, value_root_index);
717 void FullCodeGenerator::TestContext::Plug(bool flag) const {
718 codegen()->PrepareForBailoutBeforeSplit(condition(),
723 if (true_label_ != fall_through_) __ Branch(true_label_);
725 if (false_label_ != fall_through_) __ Branch(false_label_);
730 void FullCodeGenerator::DoTest(Expression* condition,
733 Label* fall_through) {
734 __ mov(a0, result_register());
735 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
736 CallIC(ic, condition->test_id());
737 __ mov(at, zero_reg);
738 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
742 void FullCodeGenerator::Split(Condition cc,
747 Label* fall_through) {
748 if (if_false == fall_through) {
749 __ Branch(if_true, cc, lhs, rhs);
750 } else if (if_true == fall_through) {
751 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
753 __ Branch(if_true, cc, lhs, rhs);
759 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
760 DCHECK(var->IsStackAllocated());
761 // Offset is negative because higher indexes are at lower addresses.
762 int offset = -var->index() * kPointerSize;
763 // Adjust by a (parameter or local) base offset.
764 if (var->IsParameter()) {
765 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
767 offset += JavaScriptFrameConstants::kLocal0Offset;
769 return MemOperand(fp, offset);
773 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
774 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
775 if (var->IsContextSlot()) {
776 int context_chain_length = scope()->ContextChainLength(var->scope());
777 __ LoadContext(scratch, context_chain_length);
778 return ContextOperand(scratch, var->index());
780 return StackOperand(var);
785 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
786 // Use destination as scratch.
787 MemOperand location = VarOperand(var, dest);
788 __ ld(dest, location);
792 void FullCodeGenerator::SetVar(Variable* var,
796 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
797 DCHECK(!scratch0.is(src));
798 DCHECK(!scratch0.is(scratch1));
799 DCHECK(!scratch1.is(src));
800 MemOperand location = VarOperand(var, scratch0);
801 __ sd(src, location);
802 // Emit the write barrier code if the location is in the heap.
803 if (var->IsContextSlot()) {
804 __ RecordWriteContextSlot(scratch0,
814 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
815 bool should_normalize,
818 // Only prepare for bailouts before splits if we're in a test
819 // context. Otherwise, we let the Visit function deal with the
820 // preparation to avoid preparing with the same AST id twice.
821 if (!context()->IsTest() || !info_->IsOptimizable()) return;
824 if (should_normalize) __ Branch(&skip);
825 PrepareForBailout(expr, TOS_REG);
826 if (should_normalize) {
827 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
828 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
834 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
835 // The variable in the declaration always resides in the current function
837 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
838 if (generate_debug_code_) {
839 // Check that we're not inside a with or catch context.
840 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
841 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
842 __ Check(ne, kDeclarationInWithContext,
844 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
845 __ Check(ne, kDeclarationInCatchContext,
851 void FullCodeGenerator::VisitVariableDeclaration(
852 VariableDeclaration* declaration) {
853 // If it was not possible to allocate the variable at compile time, we
854 // need to "declare" it at runtime to make sure it actually exists in the
856 VariableProxy* proxy = declaration->proxy();
857 VariableMode mode = declaration->mode();
858 Variable* variable = proxy->var();
859 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
860 switch (variable->location()) {
861 case VariableLocation::GLOBAL:
862 case VariableLocation::UNALLOCATED:
863 globals_->Add(variable->name(), zone());
864 globals_->Add(variable->binding_needs_init()
865 ? isolate()->factory()->the_hole_value()
866 : isolate()->factory()->undefined_value(),
870 case VariableLocation::PARAMETER:
871 case VariableLocation::LOCAL:
873 Comment cmnt(masm_, "[ VariableDeclaration");
874 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
875 __ sd(a4, StackOperand(variable));
879 case VariableLocation::CONTEXT:
881 Comment cmnt(masm_, "[ VariableDeclaration");
882 EmitDebugCheckDeclarationContext(variable);
883 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
884 __ sd(at, ContextOperand(cp, variable->index()));
885 // No write barrier since the_hole_value is in old space.
886 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
890 case VariableLocation::LOOKUP: {
891 Comment cmnt(masm_, "[ VariableDeclaration");
892 __ li(a2, Operand(variable->name()));
893 // Declaration nodes are always introduced in one of four modes.
894 DCHECK(IsDeclaredVariableMode(mode));
895 PropertyAttributes attr =
896 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
897 __ li(a1, Operand(Smi::FromInt(attr)));
898 // Push initial value, if any.
899 // Note: For variables we must not push an initial value (such as
900 // 'undefined') because we may have a (legal) redeclaration and we
901 // must not destroy the current value.
903 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
904 __ Push(cp, a2, a1, a0);
906 DCHECK(Smi::FromInt(0) == 0);
907 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
908 __ Push(cp, a2, a1, a0);
910 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
917 void FullCodeGenerator::VisitFunctionDeclaration(
918 FunctionDeclaration* declaration) {
919 VariableProxy* proxy = declaration->proxy();
920 Variable* variable = proxy->var();
921 switch (variable->location()) {
922 case VariableLocation::GLOBAL:
923 case VariableLocation::UNALLOCATED: {
924 globals_->Add(variable->name(), zone());
925 Handle<SharedFunctionInfo> function =
926 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
927 // Check for stack-overflow exception.
928 if (function.is_null()) return SetStackOverflow();
929 globals_->Add(function, zone());
933 case VariableLocation::PARAMETER:
934 case VariableLocation::LOCAL: {
935 Comment cmnt(masm_, "[ FunctionDeclaration");
936 VisitForAccumulatorValue(declaration->fun());
937 __ sd(result_register(), StackOperand(variable));
941 case VariableLocation::CONTEXT: {
942 Comment cmnt(masm_, "[ FunctionDeclaration");
943 EmitDebugCheckDeclarationContext(variable);
944 VisitForAccumulatorValue(declaration->fun());
945 __ sd(result_register(), ContextOperand(cp, variable->index()));
946 int offset = Context::SlotOffset(variable->index());
947 // We know that we have written a function, which is not a smi.
948 __ RecordWriteContextSlot(cp,
956 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
960 case VariableLocation::LOOKUP: {
961 Comment cmnt(masm_, "[ FunctionDeclaration");
962 __ li(a2, Operand(variable->name()));
963 __ li(a1, Operand(Smi::FromInt(NONE)));
965 // Push initial value for function declaration.
966 VisitForStackValue(declaration->fun());
967 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
974 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
975 VariableProxy* proxy = declaration->proxy();
976 Variable* variable = proxy->var();
977 switch (variable->location()) {
978 case VariableLocation::GLOBAL:
979 case VariableLocation::UNALLOCATED:
983 case VariableLocation::CONTEXT: {
984 Comment cmnt(masm_, "[ ImportDeclaration");
985 EmitDebugCheckDeclarationContext(variable);
990 case VariableLocation::PARAMETER:
991 case VariableLocation::LOCAL:
992 case VariableLocation::LOOKUP:
998 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1003 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1004 // Call the runtime to declare the globals.
1005 // The context is the first argument.
1006 __ li(a1, Operand(pairs));
1007 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
1008 __ Push(cp, a1, a0);
1009 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1010 // Return value is ignored.
1014 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1015 // Call the runtime to declare the modules.
1016 __ Push(descriptions);
1017 __ CallRuntime(Runtime::kDeclareModules, 1);
1018 // Return value is ignored.
1022 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1023 Comment cmnt(masm_, "[ SwitchStatement");
1024 Breakable nested_statement(this, stmt);
1025 SetStatementPosition(stmt);
1027 // Keep the switch value on the stack until a case matches.
1028 VisitForStackValue(stmt->tag());
1029 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1031 ZoneList<CaseClause*>* clauses = stmt->cases();
1032 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1034 Label next_test; // Recycled for each test.
1035 // Compile all the tests with branches to their bodies.
1036 for (int i = 0; i < clauses->length(); i++) {
1037 CaseClause* clause = clauses->at(i);
1038 clause->body_target()->Unuse();
1040 // The default is not a test, but remember it as final fall through.
1041 if (clause->is_default()) {
1042 default_clause = clause;
1046 Comment cmnt(masm_, "[ Case comparison");
1047 __ bind(&next_test);
1050 // Compile the label expression.
1051 VisitForAccumulatorValue(clause->label());
1052 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1054 // Perform the comparison as if via '==='.
1055 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1056 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1057 JumpPatchSite patch_site(masm_);
1058 if (inline_smi_code) {
1061 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1063 __ Branch(&next_test, ne, a1, Operand(a0));
1064 __ Drop(1); // Switch value is no longer needed.
1065 __ Branch(clause->body_target());
1067 __ bind(&slow_case);
1070 // Record position before stub call for type feedback.
1071 SetExpressionPosition(clause);
1072 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1073 strength(language_mode())).code();
1074 CallIC(ic, clause->CompareId());
1075 patch_site.EmitPatchInfo();
1079 PrepareForBailout(clause, TOS_REG);
1080 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1081 __ Branch(&next_test, ne, v0, Operand(at));
1083 __ Branch(clause->body_target());
1086 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1087 __ Drop(1); // Switch value is no longer needed.
1088 __ Branch(clause->body_target());
1091 // Discard the test value and jump to the default if present, otherwise to
1092 // the end of the statement.
1093 __ bind(&next_test);
1094 __ Drop(1); // Switch value is no longer needed.
1095 if (default_clause == NULL) {
1096 __ Branch(nested_statement.break_label());
1098 __ Branch(default_clause->body_target());
1101 // Compile all the case bodies.
1102 for (int i = 0; i < clauses->length(); i++) {
1103 Comment cmnt(masm_, "[ Case body");
1104 CaseClause* clause = clauses->at(i);
1105 __ bind(clause->body_target());
1106 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1107 VisitStatements(clause->statements());
1110 __ bind(nested_statement.break_label());
1111 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1115 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1116 Comment cmnt(masm_, "[ ForInStatement");
1117 SetStatementPosition(stmt, SKIP_BREAK);
1119 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1122 ForIn loop_statement(this, stmt);
1123 increment_loop_depth();
1125 // Get the object to enumerate over. If the object is null or undefined, skip
1126 // over the loop. See ECMA-262 version 5, section 12.6.4.
1127 SetExpressionAsStatementPosition(stmt->enumerable());
1128 VisitForAccumulatorValue(stmt->enumerable());
1129 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1130 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1131 __ Branch(&exit, eq, a0, Operand(at));
1132 Register null_value = a5;
1133 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1134 __ Branch(&exit, eq, a0, Operand(null_value));
1135 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1137 // Convert the object to a JS object.
1138 Label convert, done_convert;
1139 __ JumpIfSmi(a0, &convert);
1140 __ GetObjectType(a0, a1, a1);
1141 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1144 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1146 __ bind(&done_convert);
1147 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1150 // Check for proxies.
1152 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1153 __ GetObjectType(a0, a1, a1);
1154 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1156 // Check cache validity in generated code. This is a fast case for
1157 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1158 // guarantee cache validity, call the runtime system to check cache
1159 // validity or get the property names in a fixed array.
1160 __ CheckEnumCache(null_value, &call_runtime);
1162 // The enum cache is valid. Load the map of the object being
1163 // iterated over and use the cache for the iteration.
1165 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1166 __ Branch(&use_cache);
1168 // Get the set of properties to enumerate.
1169 __ bind(&call_runtime);
1170 __ push(a0); // Duplicate the enumerable object on the stack.
1171 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1172 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1174 // If we got a map from the runtime call, we can do a fast
1175 // modification check. Otherwise, we got a fixed array, and we have
1176 // to do a slow check.
1178 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1179 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1180 __ Branch(&fixed_array, ne, a2, Operand(at));
1182 // We got a map in register v0. Get the enumeration cache from it.
1183 Label no_descriptors;
1184 __ bind(&use_cache);
1186 __ EnumLength(a1, v0);
1187 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1189 __ LoadInstanceDescriptors(v0, a2);
1190 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1191 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1193 // Set up the four remaining stack slots.
1194 __ li(a0, Operand(Smi::FromInt(0)));
1195 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1196 __ Push(v0, a2, a1, a0);
1199 __ bind(&no_descriptors);
1203 // We got a fixed array in register v0. Iterate through that.
1205 __ bind(&fixed_array);
1207 __ li(a1, FeedbackVector());
1208 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1209 int vector_index = FeedbackVector()->GetIndex(slot);
1210 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1212 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1213 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1214 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1215 __ GetObjectType(a2, a3, a3);
1216 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1217 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1218 __ bind(&non_proxy);
1219 __ Push(a1, v0); // Smi and array
1220 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1221 __ li(a0, Operand(Smi::FromInt(0)));
1222 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1224 // Generate code for doing the condition check.
1225 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1227 SetExpressionAsStatementPosition(stmt->each());
1229 // Load the current count to a0, load the length to a1.
1230 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1231 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1232 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1234 // Get the current entry of the array into register a3.
1235 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1236 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1237 __ SmiScale(a4, a0, kPointerSizeLog2);
1238 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1239 __ ld(a3, MemOperand(a4)); // Current entry.
1241 // Get the expected map from the stack or a smi in the
1242 // permanent slow case into register a2.
1243 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1245 // Check if the expected map still matches that of the enumerable.
1246 // If not, we may have to filter the key.
1248 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1249 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1250 __ Branch(&update_each, eq, a4, Operand(a2));
1252 // For proxies, no filtering is done.
1253 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1254 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1255 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1257 // Convert the entry to a string or (smi) 0 if it isn't a property
1258 // any more. If the property has been removed while iterating, we
1260 __ Push(a1, a3); // Enumerable and current entry.
1261 __ CallRuntime(Runtime::kForInFilter, 2);
1262 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1263 __ mov(a3, result_register());
1264 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1265 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1267 // Update the 'each' property or variable from the possibly filtered
1268 // entry in register a3.
1269 __ bind(&update_each);
1270 __ mov(result_register(), a3);
1271 // Perform the assignment as if via '='.
1272 { EffectContext context(this);
1273 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1274 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1277 // Generate code for the body of the loop.
1278 Visit(stmt->body());
1280 // Generate code for the going to the next element by incrementing
1281 // the index (smi) stored on top of the stack.
1282 __ bind(loop_statement.continue_label());
1284 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1287 EmitBackEdgeBookkeeping(stmt, &loop);
1290 // Remove the pointers stored on the stack.
1291 __ bind(loop_statement.break_label());
1294 // Exit and decrement the loop depth.
1295 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1297 decrement_loop_depth();
1301 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1303 // Use the fast case closure allocation code that allocates in new
1304 // space for nested functions that don't need literals cloning. If
1305 // we're running with the --always-opt or the --prepare-always-opt
1306 // flag, we need to use the runtime function so that the new function
1307 // we are creating here gets a chance to have its code optimized and
1308 // doesn't just get a copy of the existing unoptimized code.
1309 if (!FLAG_always_opt &&
1310 !FLAG_prepare_always_opt &&
1312 scope()->is_function_scope() &&
1313 info->num_literals() == 0) {
1314 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1315 __ li(a2, Operand(info));
1318 __ li(a0, Operand(info));
1319 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1320 : Heap::kFalseValueRootIndex);
1321 __ Push(cp, a0, a1);
1322 __ CallRuntime(Runtime::kNewClosure, 3);
1324 context()->Plug(v0);
1328 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1329 Comment cmnt(masm_, "[ VariableProxy");
1330 EmitVariableLoad(expr);
1334 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1336 FeedbackVectorICSlot slot) {
1337 if (NeedsHomeObject(initializer)) {
1338 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1339 __ li(StoreDescriptor::NameRegister(),
1340 Operand(isolate()->factory()->home_object_symbol()));
1341 __ ld(StoreDescriptor::ValueRegister(),
1342 MemOperand(sp, offset * kPointerSize));
1343 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1349 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1350 TypeofMode typeof_mode,
1352 Register current = cp;
1358 if (s->num_heap_slots() > 0) {
1359 if (s->calls_sloppy_eval()) {
1360 // Check that extension is NULL.
1361 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1362 __ Branch(slow, ne, temp, Operand(zero_reg));
1364 // Load next context in chain.
1365 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1366 // Walk the rest of the chain without clobbering cp.
1369 // If no outer scope calls eval, we do not need to check more
1370 // context extensions.
1371 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1372 s = s->outer_scope();
1375 if (s->is_eval_scope()) {
1377 if (!current.is(next)) {
1378 __ Move(next, current);
1381 // Terminate at native context.
1382 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1383 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1384 __ Branch(&fast, eq, temp, Operand(a4));
1385 // Check that extension is NULL.
1386 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1387 __ Branch(slow, ne, temp, Operand(zero_reg));
1388 // Load next context in chain.
1389 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1394 // All extension objects were empty and it is safe to use a normal global
1396 EmitGlobalVariableLoad(proxy, typeof_mode);
1400 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1402 DCHECK(var->IsContextSlot());
1403 Register context = cp;
1407 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1408 if (s->num_heap_slots() > 0) {
1409 if (s->calls_sloppy_eval()) {
1410 // Check that extension is NULL.
1411 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1412 __ Branch(slow, ne, temp, Operand(zero_reg));
1414 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1415 // Walk the rest of the chain without clobbering cp.
1419 // Check that last extension is NULL.
1420 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1421 __ Branch(slow, ne, temp, Operand(zero_reg));
1423 // This function is used only for loads, not stores, so it's safe to
1424 // return an cp-based operand (the write barrier cannot be allowed to
1425 // destroy the cp register).
1426 return ContextOperand(context, var->index());
1430 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1431 TypeofMode typeof_mode,
1432 Label* slow, Label* done) {
1433 // Generate fast-case code for variables that might be shadowed by
1434 // eval-introduced variables. Eval is used a lot without
1435 // introducing variables. In those cases, we do not want to
1436 // perform a runtime call for all variables in the scope
1437 // containing the eval.
1438 Variable* var = proxy->var();
1439 if (var->mode() == DYNAMIC_GLOBAL) {
1440 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1442 } else if (var->mode() == DYNAMIC_LOCAL) {
1443 Variable* local = var->local_if_not_shadowed();
1444 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1445 if (local->mode() == LET || local->mode() == CONST ||
1446 local->mode() == CONST_LEGACY) {
1447 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1448 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1449 if (local->mode() == CONST_LEGACY) {
1450 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1451 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1452 } else { // LET || CONST
1453 __ Branch(done, ne, at, Operand(zero_reg));
1454 __ li(a0, Operand(var->name()));
1456 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1464 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1465 TypeofMode typeof_mode) {
1466 Variable* var = proxy->var();
1467 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1468 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1469 if (var->IsGlobalSlot()) {
1470 DCHECK(var->index() > 0);
1471 DCHECK(var->IsStaticGlobalObjectProperty());
1472 // Each var occupies two slots in the context: for reads and writes.
1473 int slot_index = var->index();
1474 int depth = scope()->ContextChainLength(var->scope());
1475 __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
1476 Operand(Smi::FromInt(depth)));
1477 __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
1478 Operand(Smi::FromInt(slot_index)));
1479 __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
1480 LoadGlobalViaContextStub stub(isolate(), depth);
1484 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1485 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1486 __ li(LoadDescriptor::SlotRegister(),
1487 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1488 CallLoadIC(typeof_mode);
1493 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1494 TypeofMode typeof_mode) {
1495 // Record position before possible IC call.
1496 SetExpressionPosition(proxy);
1497 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1498 Variable* var = proxy->var();
1500 // Three cases: global variables, lookup variables, and all other types of
1502 switch (var->location()) {
1503 case VariableLocation::GLOBAL:
1504 case VariableLocation::UNALLOCATED: {
1505 Comment cmnt(masm_, "[ Global variable");
1506 EmitGlobalVariableLoad(proxy, typeof_mode);
1507 context()->Plug(v0);
1511 case VariableLocation::PARAMETER:
1512 case VariableLocation::LOCAL:
1513 case VariableLocation::CONTEXT: {
1514 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1515 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1516 : "[ Stack variable");
1517 if (var->binding_needs_init()) {
1518 // var->scope() may be NULL when the proxy is located in eval code and
1519 // refers to a potential outside binding. Currently those bindings are
1520 // always looked up dynamically, i.e. in that case
1521 // var->location() == LOOKUP.
1523 DCHECK(var->scope() != NULL);
1525 // Check if the binding really needs an initialization check. The check
1526 // can be skipped in the following situation: we have a LET or CONST
1527 // binding in harmony mode, both the Variable and the VariableProxy have
1528 // the same declaration scope (i.e. they are both in global code, in the
1529 // same function or in the same eval code) and the VariableProxy is in
1530 // the source physically located after the initializer of the variable.
1532 // We cannot skip any initialization checks for CONST in non-harmony
1533 // mode because const variables may be declared but never initialized:
1534 // if (false) { const x; }; var y = x;
1536 // The condition on the declaration scopes is a conservative check for
1537 // nested functions that access a binding and are called before the
1538 // binding is initialized:
1539 // function() { f(); let x = 1; function f() { x = 2; } }
1541 bool skip_init_check;
1542 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1543 skip_init_check = false;
1544 } else if (var->is_this()) {
1545 CHECK(info_->function() != nullptr &&
1546 (info_->function()->kind() & kSubclassConstructor) != 0);
1547 // TODO(dslomov): implement 'this' hole check elimination.
1548 skip_init_check = false;
1550 // Check that we always have valid source position.
1551 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1552 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1553 skip_init_check = var->mode() != CONST_LEGACY &&
1554 var->initializer_position() < proxy->position();
1557 if (!skip_init_check) {
1558 // Let and const need a read barrier.
1560 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1561 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1562 if (var->mode() == LET || var->mode() == CONST) {
1563 // Throw a reference error when using an uninitialized let/const
1564 // binding in harmony mode.
1566 __ Branch(&done, ne, at, Operand(zero_reg));
1567 __ li(a0, Operand(var->name()));
1569 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1572 // Uninitalized const bindings outside of harmony mode are unholed.
1573 DCHECK(var->mode() == CONST_LEGACY);
1574 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1575 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1577 context()->Plug(v0);
1581 context()->Plug(var);
1585 case VariableLocation::LOOKUP: {
1586 Comment cmnt(masm_, "[ Lookup variable");
1588 // Generate code for loading from variables potentially shadowed
1589 // by eval-introduced variables.
1590 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1592 __ li(a1, Operand(var->name()));
1593 __ Push(cp, a1); // Context and name.
1594 Runtime::FunctionId function_id =
1595 typeof_mode == NOT_INSIDE_TYPEOF
1596 ? Runtime::kLoadLookupSlot
1597 : Runtime::kLoadLookupSlotNoReferenceError;
1598 __ CallRuntime(function_id, 2);
1600 context()->Plug(v0);
1606 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1607 Comment cmnt(masm_, "[ RegExpLiteral");
1609 // Registers will be used as follows:
1610 // a5 = materialized value (RegExp literal)
1611 // a4 = JS function, literals array
1612 // a3 = literal index
1613 // a2 = RegExp pattern
1614 // a1 = RegExp flags
1615 // a0 = RegExp literal clone
1616 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1617 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1618 int literal_offset =
1619 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1620 __ ld(a5, FieldMemOperand(a4, literal_offset));
1621 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1622 __ Branch(&materialized, ne, a5, Operand(at));
1624 // Create regexp literal using runtime function.
1625 // Result will be in v0.
1626 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1627 __ li(a2, Operand(expr->pattern()));
1628 __ li(a1, Operand(expr->flags()));
1629 __ Push(a4, a3, a2, a1);
1630 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1633 __ bind(&materialized);
1634 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1635 Label allocated, runtime_allocate;
1636 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1639 __ bind(&runtime_allocate);
1640 __ li(a0, Operand(Smi::FromInt(size)));
1642 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1645 __ bind(&allocated);
1647 // After this, registers are used as follows:
1648 // v0: Newly allocated regexp.
1649 // a5: Materialized regexp.
1651 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1652 context()->Plug(v0);
1656 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1657 if (expression == NULL) {
1658 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1661 VisitForStackValue(expression);
1666 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1667 Comment cmnt(masm_, "[ ObjectLiteral");
1669 Handle<FixedArray> constant_properties = expr->constant_properties();
1670 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1671 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1672 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1673 __ li(a1, Operand(constant_properties));
1674 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1675 if (MustCreateObjectLiteralWithRuntime(expr)) {
1676 __ Push(a3, a2, a1, a0);
1677 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1679 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1682 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1684 // If result_saved is true the result is on top of the stack. If
1685 // result_saved is false the result is in v0.
1686 bool result_saved = false;
1688 AccessorTable accessor_table(zone());
1689 int property_index = 0;
1690 // store_slot_index points to the vector IC slot for the next store IC used.
1691 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1692 // and must be updated if the number of store ICs emitted here changes.
1693 int store_slot_index = 0;
1694 for (; property_index < expr->properties()->length(); property_index++) {
1695 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1696 if (property->is_computed_name()) break;
1697 if (property->IsCompileTimeValue()) continue;
1699 Literal* key = property->key()->AsLiteral();
1700 Expression* value = property->value();
1701 if (!result_saved) {
1702 __ push(v0); // Save result on stack.
1703 result_saved = true;
1705 switch (property->kind()) {
1706 case ObjectLiteral::Property::CONSTANT:
1708 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1709 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1711 case ObjectLiteral::Property::COMPUTED:
1712 // It is safe to use [[Put]] here because the boilerplate already
1713 // contains computed properties with an uninitialized value.
1714 if (key->value()->IsInternalizedString()) {
1715 if (property->emit_store()) {
1716 VisitForAccumulatorValue(value);
1717 __ mov(StoreDescriptor::ValueRegister(), result_register());
1718 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1719 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1720 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1721 if (FLAG_vector_stores) {
1722 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1725 CallStoreIC(key->LiteralFeedbackId());
1727 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1729 if (NeedsHomeObject(value)) {
1730 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1731 __ li(StoreDescriptor::NameRegister(),
1732 Operand(isolate()->factory()->home_object_symbol()));
1733 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1734 if (FLAG_vector_stores) {
1735 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1740 VisitForEffect(value);
1744 // Duplicate receiver on stack.
1745 __ ld(a0, MemOperand(sp));
1747 VisitForStackValue(key);
1748 VisitForStackValue(value);
1749 if (property->emit_store()) {
1750 EmitSetHomeObjectIfNeeded(
1751 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1752 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1754 __ CallRuntime(Runtime::kSetProperty, 4);
1759 case ObjectLiteral::Property::PROTOTYPE:
1760 // Duplicate receiver on stack.
1761 __ ld(a0, MemOperand(sp));
1763 VisitForStackValue(value);
1764 DCHECK(property->emit_store());
1765 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1767 case ObjectLiteral::Property::GETTER:
1768 if (property->emit_store()) {
1769 accessor_table.lookup(key)->second->getter = value;
1772 case ObjectLiteral::Property::SETTER:
1773 if (property->emit_store()) {
1774 accessor_table.lookup(key)->second->setter = value;
1780 // Emit code to define accessors, using only a single call to the runtime for
1781 // each pair of corresponding getters and setters.
1782 for (AccessorTable::Iterator it = accessor_table.begin();
1783 it != accessor_table.end();
1785 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1787 VisitForStackValue(it->first);
1788 EmitAccessor(it->second->getter);
1789 EmitSetHomeObjectIfNeeded(
1790 it->second->getter, 2,
1791 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1792 EmitAccessor(it->second->setter);
1793 EmitSetHomeObjectIfNeeded(
1794 it->second->setter, 3,
1795 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1796 __ li(a0, Operand(Smi::FromInt(NONE)));
1798 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1801 // Object literals have two parts. The "static" part on the left contains no
1802 // computed property names, and so we can compute its map ahead of time; see
1803 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1804 // starts with the first computed property name, and continues with all
1805 // properties to its right. All the code from above initializes the static
1806 // component of the object literal, and arranges for the map of the result to
1807 // reflect the static order in which the keys appear. For the dynamic
1808 // properties, we compile them into a series of "SetOwnProperty" runtime
1809 // calls. This will preserve insertion order.
1810 for (; property_index < expr->properties()->length(); property_index++) {
1811 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1813 Expression* value = property->value();
1814 if (!result_saved) {
1815 __ push(v0); // Save result on the stack
1816 result_saved = true;
1819 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1822 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1823 DCHECK(!property->is_computed_name());
1824 VisitForStackValue(value);
1825 DCHECK(property->emit_store());
1826 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1828 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1829 VisitForStackValue(value);
1830 EmitSetHomeObjectIfNeeded(
1831 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1833 switch (property->kind()) {
1834 case ObjectLiteral::Property::CONSTANT:
1835 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1836 case ObjectLiteral::Property::COMPUTED:
1837 if (property->emit_store()) {
1838 __ li(a0, Operand(Smi::FromInt(NONE)));
1840 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1846 case ObjectLiteral::Property::PROTOTYPE:
1850 case ObjectLiteral::Property::GETTER:
1851 __ li(a0, Operand(Smi::FromInt(NONE)));
1853 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1856 case ObjectLiteral::Property::SETTER:
1857 __ li(a0, Operand(Smi::FromInt(NONE)));
1859 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1865 if (expr->has_function()) {
1866 DCHECK(result_saved);
1867 __ ld(a0, MemOperand(sp));
1869 __ CallRuntime(Runtime::kToFastProperties, 1);
1873 context()->PlugTOS();
1875 context()->Plug(v0);
1878 // Verify that compilation exactly consumed the number of store ic slots that
1879 // the ObjectLiteral node had to offer.
1880 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1884 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1885 Comment cmnt(masm_, "[ ArrayLiteral");
1887 expr->BuildConstantElements(isolate());
1889 Handle<FixedArray> constant_elements = expr->constant_elements();
1890 bool has_fast_elements =
1891 IsFastObjectElementsKind(expr->constant_elements_kind());
1893 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1894 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1895 // If the only customer of allocation sites is transitioning, then
1896 // we can turn it off if we don't have anywhere else to transition to.
1897 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1900 __ mov(a0, result_register());
1901 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1902 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1903 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1904 __ li(a1, Operand(constant_elements));
1905 if (MustCreateArrayLiteralWithRuntime(expr)) {
1906 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1907 __ Push(a3, a2, a1, a0);
1908 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1910 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1913 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1915 bool result_saved = false; // Is the result saved to the stack?
1916 ZoneList<Expression*>* subexprs = expr->values();
1917 int length = subexprs->length();
1919 // Emit code to evaluate all the non-constant subexpressions and to store
1920 // them into the newly cloned array.
1921 int array_index = 0;
1922 for (; array_index < length; array_index++) {
1923 Expression* subexpr = subexprs->at(array_index);
1924 if (subexpr->IsSpread()) break;
1926 // If the subexpression is a literal or a simple materialized literal it
1927 // is already set in the cloned array.
1928 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1930 if (!result_saved) {
1931 __ push(v0); // array literal
1932 __ Push(Smi::FromInt(expr->literal_index()));
1933 result_saved = true;
1936 VisitForAccumulatorValue(subexpr);
1938 if (has_fast_elements) {
1939 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1940 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1941 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1942 __ sd(result_register(), FieldMemOperand(a1, offset));
1943 // Update the write barrier for the array store.
1944 __ RecordWriteField(a1, offset, result_register(), a2,
1945 kRAHasBeenSaved, kDontSaveFPRegs,
1946 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1948 __ li(a3, Operand(Smi::FromInt(array_index)));
1949 __ mov(a0, result_register());
1950 StoreArrayLiteralElementStub stub(isolate());
1954 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1957 // In case the array literal contains spread expressions it has two parts. The
1958 // first part is the "static" array which has a literal index is handled
1959 // above. The second part is the part after the first spread expression
1960 // (inclusive) and these elements gets appended to the array. Note that the
1961 // number elements an iterable produces is unknown ahead of time.
1962 if (array_index < length && result_saved) {
1963 __ Pop(); // literal index
1965 result_saved = false;
1967 for (; array_index < length; array_index++) {
1968 Expression* subexpr = subexprs->at(array_index);
1971 if (subexpr->IsSpread()) {
1972 VisitForStackValue(subexpr->AsSpread()->expression());
1973 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1975 VisitForStackValue(subexpr);
1976 __ CallRuntime(Runtime::kAppendElement, 2);
1979 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1983 __ Pop(); // literal index
1984 context()->PlugTOS();
1986 context()->Plug(v0);
1991 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1992 DCHECK(expr->target()->IsValidReferenceExpression());
1994 Comment cmnt(masm_, "[ Assignment");
1995 SetExpressionPosition(expr, INSERT_BREAK);
1997 Property* property = expr->target()->AsProperty();
1998 LhsKind assign_type = Property::GetAssignType(property);
2000 // Evaluate LHS expression.
2001 switch (assign_type) {
2003 // Nothing to do here.
2005 case NAMED_PROPERTY:
2006 if (expr->is_compound()) {
2007 // We need the receiver both on the stack and in the register.
2008 VisitForStackValue(property->obj());
2009 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2011 VisitForStackValue(property->obj());
2014 case NAMED_SUPER_PROPERTY:
2016 property->obj()->AsSuperPropertyReference()->this_var());
2017 VisitForAccumulatorValue(
2018 property->obj()->AsSuperPropertyReference()->home_object());
2019 __ Push(result_register());
2020 if (expr->is_compound()) {
2021 const Register scratch = a1;
2022 __ ld(scratch, MemOperand(sp, kPointerSize));
2023 __ Push(scratch, result_register());
2026 case KEYED_SUPER_PROPERTY: {
2027 const Register scratch = a1;
2029 property->obj()->AsSuperPropertyReference()->this_var());
2030 VisitForAccumulatorValue(
2031 property->obj()->AsSuperPropertyReference()->home_object());
2032 __ Move(scratch, result_register());
2033 VisitForAccumulatorValue(property->key());
2034 __ Push(scratch, result_register());
2035 if (expr->is_compound()) {
2036 const Register scratch1 = a4;
2037 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
2038 __ Push(scratch1, scratch, result_register());
2042 case KEYED_PROPERTY:
2043 // We need the key and receiver on both the stack and in v0 and a1.
2044 if (expr->is_compound()) {
2045 VisitForStackValue(property->obj());
2046 VisitForStackValue(property->key());
2047 __ ld(LoadDescriptor::ReceiverRegister(),
2048 MemOperand(sp, 1 * kPointerSize));
2049 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2051 VisitForStackValue(property->obj());
2052 VisitForStackValue(property->key());
2057 // For compound assignments we need another deoptimization point after the
2058 // variable/property load.
2059 if (expr->is_compound()) {
2060 { AccumulatorValueContext context(this);
2061 switch (assign_type) {
2063 EmitVariableLoad(expr->target()->AsVariableProxy());
2064 PrepareForBailout(expr->target(), TOS_REG);
2066 case NAMED_PROPERTY:
2067 EmitNamedPropertyLoad(property);
2068 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2070 case NAMED_SUPER_PROPERTY:
2071 EmitNamedSuperPropertyLoad(property);
2072 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2074 case KEYED_SUPER_PROPERTY:
2075 EmitKeyedSuperPropertyLoad(property);
2076 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2078 case KEYED_PROPERTY:
2079 EmitKeyedPropertyLoad(property);
2080 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2085 Token::Value op = expr->binary_op();
2086 __ push(v0); // Left operand goes on the stack.
2087 VisitForAccumulatorValue(expr->value());
2089 AccumulatorValueContext context(this);
2090 if (ShouldInlineSmiCase(op)) {
2091 EmitInlineSmiBinaryOp(expr->binary_operation(),
2096 EmitBinaryOp(expr->binary_operation(), op);
2099 // Deoptimization point in case the binary operation may have side effects.
2100 PrepareForBailout(expr->binary_operation(), TOS_REG);
2102 VisitForAccumulatorValue(expr->value());
2105 SetExpressionPosition(expr);
2108 switch (assign_type) {
2110 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2111 expr->op(), expr->AssignmentSlot());
2112 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2113 context()->Plug(v0);
2115 case NAMED_PROPERTY:
2116 EmitNamedPropertyAssignment(expr);
2118 case NAMED_SUPER_PROPERTY:
2119 EmitNamedSuperPropertyStore(property);
2120 context()->Plug(v0);
2122 case KEYED_SUPER_PROPERTY:
2123 EmitKeyedSuperPropertyStore(property);
2124 context()->Plug(v0);
2126 case KEYED_PROPERTY:
2127 EmitKeyedPropertyAssignment(expr);
2133 void FullCodeGenerator::VisitYield(Yield* expr) {
2134 Comment cmnt(masm_, "[ Yield");
2135 SetExpressionPosition(expr);
2137 // Evaluate yielded value first; the initial iterator definition depends on
2138 // this. It stays on the stack while we update the iterator.
2139 VisitForStackValue(expr->expression());
2141 switch (expr->yield_kind()) {
2142 case Yield::kSuspend:
2143 // Pop value from top-of-stack slot; box result into result register.
2144 EmitCreateIteratorResult(false);
2145 __ push(result_register());
2147 case Yield::kInitial: {
2148 Label suspend, continuation, post_runtime, resume;
2151 __ bind(&continuation);
2152 __ RecordGeneratorContinuation();
2156 VisitForAccumulatorValue(expr->generator_object());
2157 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2158 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2159 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2160 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2162 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2163 kRAHasBeenSaved, kDontSaveFPRegs);
2164 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2165 __ Branch(&post_runtime, eq, sp, Operand(a1));
2166 __ push(v0); // generator object
2167 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2168 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2169 __ bind(&post_runtime);
2170 __ pop(result_register());
2171 EmitReturnSequence();
2174 context()->Plug(result_register());
2178 case Yield::kFinal: {
2179 VisitForAccumulatorValue(expr->generator_object());
2180 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2181 __ sd(a1, FieldMemOperand(result_register(),
2182 JSGeneratorObject::kContinuationOffset));
2183 // Pop value from top-of-stack slot, box result into result register.
2184 EmitCreateIteratorResult(true);
2185 EmitUnwindBeforeReturn();
2186 EmitReturnSequence();
2190 case Yield::kDelegating: {
2191 VisitForStackValue(expr->generator_object());
2193 // Initial stack layout is as follows:
2194 // [sp + 1 * kPointerSize] iter
2195 // [sp + 0 * kPointerSize] g
2197 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2198 Label l_next, l_call;
2199 Register load_receiver = LoadDescriptor::ReceiverRegister();
2200 Register load_name = LoadDescriptor::NameRegister();
2201 // Initial send value is undefined.
2202 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2205 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2208 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2209 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2210 __ Push(a2, a3, a0); // "throw", iter, except
2213 // try { received = %yield result }
2214 // Shuffle the received result above a try handler and yield it without
2217 __ pop(a0); // result
2218 int handler_index = NewHandlerTableEntry();
2219 EnterTryBlock(handler_index, &l_catch);
2220 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2221 __ push(a0); // result
2224 __ bind(&l_continuation);
2225 __ RecordGeneratorContinuation();
2229 __ bind(&l_suspend);
2230 const int generator_object_depth = kPointerSize + try_block_size;
2231 __ ld(a0, MemOperand(sp, generator_object_depth));
2233 __ Push(Smi::FromInt(handler_index)); // handler-index
2234 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2235 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2236 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2237 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2239 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2240 kRAHasBeenSaved, kDontSaveFPRegs);
2241 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2242 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2243 __ pop(v0); // result
2244 EmitReturnSequence();
2246 __ bind(&l_resume); // received in a0
2247 ExitTryBlock(handler_index);
2249 // receiver = iter; f = 'next'; arg = received;
2251 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2252 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2253 __ Push(load_name, a3, a0); // "next", iter, received
2255 // result = receiver[f](arg);
2257 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2258 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2259 __ li(LoadDescriptor::SlotRegister(),
2260 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2261 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2262 CallIC(ic, TypeFeedbackId::None());
2265 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2266 SetCallPosition(expr, 1);
2267 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2270 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2271 __ Drop(1); // The function is still on the stack; drop it.
2273 // if (!result.done) goto l_try;
2274 __ Move(load_receiver, v0);
2276 __ push(load_receiver); // save result
2277 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2278 __ li(LoadDescriptor::SlotRegister(),
2279 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2280 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.done
2282 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2284 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2287 __ pop(load_receiver); // result
2288 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2289 __ li(LoadDescriptor::SlotRegister(),
2290 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2291 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.value
2292 context()->DropAndPlug(2, v0); // drop iter and g
2299 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2301 JSGeneratorObject::ResumeMode resume_mode) {
2302 // The value stays in a0, and is ultimately read by the resumed generator, as
2303 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2304 // is read to throw the value when the resumed generator is already closed.
2305 // a1 will hold the generator object until the activation has been resumed.
2306 VisitForStackValue(generator);
2307 VisitForAccumulatorValue(value);
2310 // Load suspended function and context.
2311 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2312 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2314 // Load receiver and store as the first argument.
2315 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2318 // Push holes for the rest of the arguments to the generator function.
2319 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2320 // The argument count is stored as int32_t on 64-bit platforms.
2321 // TODO(plind): Smi on 32-bit platforms.
2323 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2324 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2325 Label push_argument_holes, push_frame;
2326 __ bind(&push_argument_holes);
2327 __ Dsubu(a3, a3, Operand(1));
2328 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2330 __ jmp(&push_argument_holes);
2332 // Enter a new JavaScript frame, and initialize its slots as they were when
2333 // the generator was suspended.
2334 Label resume_frame, done;
2335 __ bind(&push_frame);
2336 __ Call(&resume_frame);
2338 __ bind(&resume_frame);
2339 // ra = return address.
2340 // fp = caller's frame pointer.
2341 // cp = callee's context,
2342 // a4 = callee's JS function.
2343 __ Push(ra, fp, cp, a4);
2344 // Adjust FP to point to saved FP.
2345 __ Daddu(fp, sp, 2 * kPointerSize);
2347 // Load the operand stack size.
2348 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2349 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2352 // If we are sending a value and there is no operand stack, we can jump back
2354 if (resume_mode == JSGeneratorObject::NEXT) {
2356 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2357 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2358 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2360 __ Daddu(a3, a3, Operand(a2));
2361 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2362 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2364 __ bind(&slow_resume);
2367 // Otherwise, we push holes for the operand stack and call the runtime to fix
2368 // up the stack and the handlers.
2369 Label push_operand_holes, call_resume;
2370 __ bind(&push_operand_holes);
2371 __ Dsubu(a3, a3, Operand(1));
2372 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2374 __ Branch(&push_operand_holes);
2375 __ bind(&call_resume);
2376 DCHECK(!result_register().is(a1));
2377 __ Push(a1, result_register());
2378 __ Push(Smi::FromInt(resume_mode));
2379 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2380 // Not reached: the runtime call returns elsewhere.
2381 __ stop("not-reached");
2384 context()->Plug(result_register());
2388 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2392 const int instance_size = 5 * kPointerSize;
2393 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2396 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2399 __ bind(&gc_required);
2400 __ Push(Smi::FromInt(instance_size));
2401 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2402 __ ld(context_register(),
2403 MemOperand(fp, StandardFrameConstants::kContextOffset));
2405 __ bind(&allocated);
2406 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2407 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2408 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2410 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2411 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2412 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2413 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2414 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2416 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2418 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2420 // Only the value field needs a write barrier, as the other values are in the
2422 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2423 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2427 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2428 SetExpressionPosition(prop);
2429 Literal* key = prop->key()->AsLiteral();
2430 DCHECK(!prop->IsSuperAccess());
2432 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2433 __ li(LoadDescriptor::SlotRegister(),
2434 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2435 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2439 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2440 // Stack: receiver, home_object.
2441 SetExpressionPosition(prop);
2443 Literal* key = prop->key()->AsLiteral();
2444 DCHECK(!key->value()->IsSmi());
2445 DCHECK(prop->IsSuperAccess());
2447 __ Push(key->value());
2448 __ Push(Smi::FromInt(language_mode()));
2449 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2453 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2454 // Call keyed load IC. It has register arguments receiver and key.
2455 SetExpressionPosition(prop);
2457 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2458 __ li(LoadDescriptor::SlotRegister(),
2459 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2464 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2465 // Stack: receiver, home_object, key.
2466 SetExpressionPosition(prop);
2467 __ Push(Smi::FromInt(language_mode()));
2468 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2472 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2474 Expression* left_expr,
2475 Expression* right_expr) {
2476 Label done, smi_case, stub_call;
2478 Register scratch1 = a2;
2479 Register scratch2 = a3;
2481 // Get the arguments.
2483 Register right = a0;
2485 __ mov(a0, result_register());
2487 // Perform combined smi check on both operands.
2488 __ Or(scratch1, left, Operand(right));
2489 STATIC_ASSERT(kSmiTag == 0);
2490 JumpPatchSite patch_site(masm_);
2491 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2493 __ bind(&stub_call);
2495 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2496 CallIC(code, expr->BinaryOperationFeedbackId());
2497 patch_site.EmitPatchInfo();
2501 // Smi case. This code works the same way as the smi-smi case in the type
2502 // recording binary operation stub, see
2505 __ GetLeastBitsFromSmi(scratch1, right, 5);
2506 __ dsrav(right, left, scratch1);
2507 __ And(v0, right, Operand(0xffffffff00000000L));
2510 __ SmiUntag(scratch1, left);
2511 __ GetLeastBitsFromSmi(scratch2, right, 5);
2512 __ dsllv(scratch1, scratch1, scratch2);
2513 __ SmiTag(v0, scratch1);
2517 __ SmiUntag(scratch1, left);
2518 __ GetLeastBitsFromSmi(scratch2, right, 5);
2519 __ dsrlv(scratch1, scratch1, scratch2);
2520 __ And(scratch2, scratch1, 0x80000000);
2521 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2522 __ SmiTag(v0, scratch1);
2526 __ DadduAndCheckForOverflow(v0, left, right, scratch1);
2527 __ BranchOnOverflow(&stub_call, scratch1);
2530 __ DsubuAndCheckForOverflow(v0, left, right, scratch1);
2531 __ BranchOnOverflow(&stub_call, scratch1);
2534 __ Dmulh(v0, left, right);
2535 __ dsra32(scratch2, v0, 0);
2536 __ sra(scratch1, v0, 31);
2537 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2539 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2540 __ Daddu(scratch2, right, left);
2541 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2542 DCHECK(Smi::FromInt(0) == 0);
2543 __ mov(v0, zero_reg);
2547 __ Or(v0, left, Operand(right));
2549 case Token::BIT_AND:
2550 __ And(v0, left, Operand(right));
2552 case Token::BIT_XOR:
2553 __ Xor(v0, left, Operand(right));
2560 context()->Plug(v0);
2564 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2565 int* used_store_slots) {
2566 // Constructor is in v0.
2567 DCHECK(lit != NULL);
2570 // No access check is needed here since the constructor is created by the
2572 Register scratch = a1;
2574 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2577 for (int i = 0; i < lit->properties()->length(); i++) {
2578 ObjectLiteral::Property* property = lit->properties()->at(i);
2579 Expression* value = property->value();
2581 if (property->is_static()) {
2582 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2584 __ ld(scratch, MemOperand(sp, 0)); // prototype
2587 EmitPropertyKey(property, lit->GetIdForProperty(i));
2589 // The static prototype property is read only. We handle the non computed
2590 // property name case in the parser. Since this is the only case where we
2591 // need to check for an own read only property we special case this so we do
2592 // not need to do this for every property.
2593 if (property->is_static() && property->is_computed_name()) {
2594 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2598 VisitForStackValue(value);
2599 EmitSetHomeObjectIfNeeded(value, 2,
2600 lit->SlotForHomeObject(value, used_store_slots));
2602 switch (property->kind()) {
2603 case ObjectLiteral::Property::CONSTANT:
2604 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2605 case ObjectLiteral::Property::PROTOTYPE:
2607 case ObjectLiteral::Property::COMPUTED:
2608 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2611 case ObjectLiteral::Property::GETTER:
2612 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2614 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2617 case ObjectLiteral::Property::SETTER:
2618 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2620 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2629 __ CallRuntime(Runtime::kToFastProperties, 1);
2632 __ CallRuntime(Runtime::kToFastProperties, 1);
2634 if (is_strong(language_mode())) {
2636 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2637 __ Push(v0, scratch);
2638 // TODO(conradw): It would be more efficient to define the properties with
2639 // the right attributes the first time round.
2640 // Freeze the prototype.
2641 __ CallRuntime(Runtime::kObjectFreeze, 1);
2642 // Freeze the constructor.
2643 __ CallRuntime(Runtime::kObjectFreeze, 1);
2648 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2649 __ mov(a0, result_register());
2652 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2653 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2654 CallIC(code, expr->BinaryOperationFeedbackId());
2655 patch_site.EmitPatchInfo();
2656 context()->Plug(v0);
2660 void FullCodeGenerator::EmitAssignment(Expression* expr,
2661 FeedbackVectorICSlot slot) {
2662 DCHECK(expr->IsValidReferenceExpression());
2664 Property* prop = expr->AsProperty();
2665 LhsKind assign_type = Property::GetAssignType(prop);
2667 switch (assign_type) {
2669 Variable* var = expr->AsVariableProxy()->var();
2670 EffectContext context(this);
2671 EmitVariableAssignment(var, Token::ASSIGN, slot);
2674 case NAMED_PROPERTY: {
2675 __ push(result_register()); // Preserve value.
2676 VisitForAccumulatorValue(prop->obj());
2677 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2678 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2679 __ li(StoreDescriptor::NameRegister(),
2680 Operand(prop->key()->AsLiteral()->value()));
2681 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2685 case NAMED_SUPER_PROPERTY: {
2687 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2688 VisitForAccumulatorValue(
2689 prop->obj()->AsSuperPropertyReference()->home_object());
2690 // stack: value, this; v0: home_object
2691 Register scratch = a2;
2692 Register scratch2 = a3;
2693 __ mov(scratch, result_register()); // home_object
2694 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2695 __ ld(scratch2, MemOperand(sp, 0)); // this
2696 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2697 __ sd(scratch, MemOperand(sp, 0)); // home_object
2698 // stack: this, home_object; v0: value
2699 EmitNamedSuperPropertyStore(prop);
2702 case KEYED_SUPER_PROPERTY: {
2704 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2706 prop->obj()->AsSuperPropertyReference()->home_object());
2707 VisitForAccumulatorValue(prop->key());
2708 Register scratch = a2;
2709 Register scratch2 = a3;
2710 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2711 // stack: value, this, home_object; v0: key, a3: value
2712 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2713 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2714 __ ld(scratch, MemOperand(sp, 0)); // home_object
2715 __ sd(scratch, MemOperand(sp, kPointerSize));
2716 __ sd(v0, MemOperand(sp, 0));
2717 __ Move(v0, scratch2);
2718 // stack: this, home_object, key; v0: value.
2719 EmitKeyedSuperPropertyStore(prop);
2722 case KEYED_PROPERTY: {
2723 __ push(result_register()); // Preserve value.
2724 VisitForStackValue(prop->obj());
2725 VisitForAccumulatorValue(prop->key());
2726 __ Move(StoreDescriptor::NameRegister(), result_register());
2727 __ Pop(StoreDescriptor::ValueRegister(),
2728 StoreDescriptor::ReceiverRegister());
2729 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2731 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2736 context()->Plug(v0);
2740 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2741 Variable* var, MemOperand location) {
2742 __ sd(result_register(), location);
2743 if (var->IsContextSlot()) {
2744 // RecordWrite may destroy all its register arguments.
2745 __ Move(a3, result_register());
2746 int offset = Context::SlotOffset(var->index());
2747 __ RecordWriteContextSlot(
2748 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2753 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2754 FeedbackVectorICSlot slot) {
2755 if (var->IsUnallocated()) {
2756 // Global var, const, or let.
2757 __ mov(StoreDescriptor::ValueRegister(), result_register());
2758 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2759 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2760 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2763 } else if (var->IsGlobalSlot()) {
2764 // Global var, const, or let.
2765 DCHECK(var->index() > 0);
2766 DCHECK(var->IsStaticGlobalObjectProperty());
2767 // Each var occupies two slots in the context: for reads and writes.
2768 int slot_index = var->index() + 1;
2769 int depth = scope()->ContextChainLength(var->scope());
2770 __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
2771 Operand(Smi::FromInt(depth)));
2772 __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
2773 Operand(Smi::FromInt(slot_index)));
2774 __ li(StoreGlobalViaContextDescriptor::NameRegister(),
2775 Operand(var->name()));
2776 __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
2777 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2780 } else if (var->mode() == LET && op != Token::INIT_LET) {
2781 // Non-initializing assignment to let variable needs a write barrier.
2782 DCHECK(!var->IsLookupSlot());
2783 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2785 MemOperand location = VarOperand(var, a1);
2786 __ ld(a3, location);
2787 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2788 __ Branch(&assign, ne, a3, Operand(a4));
2789 __ li(a3, Operand(var->name()));
2791 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2792 // Perform the assignment.
2794 EmitStoreToStackLocalOrContextSlot(var, location);
2796 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2797 // Assignment to const variable needs a write barrier.
2798 DCHECK(!var->IsLookupSlot());
2799 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2801 MemOperand location = VarOperand(var, a1);
2802 __ ld(a3, location);
2803 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2804 __ Branch(&const_error, ne, a3, Operand(at));
2805 __ li(a3, Operand(var->name()));
2807 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2808 __ bind(&const_error);
2809 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2811 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2812 if (var->IsLookupSlot()) {
2813 // Assignment to var.
2814 __ li(a4, Operand(var->name()));
2815 __ li(a3, Operand(Smi::FromInt(language_mode())));
2816 // jssp[0] : language mode.
2818 // jssp[16] : context.
2819 // jssp[24] : value.
2820 __ Push(v0, cp, a4, a3);
2821 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2823 // Assignment to var or initializing assignment to let/const in harmony
2825 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2826 MemOperand location = VarOperand(var, a1);
2827 if (generate_debug_code_ && op == Token::INIT_LET) {
2828 // Check for an uninitialized let binding.
2829 __ ld(a2, location);
2830 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2831 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2833 EmitStoreToStackLocalOrContextSlot(var, location);
2836 } else if (op == Token::INIT_CONST_LEGACY) {
2837 // Const initializers need a write barrier.
2838 DCHECK(!var->IsParameter()); // No const parameters.
2839 if (var->IsLookupSlot()) {
2840 __ li(a0, Operand(var->name()));
2841 __ Push(v0, cp, a0); // Context and name.
2842 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2844 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2846 MemOperand location = VarOperand(var, a1);
2847 __ ld(a2, location);
2848 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2849 __ Branch(&skip, ne, a2, Operand(at));
2850 EmitStoreToStackLocalOrContextSlot(var, location);
2855 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2856 if (is_strict(language_mode())) {
2857 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2859 // Silently ignore store in sloppy mode.
2864 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2865 // Assignment to a property, using a named store IC.
2866 Property* prop = expr->target()->AsProperty();
2867 DCHECK(prop != NULL);
2868 DCHECK(prop->key()->IsLiteral());
2870 __ mov(StoreDescriptor::ValueRegister(), result_register());
2871 __ li(StoreDescriptor::NameRegister(),
2872 Operand(prop->key()->AsLiteral()->value()));
2873 __ pop(StoreDescriptor::ReceiverRegister());
2874 if (FLAG_vector_stores) {
2875 EmitLoadStoreICSlot(expr->AssignmentSlot());
2878 CallStoreIC(expr->AssignmentFeedbackId());
2881 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2882 context()->Plug(v0);
2886 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2887 // Assignment to named property of super.
2889 // stack : receiver ('this'), home_object
2890 DCHECK(prop != NULL);
2891 Literal* key = prop->key()->AsLiteral();
2892 DCHECK(key != NULL);
2894 __ Push(key->value());
2896 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2897 : Runtime::kStoreToSuper_Sloppy),
2902 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2903 // Assignment to named property of super.
2905 // stack : receiver ('this'), home_object, key
2906 DCHECK(prop != NULL);
2910 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2911 : Runtime::kStoreKeyedToSuper_Sloppy),
2916 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2917 // Assignment to a property, using a keyed store IC.
2918 // Call keyed store IC.
2919 // The arguments are:
2920 // - a0 is the value,
2922 // - a2 is the receiver.
2923 __ mov(StoreDescriptor::ValueRegister(), result_register());
2924 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2925 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2928 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2929 if (FLAG_vector_stores) {
2930 EmitLoadStoreICSlot(expr->AssignmentSlot());
2933 CallIC(ic, expr->AssignmentFeedbackId());
2936 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2937 context()->Plug(v0);
2941 void FullCodeGenerator::VisitProperty(Property* expr) {
2942 Comment cmnt(masm_, "[ Property");
2943 SetExpressionPosition(expr);
2945 Expression* key = expr->key();
2947 if (key->IsPropertyName()) {
2948 if (!expr->IsSuperAccess()) {
2949 VisitForAccumulatorValue(expr->obj());
2950 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2951 EmitNamedPropertyLoad(expr);
2953 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2955 expr->obj()->AsSuperPropertyReference()->home_object());
2956 EmitNamedSuperPropertyLoad(expr);
2959 if (!expr->IsSuperAccess()) {
2960 VisitForStackValue(expr->obj());
2961 VisitForAccumulatorValue(expr->key());
2962 __ Move(LoadDescriptor::NameRegister(), v0);
2963 __ pop(LoadDescriptor::ReceiverRegister());
2964 EmitKeyedPropertyLoad(expr);
2966 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2968 expr->obj()->AsSuperPropertyReference()->home_object());
2969 VisitForStackValue(expr->key());
2970 EmitKeyedSuperPropertyLoad(expr);
2973 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2974 context()->Plug(v0);
2978 void FullCodeGenerator::CallIC(Handle<Code> code,
2979 TypeFeedbackId id) {
2981 __ Call(code, RelocInfo::CODE_TARGET, id);
2985 // Code common for calls using the IC.
2986 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2987 Expression* callee = expr->expression();
2989 CallICState::CallType call_type =
2990 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2992 // Get the target function.
2993 if (call_type == CallICState::FUNCTION) {
2994 { StackValueContext context(this);
2995 EmitVariableLoad(callee->AsVariableProxy());
2996 PrepareForBailout(callee, NO_REGISTERS);
2998 // Push undefined as receiver. This is patched in the method prologue if it
2999 // is a sloppy mode method.
3000 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3003 // Load the function from the receiver.
3004 DCHECK(callee->IsProperty());
3005 DCHECK(!callee->AsProperty()->IsSuperAccess());
3006 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3007 EmitNamedPropertyLoad(callee->AsProperty());
3008 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3009 // Push the target function under the receiver.
3010 __ ld(at, MemOperand(sp, 0));
3012 __ sd(v0, MemOperand(sp, kPointerSize));
3015 EmitCall(expr, call_type);
3019 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
3020 SetExpressionPosition(expr);
3021 Expression* callee = expr->expression();
3022 DCHECK(callee->IsProperty());
3023 Property* prop = callee->AsProperty();
3024 DCHECK(prop->IsSuperAccess());
3026 Literal* key = prop->key()->AsLiteral();
3027 DCHECK(!key->value()->IsSmi());
3028 // Load the function from the receiver.
3029 const Register scratch = a1;
3030 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3031 VisitForAccumulatorValue(super_ref->home_object());
3032 __ mov(scratch, v0);
3033 VisitForAccumulatorValue(super_ref->this_var());
3034 __ Push(scratch, v0, v0, scratch);
3035 __ Push(key->value());
3036 __ Push(Smi::FromInt(language_mode()));
3040 // - this (receiver)
3041 // - this (receiver) <-- LoadFromSuper will pop here and below.
3045 __ CallRuntime(Runtime::kLoadFromSuper, 4);
3047 // Replace home_object with target function.
3048 __ sd(v0, MemOperand(sp, kPointerSize));
3051 // - target function
3052 // - this (receiver)
3053 EmitCall(expr, CallICState::METHOD);
3057 // Code common for calls using the IC.
3058 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3061 VisitForAccumulatorValue(key);
3063 Expression* callee = expr->expression();
3065 // Load the function from the receiver.
3066 DCHECK(callee->IsProperty());
3067 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3068 __ Move(LoadDescriptor::NameRegister(), v0);
3069 EmitKeyedPropertyLoad(callee->AsProperty());
3070 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3072 // Push the target function under the receiver.
3073 __ ld(at, MemOperand(sp, 0));
3075 __ sd(v0, MemOperand(sp, kPointerSize));
3077 EmitCall(expr, CallICState::METHOD);
3081 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3082 Expression* callee = expr->expression();
3083 DCHECK(callee->IsProperty());
3084 Property* prop = callee->AsProperty();
3085 DCHECK(prop->IsSuperAccess());
3087 SetExpressionPosition(prop);
3088 // Load the function from the receiver.
3089 const Register scratch = a1;
3090 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3091 VisitForAccumulatorValue(super_ref->home_object());
3092 __ Move(scratch, v0);
3093 VisitForAccumulatorValue(super_ref->this_var());
3094 __ Push(scratch, v0, v0, scratch);
3095 VisitForStackValue(prop->key());
3096 __ Push(Smi::FromInt(language_mode()));
3100 // - this (receiver)
3101 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3105 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3107 // Replace home_object with target function.
3108 __ sd(v0, MemOperand(sp, kPointerSize));
3111 // - target function
3112 // - this (receiver)
3113 EmitCall(expr, CallICState::METHOD);
3117 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3118 // Load the arguments.
3119 ZoneList<Expression*>* args = expr->arguments();
3120 int arg_count = args->length();
3121 for (int i = 0; i < arg_count; i++) {
3122 VisitForStackValue(args->at(i));
3125 // Record source position of the IC call.
3126 SetCallPosition(expr, arg_count);
3127 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3128 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3129 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3130 // Don't assign a type feedback id to the IC, since type feedback is provided
3131 // by the vector above.
3133 RecordJSReturnSite(expr);
3134 // Restore context register.
3135 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3136 context()->DropAndPlug(1, v0);
3140 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3141 // a6: copy of the first argument or undefined if it doesn't exist.
3142 if (arg_count > 0) {
3143 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
3145 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
3148 // a5: the receiver of the enclosing function.
3149 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3151 // a4: the language mode.
3152 __ li(a4, Operand(Smi::FromInt(language_mode())));
3154 // a1: the start position of the scope the calls resides in.
3155 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3157 // Do the runtime call.
3158 __ Push(a6, a5, a4, a1);
3159 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3163 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3164 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3165 Variable* this_var = super_ref->this_var()->var();
3166 GetVar(a1, this_var);
3167 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3168 Label uninitialized_this;
3169 __ Branch(&uninitialized_this, eq, a1, Operand(at));
3170 __ li(a0, Operand(this_var->name()));
3172 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3173 __ bind(&uninitialized_this);
3175 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3179 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3180 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3181 VariableProxy* callee = expr->expression()->AsVariableProxy();
3182 if (callee->var()->IsLookupSlot()) {
3185 SetExpressionPosition(callee);
3186 // Generate code for loading from variables potentially shadowed by
3187 // eval-introduced variables.
3188 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3191 // Call the runtime to find the function to call (returned in v0)
3192 // and the object holding it (returned in v1).
3193 DCHECK(!context_register().is(a2));
3194 __ li(a2, Operand(callee->name()));
3195 __ Push(context_register(), a2);
3196 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3197 __ Push(v0, v1); // Function, receiver.
3198 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3200 // If fast case code has been generated, emit code to push the
3201 // function and receiver and have the slow path jump around this
3203 if (done.is_linked()) {
3209 // The receiver is implicitly the global receiver. Indicate this
3210 // by passing the hole to the call function stub.
3211 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3216 VisitForStackValue(callee);
3217 // refEnv.WithBaseObject()
3218 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3219 __ push(a2); // Reserved receiver slot.
3224 void FullCodeGenerator::VisitCall(Call* expr) {
3226 // We want to verify that RecordJSReturnSite gets called on all paths
3227 // through this function. Avoid early returns.
3228 expr->return_is_recorded_ = false;
3231 Comment cmnt(masm_, "[ Call");
3232 Expression* callee = expr->expression();
3233 Call::CallType call_type = expr->GetCallType(isolate());
3235 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3236 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3237 // to resolve the function we need to call. Then we call the resolved
3238 // function using the given arguments.
3239 ZoneList<Expression*>* args = expr->arguments();
3240 int arg_count = args->length();
3241 PushCalleeAndWithBaseObject(expr);
3243 // Push the arguments.
3244 for (int i = 0; i < arg_count; i++) {
3245 VisitForStackValue(args->at(i));
3248 // Push a copy of the function (found below the arguments) and
3250 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3252 EmitResolvePossiblyDirectEval(arg_count);
3254 // Touch up the stack with the resolved function.
3255 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3257 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3258 // Record source position for debugger.
3259 SetCallPosition(expr, arg_count);
3260 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3261 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3263 RecordJSReturnSite(expr);
3264 // Restore context register.
3265 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3266 context()->DropAndPlug(1, v0);
3267 } else if (call_type == Call::GLOBAL_CALL) {
3268 EmitCallWithLoadIC(expr);
3269 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3270 // Call to a lookup slot (dynamically introduced variable).
3271 PushCalleeAndWithBaseObject(expr);
3273 } else if (call_type == Call::PROPERTY_CALL) {
3274 Property* property = callee->AsProperty();
3275 bool is_named_call = property->key()->IsPropertyName();
3276 if (property->IsSuperAccess()) {
3277 if (is_named_call) {
3278 EmitSuperCallWithLoadIC(expr);
3280 EmitKeyedSuperCallWithLoadIC(expr);
3283 VisitForStackValue(property->obj());
3284 if (is_named_call) {
3285 EmitCallWithLoadIC(expr);
3287 EmitKeyedCallWithLoadIC(expr, property->key());
3290 } else if (call_type == Call::SUPER_CALL) {
3291 EmitSuperConstructorCall(expr);
3293 DCHECK(call_type == Call::OTHER_CALL);
3294 // Call to an arbitrary expression not handled specially above.
3295 VisitForStackValue(callee);
3296 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3298 // Emit function call.
3303 // RecordJSReturnSite should have been called.
3304 DCHECK(expr->return_is_recorded_);
3309 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3310 Comment cmnt(masm_, "[ CallNew");
3311 // According to ECMA-262, section 11.2.2, page 44, the function
3312 // expression in new calls must be evaluated before the
3315 // Push constructor on the stack. If it's not a function it's used as
3316 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3318 DCHECK(!expr->expression()->IsSuperPropertyReference());
3319 VisitForStackValue(expr->expression());
3321 // Push the arguments ("left-to-right") on the stack.
3322 ZoneList<Expression*>* args = expr->arguments();
3323 int arg_count = args->length();
3324 for (int i = 0; i < arg_count; i++) {
3325 VisitForStackValue(args->at(i));
3328 // Call the construct call builtin that handles allocation and
3329 // constructor invocation.
3330 SetConstructCallPosition(expr);
3332 // Load function and argument count into a1 and a0.
3333 __ li(a0, Operand(arg_count));
3334 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3336 // Record call targets in unoptimized code.
3337 if (FLAG_pretenuring_call_new) {
3338 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3339 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3340 expr->CallNewFeedbackSlot().ToInt() + 1);
3343 __ li(a2, FeedbackVector());
3344 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3346 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3347 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3348 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3349 context()->Plug(v0);
3353 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3354 SuperCallReference* super_call_ref =
3355 expr->expression()->AsSuperCallReference();
3356 DCHECK_NOT_NULL(super_call_ref);
3358 EmitLoadSuperConstructor(super_call_ref);
3359 __ push(result_register());
3361 // Push the arguments ("left-to-right") on the stack.
3362 ZoneList<Expression*>* args = expr->arguments();
3363 int arg_count = args->length();
3364 for (int i = 0; i < arg_count; i++) {
3365 VisitForStackValue(args->at(i));
3368 // Call the construct call builtin that handles allocation and
3369 // constructor invocation.
3370 SetConstructCallPosition(expr);
3372 // Load original constructor into a4.
3373 VisitForAccumulatorValue(super_call_ref->new_target_var());
3374 __ mov(a4, result_register());
3376 // Load function and argument count into a1 and a0.
3377 __ li(a0, Operand(arg_count));
3378 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3380 // Record call targets in unoptimized code.
3381 if (FLAG_pretenuring_call_new) {
3383 /* TODO(dslomov): support pretenuring.
3384 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3385 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3386 expr->CallNewFeedbackSlot().ToInt() + 1);
3390 __ li(a2, FeedbackVector());
3391 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3393 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3394 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3396 RecordJSReturnSite(expr);
3398 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3399 context()->Plug(v0);
3403 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3404 ZoneList<Expression*>* args = expr->arguments();
3405 DCHECK(args->length() == 1);
3407 VisitForAccumulatorValue(args->at(0));
3409 Label materialize_true, materialize_false;
3410 Label* if_true = NULL;
3411 Label* if_false = NULL;
3412 Label* fall_through = NULL;
3413 context()->PrepareTest(&materialize_true, &materialize_false,
3414 &if_true, &if_false, &fall_through);
3416 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3418 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3420 context()->Plug(if_true, if_false);
3424 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3425 ZoneList<Expression*>* args = expr->arguments();
3426 DCHECK(args->length() == 1);
3428 VisitForAccumulatorValue(args->at(0));
3430 Label materialize_true, materialize_false;
3431 Label* if_true = NULL;
3432 Label* if_false = NULL;
3433 Label* fall_through = NULL;
3434 context()->PrepareTest(&materialize_true, &materialize_false,
3435 &if_true, &if_false, &fall_through);
3437 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3438 __ NonNegativeSmiTst(v0, at);
3439 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3441 context()->Plug(if_true, if_false);
3445 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3446 ZoneList<Expression*>* args = expr->arguments();
3447 DCHECK(args->length() == 1);
3449 VisitForAccumulatorValue(args->at(0));
3451 Label materialize_true, materialize_false;
3452 Label* if_true = NULL;
3453 Label* if_false = NULL;
3454 Label* fall_through = NULL;
3455 context()->PrepareTest(&materialize_true, &materialize_false,
3456 &if_true, &if_false, &fall_through);
3458 __ JumpIfSmi(v0, if_false);
3459 __ LoadRoot(at, Heap::kNullValueRootIndex);
3460 __ Branch(if_true, eq, v0, Operand(at));
3461 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3462 // Undetectable objects behave like undefined when tested with typeof.
3463 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3464 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3465 __ Branch(if_false, ne, at, Operand(zero_reg));
3466 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3467 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3468 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3469 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3470 if_true, if_false, fall_through);
3472 context()->Plug(if_true, if_false);
3476 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3477 ZoneList<Expression*>* args = expr->arguments();
3478 DCHECK(args->length() == 1);
3480 VisitForAccumulatorValue(args->at(0));
3482 Label materialize_true, materialize_false;
3483 Label* if_true = NULL;
3484 Label* if_false = NULL;
3485 Label* fall_through = NULL;
3486 context()->PrepareTest(&materialize_true, &materialize_false,
3487 &if_true, &if_false, &fall_through);
3489 __ JumpIfSmi(v0, if_false);
3490 __ GetObjectType(v0, a1, a1);
3491 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3492 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3493 if_true, if_false, fall_through);
3495 context()->Plug(if_true, if_false);
3499 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3500 ZoneList<Expression*>* args = expr->arguments();
3501 DCHECK(args->length() == 1);
3503 VisitForAccumulatorValue(args->at(0));
3505 Label materialize_true, materialize_false;
3506 Label* if_true = NULL;
3507 Label* if_false = NULL;
3508 Label* fall_through = NULL;
3509 context()->PrepareTest(&materialize_true, &materialize_false,
3510 &if_true, &if_false, &fall_through);
3512 __ JumpIfSmi(v0, if_false);
3513 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3514 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3515 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3516 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3517 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3519 context()->Plug(if_true, if_false);
3523 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3524 CallRuntime* expr) {
3525 ZoneList<Expression*>* args = expr->arguments();
3526 DCHECK(args->length() == 1);
3528 VisitForAccumulatorValue(args->at(0));
3530 Label materialize_true, materialize_false, skip_lookup;
3531 Label* if_true = NULL;
3532 Label* if_false = NULL;
3533 Label* fall_through = NULL;
3534 context()->PrepareTest(&materialize_true, &materialize_false,
3535 &if_true, &if_false, &fall_through);
3537 __ AssertNotSmi(v0);
3539 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3540 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3541 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3542 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3544 // Check for fast case object. Generate false result for slow case object.
3545 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3546 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3547 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3548 __ Branch(if_false, eq, a2, Operand(a4));
3550 // Look for valueOf name in the descriptor array, and indicate false if
3551 // found. Since we omit an enumeration index check, if it is added via a
3552 // transition that shares its descriptor array, this is a false positive.
3553 Label entry, loop, done;
3555 // Skip loop if no descriptors are valid.
3556 __ NumberOfOwnDescriptors(a3, a1);
3557 __ Branch(&done, eq, a3, Operand(zero_reg));
3559 __ LoadInstanceDescriptors(a1, a4);
3560 // a4: descriptor array.
3561 // a3: valid entries in the descriptor array.
3562 STATIC_ASSERT(kSmiTag == 0);
3563 STATIC_ASSERT(kSmiTagSize == 1);
3565 // STATIC_ASSERT(kPointerSize == 4);
3566 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3567 __ Dmul(a3, a3, at);
3568 // Calculate location of the first key name.
3569 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3570 // Calculate the end of the descriptor array.
3572 __ dsll(a5, a3, kPointerSizeLog2);
3573 __ Daddu(a2, a2, a5);
3575 // Loop through all the keys in the descriptor array. If one of these is the
3576 // string "valueOf" the result is false.
3577 // The use of a6 to store the valueOf string assumes that it is not otherwise
3578 // used in the loop below.
3579 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3582 __ ld(a3, MemOperand(a4, 0));
3583 __ Branch(if_false, eq, a3, Operand(a6));
3584 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3586 __ Branch(&loop, ne, a4, Operand(a2));
3590 // Set the bit in the map to indicate that there is no local valueOf field.
3591 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3592 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3593 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3595 __ bind(&skip_lookup);
3597 // If a valueOf property is not found on the object check that its
3598 // prototype is the un-modified String prototype. If not result is false.
3599 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3600 __ JumpIfSmi(a2, if_false);
3601 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3602 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3603 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3604 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3605 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3606 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3608 context()->Plug(if_true, if_false);
3612 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3613 ZoneList<Expression*>* args = expr->arguments();
3614 DCHECK(args->length() == 1);
3616 VisitForAccumulatorValue(args->at(0));
3618 Label materialize_true, materialize_false;
3619 Label* if_true = NULL;
3620 Label* if_false = NULL;
3621 Label* fall_through = NULL;
3622 context()->PrepareTest(&materialize_true, &materialize_false,
3623 &if_true, &if_false, &fall_through);
3625 __ JumpIfSmi(v0, if_false);
3626 __ GetObjectType(v0, a1, a2);
3627 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3628 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3629 __ Branch(if_false);
3631 context()->Plug(if_true, if_false);
3635 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3636 ZoneList<Expression*>* args = expr->arguments();
3637 DCHECK(args->length() == 1);
3639 VisitForAccumulatorValue(args->at(0));
3641 Label materialize_true, materialize_false;
3642 Label* if_true = NULL;
3643 Label* if_false = NULL;
3644 Label* fall_through = NULL;
3645 context()->PrepareTest(&materialize_true, &materialize_false,
3646 &if_true, &if_false, &fall_through);
3648 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3649 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3650 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3651 __ li(a4, 0x80000000);
3653 __ Branch(¬_nan, ne, a2, Operand(a4));
3654 __ mov(a4, zero_reg);
3658 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3659 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3661 context()->Plug(if_true, if_false);
3665 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3666 ZoneList<Expression*>* args = expr->arguments();
3667 DCHECK(args->length() == 1);
3669 VisitForAccumulatorValue(args->at(0));
3671 Label materialize_true, materialize_false;
3672 Label* if_true = NULL;
3673 Label* if_false = NULL;
3674 Label* fall_through = NULL;
3675 context()->PrepareTest(&materialize_true, &materialize_false,
3676 &if_true, &if_false, &fall_through);
3678 __ JumpIfSmi(v0, if_false);
3679 __ GetObjectType(v0, a1, a1);
3680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3681 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3682 if_true, if_false, fall_through);
3684 context()->Plug(if_true, if_false);
3688 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 DCHECK(args->length() == 1);
3692 VisitForAccumulatorValue(args->at(0));
3694 Label materialize_true, materialize_false;
3695 Label* if_true = NULL;
3696 Label* if_false = NULL;
3697 Label* fall_through = NULL;
3698 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3699 &if_false, &fall_through);
3701 __ JumpIfSmi(v0, if_false);
3702 __ GetObjectType(v0, a1, a1);
3703 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3704 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3706 context()->Plug(if_true, if_false);
3710 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3711 ZoneList<Expression*>* args = expr->arguments();
3712 DCHECK(args->length() == 1);
3714 VisitForAccumulatorValue(args->at(0));
3716 Label materialize_true, materialize_false;
3717 Label* if_true = NULL;
3718 Label* if_false = NULL;
3719 Label* fall_through = NULL;
3720 context()->PrepareTest(&materialize_true, &materialize_false,
3721 &if_true, &if_false, &fall_through);
3723 __ JumpIfSmi(v0, if_false);
3724 __ GetObjectType(v0, a1, a1);
3725 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3726 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3728 context()->Plug(if_true, if_false);
3732 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3733 ZoneList<Expression*>* args = expr->arguments();
3734 DCHECK(args->length() == 1);
3736 VisitForAccumulatorValue(args->at(0));
3738 Label materialize_true, materialize_false;
3739 Label* if_true = NULL;
3740 Label* if_false = NULL;
3741 Label* fall_through = NULL;
3742 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3743 &if_false, &fall_through);
3745 __ JumpIfSmi(v0, if_false);
3747 Register type_reg = a2;
3748 __ GetObjectType(v0, map, type_reg);
3749 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3750 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3751 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3752 if_true, if_false, fall_through);
3754 context()->Plug(if_true, if_false);
3758 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3759 DCHECK(expr->arguments()->length() == 0);
3761 Label materialize_true, materialize_false;
3762 Label* if_true = NULL;
3763 Label* if_false = NULL;
3764 Label* fall_through = NULL;
3765 context()->PrepareTest(&materialize_true, &materialize_false,
3766 &if_true, &if_false, &fall_through);
3768 // Get the frame pointer for the calling frame.
3769 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3771 // Skip the arguments adaptor frame if it exists.
3772 Label check_frame_marker;
3773 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3774 __ Branch(&check_frame_marker, ne,
3775 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3776 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3778 // Check the marker in the calling frame.
3779 __ bind(&check_frame_marker);
3780 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3781 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3782 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3783 if_true, if_false, fall_through);
3785 context()->Plug(if_true, if_false);
3789 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3790 ZoneList<Expression*>* args = expr->arguments();
3791 DCHECK(args->length() == 2);
3793 // Load the two objects into registers and perform the comparison.
3794 VisitForStackValue(args->at(0));
3795 VisitForAccumulatorValue(args->at(1));
3797 Label materialize_true, materialize_false;
3798 Label* if_true = NULL;
3799 Label* if_false = NULL;
3800 Label* fall_through = NULL;
3801 context()->PrepareTest(&materialize_true, &materialize_false,
3802 &if_true, &if_false, &fall_through);
3805 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3806 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3808 context()->Plug(if_true, if_false);
3812 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3813 ZoneList<Expression*>* args = expr->arguments();
3814 DCHECK(args->length() == 1);
3816 // ArgumentsAccessStub expects the key in a1 and the formal
3817 // parameter count in a0.
3818 VisitForAccumulatorValue(args->at(0));
3820 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3821 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3823 context()->Plug(v0);
3827 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3828 DCHECK(expr->arguments()->length() == 0);
3830 // Get the number of formal parameters.
3831 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3833 // Check if the calling frame is an arguments adaptor frame.
3834 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3835 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3836 __ Branch(&exit, ne, a3,
3837 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3839 // Arguments adaptor case: Read the arguments length from the
3841 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3844 context()->Plug(v0);
3848 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3849 ZoneList<Expression*>* args = expr->arguments();
3850 DCHECK(args->length() == 1);
3851 Label done, null, function, non_function_constructor;
3853 VisitForAccumulatorValue(args->at(0));
3855 // If the object is a smi, we return null.
3856 __ JumpIfSmi(v0, &null);
3858 // Check that the object is a JS object but take special care of JS
3859 // functions to make sure they have 'Function' as their class.
3860 // Assume that there are only two callable types, and one of them is at
3861 // either end of the type range for JS object types. Saves extra comparisons.
3862 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3863 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3864 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3866 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3867 FIRST_SPEC_OBJECT_TYPE + 1);
3868 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3870 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3871 LAST_SPEC_OBJECT_TYPE - 1);
3872 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3873 // Assume that there is no larger type.
3874 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3876 // Check if the constructor in the map is a JS function.
3877 Register instance_type = a2;
3878 __ GetMapConstructor(v0, v0, a1, instance_type);
3879 __ Branch(&non_function_constructor, ne, instance_type,
3880 Operand(JS_FUNCTION_TYPE));
3882 // v0 now contains the constructor function. Grab the
3883 // instance class name from there.
3884 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3885 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3888 // Functions have class 'Function'.
3890 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3893 // Objects with a non-function constructor have class 'Object'.
3894 __ bind(&non_function_constructor);
3895 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3898 // Non-JS objects have class null.
3900 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3905 context()->Plug(v0);
3909 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3910 // Load the arguments on the stack and call the stub.
3911 SubStringStub stub(isolate());
3912 ZoneList<Expression*>* args = expr->arguments();
3913 DCHECK(args->length() == 3);
3914 VisitForStackValue(args->at(0));
3915 VisitForStackValue(args->at(1));
3916 VisitForStackValue(args->at(2));
3918 context()->Plug(v0);
3922 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3923 // Load the arguments on the stack and call the stub.
3924 RegExpExecStub stub(isolate());
3925 ZoneList<Expression*>* args = expr->arguments();
3926 DCHECK(args->length() == 4);
3927 VisitForStackValue(args->at(0));
3928 VisitForStackValue(args->at(1));
3929 VisitForStackValue(args->at(2));
3930 VisitForStackValue(args->at(3));
3932 context()->Plug(v0);
3936 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3937 ZoneList<Expression*>* args = expr->arguments();
3938 DCHECK(args->length() == 1);
3940 VisitForAccumulatorValue(args->at(0)); // Load the object.
3943 // If the object is a smi return the object.
3944 __ JumpIfSmi(v0, &done);
3945 // If the object is not a value type, return the object.
3946 __ GetObjectType(v0, a1, a1);
3947 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3949 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3952 context()->Plug(v0);
3956 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3957 ZoneList<Expression*>* args = expr->arguments();
3958 DCHECK_EQ(1, args->length());
3960 VisitForAccumulatorValue(args->at(0));
3962 Label materialize_true, materialize_false;
3963 Label* if_true = nullptr;
3964 Label* if_false = nullptr;
3965 Label* fall_through = nullptr;
3966 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3967 &if_false, &fall_through);
3969 __ JumpIfSmi(v0, if_false);
3970 __ GetObjectType(v0, a1, a1);
3971 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3972 Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);
3974 context()->Plug(if_true, if_false);
3978 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3979 ZoneList<Expression*>* args = expr->arguments();
3980 DCHECK(args->length() == 2);
3981 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3982 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3984 VisitForAccumulatorValue(args->at(0)); // Load the object.
3986 Register object = v0;
3987 Register result = v0;
3988 Register scratch0 = t1;
3989 Register scratch1 = a1;
3991 if (index->value() == 0) {
3992 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3994 Label runtime, done;
3995 if (index->value() < JSDate::kFirstUncachedField) {
3996 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3997 __ li(scratch1, Operand(stamp));
3998 __ ld(scratch1, MemOperand(scratch1));
3999 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
4000 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
4001 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
4002 kPointerSize * index->value()));
4006 __ PrepareCallCFunction(2, scratch1);
4007 __ li(a1, Operand(index));
4008 __ Move(a0, object);
4009 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
4013 context()->Plug(result);
4017 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
4018 ZoneList<Expression*>* args = expr->arguments();
4019 DCHECK_EQ(3, args->length());
4021 Register string = v0;
4022 Register index = a1;
4023 Register value = a2;
4025 VisitForStackValue(args->at(0)); // index
4026 VisitForStackValue(args->at(1)); // value
4027 VisitForAccumulatorValue(args->at(2)); // string
4028 __ Pop(index, value);
4030 if (FLAG_debug_code) {
4031 __ SmiTst(value, at);
4032 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
4033 __ SmiTst(index, at);
4034 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
4035 __ SmiUntag(index, index);
4036 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
4037 Register scratch = t1;
4038 __ EmitSeqStringSetCharCheck(
4039 string, index, value, scratch, one_byte_seq_type);
4040 __ SmiTag(index, index);
4043 __ SmiUntag(value, value);
4046 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4048 __ Daddu(at, at, index);
4049 __ sb(value, MemOperand(at));
4050 context()->Plug(string);
4054 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
4055 ZoneList<Expression*>* args = expr->arguments();
4056 DCHECK_EQ(3, args->length());
4058 Register string = v0;
4059 Register index = a1;
4060 Register value = a2;
4062 VisitForStackValue(args->at(0)); // index
4063 VisitForStackValue(args->at(1)); // value
4064 VisitForAccumulatorValue(args->at(2)); // string
4065 __ Pop(index, value);
4067 if (FLAG_debug_code) {
4068 __ SmiTst(value, at);
4069 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
4070 __ SmiTst(index, at);
4071 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
4072 __ SmiUntag(index, index);
4073 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4074 Register scratch = t1;
4075 __ EmitSeqStringSetCharCheck(
4076 string, index, value, scratch, two_byte_seq_type);
4077 __ SmiTag(index, index);
4080 __ SmiUntag(value, value);
4083 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4084 __ dsra(index, index, 32 - 1);
4085 __ Daddu(at, at, index);
4086 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
4087 __ sh(value, MemOperand(at));
4088 context()->Plug(string);
4092 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
4093 // Load the arguments on the stack and call the runtime function.
4094 ZoneList<Expression*>* args = expr->arguments();
4095 DCHECK(args->length() == 2);
4096 VisitForStackValue(args->at(0));
4097 VisitForStackValue(args->at(1));
4098 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4100 context()->Plug(v0);
4104 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4105 ZoneList<Expression*>* args = expr->arguments();
4106 DCHECK(args->length() == 2);
4108 VisitForStackValue(args->at(0)); // Load the object.
4109 VisitForAccumulatorValue(args->at(1)); // Load the value.
4110 __ pop(a1); // v0 = value. a1 = object.
4113 // If the object is a smi, return the value.
4114 __ JumpIfSmi(a1, &done);
4116 // If the object is not a value type, return the value.
4117 __ GetObjectType(a1, a2, a2);
4118 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
4121 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
4122 // Update the write barrier. Save the value as it will be
4123 // overwritten by the write barrier code and is needed afterward.
4125 __ RecordWriteField(
4126 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
4129 context()->Plug(v0);
4133 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4134 ZoneList<Expression*>* args = expr->arguments();
4135 DCHECK_EQ(args->length(), 1);
4137 // Load the argument into a0 and call the stub.
4138 VisitForAccumulatorValue(args->at(0));
4139 __ mov(a0, result_register());
4141 NumberToStringStub stub(isolate());
4143 context()->Plug(v0);
4147 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4148 ZoneList<Expression*>* args = expr->arguments();
4149 DCHECK(args->length() == 1);
4151 VisitForAccumulatorValue(args->at(0));
4154 StringCharFromCodeGenerator generator(v0, a1);
4155 generator.GenerateFast(masm_);
4158 NopRuntimeCallHelper call_helper;
4159 generator.GenerateSlow(masm_, call_helper);
4162 context()->Plug(a1);
4166 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4167 ZoneList<Expression*>* args = expr->arguments();
4168 DCHECK(args->length() == 2);
4170 VisitForStackValue(args->at(0));
4171 VisitForAccumulatorValue(args->at(1));
4172 __ mov(a0, result_register());
4174 Register object = a1;
4175 Register index = a0;
4176 Register result = v0;
4180 Label need_conversion;
4181 Label index_out_of_range;
4183 StringCharCodeAtGenerator generator(object,
4188 &index_out_of_range,
4189 STRING_INDEX_IS_NUMBER);
4190 generator.GenerateFast(masm_);
4193 __ bind(&index_out_of_range);
4194 // When the index is out of range, the spec requires us to return
4196 __ LoadRoot(result, Heap::kNanValueRootIndex);
4199 __ bind(&need_conversion);
4200 // Load the undefined value into the result register, which will
4201 // trigger conversion.
4202 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4205 NopRuntimeCallHelper call_helper;
4206 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4209 context()->Plug(result);
4213 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4214 ZoneList<Expression*>* args = expr->arguments();
4215 DCHECK(args->length() == 2);
4217 VisitForStackValue(args->at(0));
4218 VisitForAccumulatorValue(args->at(1));
4219 __ mov(a0, result_register());
4221 Register object = a1;
4222 Register index = a0;
4223 Register scratch = a3;
4224 Register result = v0;
4228 Label need_conversion;
4229 Label index_out_of_range;
4231 StringCharAtGenerator generator(object,
4237 &index_out_of_range,
4238 STRING_INDEX_IS_NUMBER);
4239 generator.GenerateFast(masm_);
4242 __ bind(&index_out_of_range);
4243 // When the index is out of range, the spec requires us to return
4244 // the empty string.
4245 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4248 __ bind(&need_conversion);
4249 // Move smi zero into the result register, which will trigger
4251 __ li(result, Operand(Smi::FromInt(0)));
4254 NopRuntimeCallHelper call_helper;
4255 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4258 context()->Plug(result);
4262 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4263 ZoneList<Expression*>* args = expr->arguments();
4264 DCHECK_EQ(2, args->length());
4265 VisitForStackValue(args->at(0));
4266 VisitForAccumulatorValue(args->at(1));
4269 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4270 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4272 context()->Plug(v0);
4276 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4277 ZoneList<Expression*>* args = expr->arguments();
4278 DCHECK_EQ(2, args->length());
4280 VisitForStackValue(args->at(0));
4281 VisitForStackValue(args->at(1));
4283 StringCompareStub stub(isolate());
4285 context()->Plug(v0);
4289 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4290 ZoneList<Expression*>* args = expr->arguments();
4291 DCHECK(args->length() >= 2);
4293 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4294 for (int i = 0; i < arg_count + 1; i++) {
4295 VisitForStackValue(args->at(i));
4297 VisitForAccumulatorValue(args->last()); // Function.
4299 Label runtime, done;
4300 // Check for non-function argument (including proxy).
4301 __ JumpIfSmi(v0, &runtime);
4302 __ GetObjectType(v0, a1, a1);
4303 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4305 // InvokeFunction requires the function in a1. Move it in there.
4306 __ mov(a1, result_register());
4307 ParameterCount count(arg_count);
4308 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4309 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4314 __ CallRuntime(Runtime::kCall, args->length());
4317 context()->Plug(v0);
4321 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4322 ZoneList<Expression*>* args = expr->arguments();
4323 DCHECK(args->length() == 2);
4326 VisitForStackValue(args->at(0));
4329 VisitForStackValue(args->at(1));
4330 __ CallRuntime(Runtime::kGetPrototype, 1);
4331 __ Push(result_register());
4333 // Load original constructor into a4.
4334 __ ld(a4, MemOperand(sp, 1 * kPointerSize));
4336 // Check if the calling frame is an arguments adaptor frame.
4337 Label adaptor_frame, args_set_up, runtime;
4338 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4339 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4340 __ Branch(&adaptor_frame, eq, a3,
4341 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4342 // default constructor has no arguments, so no adaptor frame means no args.
4343 __ mov(a0, zero_reg);
4344 __ Branch(&args_set_up);
4346 // Copy arguments from adaptor frame.
4348 __ bind(&adaptor_frame);
4349 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4350 __ SmiUntag(a1, a1);
4354 // Get arguments pointer in a2.
4355 __ dsll(at, a1, kPointerSizeLog2);
4356 __ Daddu(a2, a2, Operand(at));
4357 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4360 // Pre-decrement a2 with kPointerSize on each iteration.
4361 // Pre-decrement in order to skip receiver.
4362 __ Daddu(a2, a2, Operand(-kPointerSize));
4363 __ ld(a3, MemOperand(a2));
4365 __ Daddu(a1, a1, Operand(-1));
4366 __ Branch(&loop, ne, a1, Operand(zero_reg));
4369 __ bind(&args_set_up);
4370 __ dsll(at, a0, kPointerSizeLog2);
4371 __ Daddu(at, at, Operand(sp));
4372 __ ld(a1, MemOperand(at, 0));
4373 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4375 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4376 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4380 context()->Plug(result_register());
4384 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4385 RegExpConstructResultStub stub(isolate());
4386 ZoneList<Expression*>* args = expr->arguments();
4387 DCHECK(args->length() == 3);
4388 VisitForStackValue(args->at(0));
4389 VisitForStackValue(args->at(1));
4390 VisitForAccumulatorValue(args->at(2));
4391 __ mov(a0, result_register());
4395 context()->Plug(v0);
4399 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4400 ZoneList<Expression*>* args = expr->arguments();
4401 DCHECK_EQ(2, args->length());
4403 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4404 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4406 Handle<FixedArray> jsfunction_result_caches(
4407 isolate()->native_context()->jsfunction_result_caches());
4408 if (jsfunction_result_caches->length() <= cache_id) {
4409 __ Abort(kAttemptToUseUndefinedCache);
4410 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4411 context()->Plug(v0);
4415 VisitForAccumulatorValue(args->at(1));
4418 Register cache = a1;
4419 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4420 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4423 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4425 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4428 Label done, not_found;
4429 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4430 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4431 // a2 now holds finger offset as a smi.
4432 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4433 // a3 now points to the start of fixed array elements.
4434 __ SmiScale(at, a2, kPointerSizeLog2);
4435 __ daddu(a3, a3, at);
4436 // a3 now points to key of indexed element of cache.
4437 __ ld(a2, MemOperand(a3));
4438 __ Branch(¬_found, ne, key, Operand(a2));
4440 __ ld(v0, MemOperand(a3, kPointerSize));
4443 __ bind(¬_found);
4444 // Call runtime to perform the lookup.
4445 __ Push(cache, key);
4446 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4449 context()->Plug(v0);
4453 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4454 ZoneList<Expression*>* args = expr->arguments();
4455 VisitForAccumulatorValue(args->at(0));
4457 Label materialize_true, materialize_false;
4458 Label* if_true = NULL;
4459 Label* if_false = NULL;
4460 Label* fall_through = NULL;
4461 context()->PrepareTest(&materialize_true, &materialize_false,
4462 &if_true, &if_false, &fall_through);
4464 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4465 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4467 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4468 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4470 context()->Plug(if_true, if_false);
4474 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4475 ZoneList<Expression*>* args = expr->arguments();
4476 DCHECK(args->length() == 1);
4477 VisitForAccumulatorValue(args->at(0));
4479 __ AssertString(v0);
4481 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4482 __ IndexFromHash(v0, v0);
4484 context()->Plug(v0);
4488 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4489 Label bailout, done, one_char_separator, long_separator,
4490 non_trivial_array, not_size_one_array, loop,
4491 empty_separator_loop, one_char_separator_loop,
4492 one_char_separator_loop_entry, long_separator_loop;
4493 ZoneList<Expression*>* args = expr->arguments();
4494 DCHECK(args->length() == 2);
4495 VisitForStackValue(args->at(1));
4496 VisitForAccumulatorValue(args->at(0));
4498 // All aliases of the same register have disjoint lifetimes.
4499 Register array = v0;
4500 Register elements = no_reg; // Will be v0.
4501 Register result = no_reg; // Will be v0.
4502 Register separator = a1;
4503 Register array_length = a2;
4504 Register result_pos = no_reg; // Will be a2.
4505 Register string_length = a3;
4506 Register string = a4;
4507 Register element = a5;
4508 Register elements_end = a6;
4509 Register scratch1 = a7;
4510 Register scratch2 = t1;
4511 Register scratch3 = t0;
4513 // Separator operand is on the stack.
4516 // Check that the array is a JSArray.
4517 __ JumpIfSmi(array, &bailout);
4518 __ GetObjectType(array, scratch1, scratch2);
4519 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4521 // Check that the array has fast elements.
4522 __ CheckFastElements(scratch1, scratch2, &bailout);
4524 // If the array has length zero, return the empty string.
4525 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4526 __ SmiUntag(array_length);
4527 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4528 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4531 __ bind(&non_trivial_array);
4533 // Get the FixedArray containing array's elements.
4535 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4536 array = no_reg; // End of array's live range.
4538 // Check that all array elements are sequential one-byte strings, and
4539 // accumulate the sum of their lengths, as a smi-encoded value.
4540 __ mov(string_length, zero_reg);
4542 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4543 __ dsll(elements_end, array_length, kPointerSizeLog2);
4544 __ Daddu(elements_end, element, elements_end);
4545 // Loop condition: while (element < elements_end).
4546 // Live values in registers:
4547 // elements: Fixed array of strings.
4548 // array_length: Length of the fixed array of strings (not smi)
4549 // separator: Separator string
4550 // string_length: Accumulated sum of string lengths (smi).
4551 // element: Current array element.
4552 // elements_end: Array end.
4553 if (generate_debug_code_) {
4554 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4558 __ ld(string, MemOperand(element));
4559 __ Daddu(element, element, kPointerSize);
4560 __ JumpIfSmi(string, &bailout);
4561 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4562 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4563 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4564 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4565 __ DadduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4566 __ BranchOnOverflow(&bailout, scratch3);
4567 __ Branch(&loop, lt, element, Operand(elements_end));
4569 // If array_length is 1, return elements[0], a string.
4570 __ Branch(¬_size_one_array, ne, array_length, Operand(1));
4571 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4574 __ bind(¬_size_one_array);
4576 // Live values in registers:
4577 // separator: Separator string
4578 // array_length: Length of the array.
4579 // string_length: Sum of string lengths (smi).
4580 // elements: FixedArray of strings.
4582 // Check that the separator is a flat one-byte string.
4583 __ JumpIfSmi(separator, &bailout);
4584 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4585 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4586 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4588 // Add (separator length times array_length) - separator length to the
4589 // string_length to get the length of the result string. array_length is not
4590 // smi but the other values are, so the result is a smi.
4591 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4592 __ Dsubu(string_length, string_length, Operand(scratch1));
4593 __ SmiUntag(scratch1);
4594 __ Dmul(scratch2, array_length, scratch1);
4595 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4597 __ dsra32(scratch1, scratch2, 0);
4598 __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
4599 __ SmiUntag(string_length);
4600 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4601 __ BranchOnOverflow(&bailout, scratch3);
4603 // Get first element in the array to free up the elements register to be used
4606 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4607 result = elements; // End of live range for elements.
4609 // Live values in registers:
4610 // element: First array element
4611 // separator: Separator string
4612 // string_length: Length of result string (not smi)
4613 // array_length: Length of the array.
4614 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4615 elements_end, &bailout);
4616 // Prepare for looping. Set up elements_end to end of the array. Set
4617 // result_pos to the position of the result where to write the first
4619 __ dsll(elements_end, array_length, kPointerSizeLog2);
4620 __ Daddu(elements_end, element, elements_end);
4621 result_pos = array_length; // End of live range for array_length.
4622 array_length = no_reg;
4623 __ Daddu(result_pos,
4625 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4627 // Check the length of the separator.
4628 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4629 __ li(at, Operand(Smi::FromInt(1)));
4630 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4631 __ Branch(&long_separator, gt, scratch1, Operand(at));
4633 // Empty separator case.
4634 __ bind(&empty_separator_loop);
4635 // Live values in registers:
4636 // result_pos: the position to which we are currently copying characters.
4637 // element: Current array element.
4638 // elements_end: Array end.
4640 // Copy next array element to the result.
4641 __ ld(string, MemOperand(element));
4642 __ Daddu(element, element, kPointerSize);
4643 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4644 __ SmiUntag(string_length);
4645 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4646 __ CopyBytes(string, result_pos, string_length, scratch1);
4647 // End while (element < elements_end).
4648 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4649 DCHECK(result.is(v0));
4652 // One-character separator case.
4653 __ bind(&one_char_separator);
4654 // Replace separator with its one-byte character value.
4655 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4656 // Jump into the loop after the code that copies the separator, so the first
4657 // element is not preceded by a separator.
4658 __ jmp(&one_char_separator_loop_entry);
4660 __ bind(&one_char_separator_loop);
4661 // Live values in registers:
4662 // result_pos: the position to which we are currently copying characters.
4663 // element: Current array element.
4664 // elements_end: Array end.
4665 // separator: Single separator one-byte char (in lower byte).
4667 // Copy the separator character to the result.
4668 __ sb(separator, MemOperand(result_pos));
4669 __ Daddu(result_pos, result_pos, 1);
4671 // Copy next array element to the result.
4672 __ bind(&one_char_separator_loop_entry);
4673 __ ld(string, MemOperand(element));
4674 __ Daddu(element, element, kPointerSize);
4675 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4676 __ SmiUntag(string_length);
4677 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4678 __ CopyBytes(string, result_pos, string_length, scratch1);
4679 // End while (element < elements_end).
4680 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4681 DCHECK(result.is(v0));
4684 // Long separator case (separator is more than one character). Entry is at the
4685 // label long_separator below.
4686 __ bind(&long_separator_loop);
4687 // Live values in registers:
4688 // result_pos: the position to which we are currently copying characters.
4689 // element: Current array element.
4690 // elements_end: Array end.
4691 // separator: Separator string.
4693 // Copy the separator to the result.
4694 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4695 __ SmiUntag(string_length);
4698 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4699 __ CopyBytes(string, result_pos, string_length, scratch1);
4701 __ bind(&long_separator);
4702 __ ld(string, MemOperand(element));
4703 __ Daddu(element, element, kPointerSize);
4704 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4705 __ SmiUntag(string_length);
4706 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4707 __ CopyBytes(string, result_pos, string_length, scratch1);
4708 // End while (element < elements_end).
4709 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4710 DCHECK(result.is(v0));
4714 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4716 context()->Plug(v0);
4720 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4721 DCHECK(expr->arguments()->length() == 0);
4722 ExternalReference debug_is_active =
4723 ExternalReference::debug_is_active_address(isolate());
4724 __ li(at, Operand(debug_is_active));
4725 __ lbu(v0, MemOperand(at));
4727 context()->Plug(v0);
4731 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4732 // Assert: expr === CallRuntime("ReflectConstruct")
4733 DCHECK_EQ(1, expr->arguments()->length());
4734 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4736 ZoneList<Expression*>* args = call->arguments();
4737 DCHECK_EQ(3, args->length());
4739 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4740 DCHECK_NOT_NULL(super_call_ref);
4742 // Load ReflectConstruct function
4743 EmitLoadJSRuntimeFunction(call);
4745 // Push the target function under the receiver.
4746 __ ld(at, MemOperand(sp, 0));
4748 __ sd(v0, MemOperand(sp, kPointerSize));
4750 // Push super constructor
4751 EmitLoadSuperConstructor(super_call_ref);
4752 __ Push(result_register());
4754 // Push arguments array
4755 VisitForStackValue(args->at(1));
4758 DCHECK(args->at(2)->IsVariableProxy());
4759 VisitForStackValue(args->at(2));
4761 EmitCallJSRuntimeFunction(call);
4763 // Restore context register.
4764 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4765 context()->DropAndPlug(1, v0);
4767 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4768 EmitInitializeThisAfterSuper(super_call_ref);
4772 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4773 // Push the builtins object as the receiver.
4774 Register receiver = LoadDescriptor::ReceiverRegister();
4775 __ ld(receiver, GlobalObjectOperand());
4776 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4779 // Load the function from the receiver.
4780 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4781 __ li(LoadDescriptor::SlotRegister(),
4782 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4783 CallLoadIC(NOT_INSIDE_TYPEOF);
4787 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4788 ZoneList<Expression*>* args = expr->arguments();
4789 int arg_count = args->length();
4791 SetCallPosition(expr, arg_count);
4792 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4793 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4798 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4799 ZoneList<Expression*>* args = expr->arguments();
4800 int arg_count = args->length();
4802 if (expr->is_jsruntime()) {
4803 Comment cmnt(masm_, "[ CallRuntime");
4804 EmitLoadJSRuntimeFunction(expr);
4806 // Push the target function under the receiver.
4807 __ ld(at, MemOperand(sp, 0));
4809 __ sd(v0, MemOperand(sp, kPointerSize));
4811 // Push the arguments ("left-to-right").
4812 for (int i = 0; i < arg_count; i++) {
4813 VisitForStackValue(args->at(i));
4816 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4817 EmitCallJSRuntimeFunction(expr);
4819 // Restore context register.
4820 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4822 context()->DropAndPlug(1, v0);
4824 const Runtime::Function* function = expr->function();
4825 switch (function->function_id) {
4826 #define CALL_INTRINSIC_GENERATOR(Name) \
4827 case Runtime::kInline##Name: { \
4828 Comment cmnt(masm_, "[ Inline" #Name); \
4829 return Emit##Name(expr); \
4831 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4832 #undef CALL_INTRINSIC_GENERATOR
4834 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4835 // Push the arguments ("left-to-right").
4836 for (int i = 0; i < arg_count; i++) {
4837 VisitForStackValue(args->at(i));
4840 // Call the C runtime function.
4841 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4842 __ CallRuntime(expr->function(), arg_count);
4843 context()->Plug(v0);
4850 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4851 switch (expr->op()) {
4852 case Token::DELETE: {
4853 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4854 Property* property = expr->expression()->AsProperty();
4855 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4857 if (property != NULL) {
4858 VisitForStackValue(property->obj());
4859 VisitForStackValue(property->key());
4860 __ li(a1, Operand(Smi::FromInt(language_mode())));
4862 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4863 context()->Plug(v0);
4864 } else if (proxy != NULL) {
4865 Variable* var = proxy->var();
4866 // Delete of an unqualified identifier is disallowed in strict mode but
4867 // "delete this" is allowed.
4868 bool is_this = var->HasThisName(isolate());
4869 DCHECK(is_sloppy(language_mode()) || is_this);
4870 if (var->IsUnallocatedOrGlobalSlot()) {
4871 __ ld(a2, GlobalObjectOperand());
4872 __ li(a1, Operand(var->name()));
4873 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4874 __ Push(a2, a1, a0);
4875 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4876 context()->Plug(v0);
4877 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4878 // Result of deleting non-global, non-dynamic variables is false.
4879 // The subexpression does not have side effects.
4880 context()->Plug(is_this);
4882 // Non-global variable. Call the runtime to try to delete from the
4883 // context where the variable was introduced.
4884 DCHECK(!context_register().is(a2));
4885 __ li(a2, Operand(var->name()));
4886 __ Push(context_register(), a2);
4887 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4888 context()->Plug(v0);
4891 // Result of deleting non-property, non-variable reference is true.
4892 // The subexpression may have side effects.
4893 VisitForEffect(expr->expression());
4894 context()->Plug(true);
4900 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4901 VisitForEffect(expr->expression());
4902 context()->Plug(Heap::kUndefinedValueRootIndex);
4907 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4908 if (context()->IsEffect()) {
4909 // Unary NOT has no side effects so it's only necessary to visit the
4910 // subexpression. Match the optimizing compiler by not branching.
4911 VisitForEffect(expr->expression());
4912 } else if (context()->IsTest()) {
4913 const TestContext* test = TestContext::cast(context());
4914 // The labels are swapped for the recursive call.
4915 VisitForControl(expr->expression(),
4916 test->false_label(),
4918 test->fall_through());
4919 context()->Plug(test->true_label(), test->false_label());
4921 // We handle value contexts explicitly rather than simply visiting
4922 // for control and plugging the control flow into the context,
4923 // because we need to prepare a pair of extra administrative AST ids
4924 // for the optimizing compiler.
4925 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4926 Label materialize_true, materialize_false, done;
4927 VisitForControl(expr->expression(),
4931 __ bind(&materialize_true);
4932 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4933 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4934 if (context()->IsStackValue()) __ push(v0);
4936 __ bind(&materialize_false);
4937 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4938 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4939 if (context()->IsStackValue()) __ push(v0);
4945 case Token::TYPEOF: {
4946 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4948 AccumulatorValueContext context(this);
4949 VisitForTypeofValue(expr->expression());
4952 TypeofStub typeof_stub(isolate());
4953 __ CallStub(&typeof_stub);
4954 context()->Plug(v0);
4964 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4965 DCHECK(expr->expression()->IsValidReferenceExpression());
4967 Comment cmnt(masm_, "[ CountOperation");
4969 Property* prop = expr->expression()->AsProperty();
4970 LhsKind assign_type = Property::GetAssignType(prop);
4972 // Evaluate expression and get value.
4973 if (assign_type == VARIABLE) {
4974 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4975 AccumulatorValueContext context(this);
4976 EmitVariableLoad(expr->expression()->AsVariableProxy());
4978 // Reserve space for result of postfix operation.
4979 if (expr->is_postfix() && !context()->IsEffect()) {
4980 __ li(at, Operand(Smi::FromInt(0)));
4983 switch (assign_type) {
4984 case NAMED_PROPERTY: {
4985 // Put the object both on the stack and in the register.
4986 VisitForStackValue(prop->obj());
4987 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4988 EmitNamedPropertyLoad(prop);
4992 case NAMED_SUPER_PROPERTY: {
4993 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4994 VisitForAccumulatorValue(
4995 prop->obj()->AsSuperPropertyReference()->home_object());
4996 __ Push(result_register());
4997 const Register scratch = a1;
4998 __ ld(scratch, MemOperand(sp, kPointerSize));
4999 __ Push(scratch, result_register());
5000 EmitNamedSuperPropertyLoad(prop);
5004 case KEYED_SUPER_PROPERTY: {
5005 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
5006 VisitForAccumulatorValue(
5007 prop->obj()->AsSuperPropertyReference()->home_object());
5008 const Register scratch = a1;
5009 const Register scratch1 = a4;
5010 __ Move(scratch, result_register());
5011 VisitForAccumulatorValue(prop->key());
5012 __ Push(scratch, result_register());
5013 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
5014 __ Push(scratch1, scratch, result_register());
5015 EmitKeyedSuperPropertyLoad(prop);
5019 case KEYED_PROPERTY: {
5020 VisitForStackValue(prop->obj());
5021 VisitForStackValue(prop->key());
5022 __ ld(LoadDescriptor::ReceiverRegister(),
5023 MemOperand(sp, 1 * kPointerSize));
5024 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
5025 EmitKeyedPropertyLoad(prop);
5034 // We need a second deoptimization point after loading the value
5035 // in case evaluating the property load my have a side effect.
5036 if (assign_type == VARIABLE) {
5037 PrepareForBailout(expr->expression(), TOS_REG);
5039 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
5042 // Inline smi case if we are in a loop.
5043 Label stub_call, done;
5044 JumpPatchSite patch_site(masm_);
5046 int count_value = expr->op() == Token::INC ? 1 : -1;
5048 if (ShouldInlineSmiCase(expr->op())) {
5050 patch_site.EmitJumpIfNotSmi(v0, &slow);
5052 // Save result for postfix expressions.
5053 if (expr->is_postfix()) {
5054 if (!context()->IsEffect()) {
5055 // Save the result on the stack. If we have a named or keyed property
5056 // we store the result under the receiver that is currently on top
5058 switch (assign_type) {
5062 case NAMED_PROPERTY:
5063 __ sd(v0, MemOperand(sp, kPointerSize));
5065 case NAMED_SUPER_PROPERTY:
5066 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5068 case KEYED_PROPERTY:
5069 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5071 case KEYED_SUPER_PROPERTY:
5072 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
5078 Register scratch1 = a1;
5079 Register scratch2 = a4;
5080 __ li(scratch1, Operand(Smi::FromInt(count_value)));
5081 __ DadduAndCheckForOverflow(v0, v0, scratch1, scratch2);
5082 __ BranchOnNoOverflow(&done, scratch2);
5083 // Call stub. Undo operation first.
5088 if (!is_strong(language_mode())) {
5089 ToNumberStub convert_stub(isolate());
5090 __ CallStub(&convert_stub);
5091 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5094 // Save result for postfix expressions.
5095 if (expr->is_postfix()) {
5096 if (!context()->IsEffect()) {
5097 // Save the result on the stack. If we have a named or keyed property
5098 // we store the result under the receiver that is currently on top
5100 switch (assign_type) {
5104 case NAMED_PROPERTY:
5105 __ sd(v0, MemOperand(sp, kPointerSize));
5107 case NAMED_SUPER_PROPERTY:
5108 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5110 case KEYED_PROPERTY:
5111 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
5113 case KEYED_SUPER_PROPERTY:
5114 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
5120 __ bind(&stub_call);
5122 __ li(a0, Operand(Smi::FromInt(count_value)));
5124 SetExpressionPosition(expr);
5127 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
5128 strength(language_mode())).code();
5129 CallIC(code, expr->CountBinOpFeedbackId());
5130 patch_site.EmitPatchInfo();
5133 if (is_strong(language_mode())) {
5134 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5136 // Store the value returned in v0.
5137 switch (assign_type) {
5139 if (expr->is_postfix()) {
5140 { EffectContext context(this);
5141 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5142 Token::ASSIGN, expr->CountSlot());
5143 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5146 // For all contexts except EffectConstant we have the result on
5147 // top of the stack.
5148 if (!context()->IsEffect()) {
5149 context()->PlugTOS();
5152 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5153 Token::ASSIGN, expr->CountSlot());
5154 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5155 context()->Plug(v0);
5158 case NAMED_PROPERTY: {
5159 __ mov(StoreDescriptor::ValueRegister(), result_register());
5160 __ li(StoreDescriptor::NameRegister(),
5161 Operand(prop->key()->AsLiteral()->value()));
5162 __ pop(StoreDescriptor::ReceiverRegister());
5163 if (FLAG_vector_stores) {
5164 EmitLoadStoreICSlot(expr->CountSlot());
5167 CallStoreIC(expr->CountStoreFeedbackId());
5169 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5170 if (expr->is_postfix()) {
5171 if (!context()->IsEffect()) {
5172 context()->PlugTOS();
5175 context()->Plug(v0);
5179 case NAMED_SUPER_PROPERTY: {
5180 EmitNamedSuperPropertyStore(prop);
5181 if (expr->is_postfix()) {
5182 if (!context()->IsEffect()) {
5183 context()->PlugTOS();
5186 context()->Plug(v0);
5190 case KEYED_SUPER_PROPERTY: {
5191 EmitKeyedSuperPropertyStore(prop);
5192 if (expr->is_postfix()) {
5193 if (!context()->IsEffect()) {
5194 context()->PlugTOS();
5197 context()->Plug(v0);
5201 case KEYED_PROPERTY: {
5202 __ mov(StoreDescriptor::ValueRegister(), result_register());
5203 __ Pop(StoreDescriptor::ReceiverRegister(),
5204 StoreDescriptor::NameRegister());
5206 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5207 if (FLAG_vector_stores) {
5208 EmitLoadStoreICSlot(expr->CountSlot());
5211 CallIC(ic, expr->CountStoreFeedbackId());
5213 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5214 if (expr->is_postfix()) {
5215 if (!context()->IsEffect()) {
5216 context()->PlugTOS();
5219 context()->Plug(v0);
5227 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5228 Expression* sub_expr,
5229 Handle<String> check) {
5230 Label materialize_true, materialize_false;
5231 Label* if_true = NULL;
5232 Label* if_false = NULL;
5233 Label* fall_through = NULL;
5234 context()->PrepareTest(&materialize_true, &materialize_false,
5235 &if_true, &if_false, &fall_through);
5237 { AccumulatorValueContext context(this);
5238 VisitForTypeofValue(sub_expr);
5240 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5242 Factory* factory = isolate()->factory();
5243 if (String::Equals(check, factory->number_string())) {
5244 __ JumpIfSmi(v0, if_true);
5245 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5246 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5247 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5248 } else if (String::Equals(check, factory->string_string())) {
5249 __ JumpIfSmi(v0, if_false);
5250 // Check for undetectable objects => false.
5251 __ GetObjectType(v0, v0, a1);
5252 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5253 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5254 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5255 Split(eq, a1, Operand(zero_reg),
5256 if_true, if_false, fall_through);
5257 } else if (String::Equals(check, factory->symbol_string())) {
5258 __ JumpIfSmi(v0, if_false);
5259 __ GetObjectType(v0, v0, a1);
5260 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5261 } else if (String::Equals(check, factory->float32x4_string())) {
5262 __ JumpIfSmi(v0, if_false);
5263 __ GetObjectType(v0, v0, a1);
5264 Split(eq, a1, Operand(FLOAT32X4_TYPE), if_true, if_false, fall_through);
5265 } else if (String::Equals(check, factory->boolean_string())) {
5266 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5267 __ Branch(if_true, eq, v0, Operand(at));
5268 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5269 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5270 } else if (String::Equals(check, factory->undefined_string())) {
5271 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5272 __ Branch(if_true, eq, v0, Operand(at));
5273 __ JumpIfSmi(v0, if_false);
5274 // Check for undetectable objects => true.
5275 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5276 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5277 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5278 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5279 } else if (String::Equals(check, factory->function_string())) {
5280 __ JumpIfSmi(v0, if_false);
5281 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5282 __ GetObjectType(v0, v0, a1);
5283 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5284 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5285 if_true, if_false, fall_through);
5286 } else if (String::Equals(check, factory->object_string())) {
5287 __ JumpIfSmi(v0, if_false);
5288 __ LoadRoot(at, Heap::kNullValueRootIndex);
5289 __ Branch(if_true, eq, v0, Operand(at));
5290 // Check for JS objects => true.
5291 __ GetObjectType(v0, v0, a1);
5292 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5293 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5294 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5295 // Check for undetectable objects => false.
5296 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5297 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5298 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5300 if (if_false != fall_through) __ jmp(if_false);
5302 context()->Plug(if_true, if_false);
5306 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5307 Comment cmnt(masm_, "[ CompareOperation");
5308 SetExpressionPosition(expr);
5310 // First we try a fast inlined version of the compare when one of
5311 // the operands is a literal.
5312 if (TryLiteralCompare(expr)) return;
5314 // Always perform the comparison for its control flow. Pack the result
5315 // into the expression's context after the comparison is performed.
5316 Label materialize_true, materialize_false;
5317 Label* if_true = NULL;
5318 Label* if_false = NULL;
5319 Label* fall_through = NULL;
5320 context()->PrepareTest(&materialize_true, &materialize_false,
5321 &if_true, &if_false, &fall_through);
5323 Token::Value op = expr->op();
5324 VisitForStackValue(expr->left());
5327 VisitForStackValue(expr->right());
5328 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5329 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5330 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5331 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5334 case Token::INSTANCEOF: {
5335 VisitForStackValue(expr->right());
5336 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5338 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5339 // The stub returns 0 for true.
5340 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5345 VisitForAccumulatorValue(expr->right());
5346 Condition cc = CompareIC::ComputeCondition(op);
5347 __ mov(a0, result_register());
5350 bool inline_smi_code = ShouldInlineSmiCase(op);
5351 JumpPatchSite patch_site(masm_);
5352 if (inline_smi_code) {
5354 __ Or(a2, a0, Operand(a1));
5355 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5356 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5357 __ bind(&slow_case);
5360 Handle<Code> ic = CodeFactory::CompareIC(
5361 isolate(), op, strength(language_mode())).code();
5362 CallIC(ic, expr->CompareOperationFeedbackId());
5363 patch_site.EmitPatchInfo();
5364 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5365 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5369 // Convert the result of the comparison into one expected for this
5370 // expression's context.
5371 context()->Plug(if_true, if_false);
5375 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5376 Expression* sub_expr,
5378 Label materialize_true, materialize_false;
5379 Label* if_true = NULL;
5380 Label* if_false = NULL;
5381 Label* fall_through = NULL;
5382 context()->PrepareTest(&materialize_true, &materialize_false,
5383 &if_true, &if_false, &fall_through);
5385 VisitForAccumulatorValue(sub_expr);
5386 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5387 __ mov(a0, result_register());
5388 if (expr->op() == Token::EQ_STRICT) {
5389 Heap::RootListIndex nil_value = nil == kNullValue ?
5390 Heap::kNullValueRootIndex :
5391 Heap::kUndefinedValueRootIndex;
5392 __ LoadRoot(a1, nil_value);
5393 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5395 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5396 CallIC(ic, expr->CompareOperationFeedbackId());
5397 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5399 context()->Plug(if_true, if_false);
5403 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5404 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5405 context()->Plug(v0);
5409 Register FullCodeGenerator::result_register() {
5414 Register FullCodeGenerator::context_register() {
5419 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5420 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5421 DCHECK(IsAligned(frame_offset, kPointerSize));
5422 // __ sw(value, MemOperand(fp, frame_offset));
5423 __ sd(value, MemOperand(fp, frame_offset));
5427 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5428 __ ld(dst, ContextOperand(cp, context_index));
5432 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5433 Scope* declaration_scope = scope()->DeclarationScope();
5434 if (declaration_scope->is_script_scope() ||
5435 declaration_scope->is_module_scope()) {
5436 // Contexts nested in the native context have a canonical empty function
5437 // as their closure, not the anonymous closure containing the global
5438 // code. Pass a smi sentinel and let the runtime look up the empty
5440 __ li(at, Operand(Smi::FromInt(0)));
5441 } else if (declaration_scope->is_eval_scope()) {
5442 // Contexts created by a call to eval have the same closure as the
5443 // context calling eval, not the anonymous closure containing the eval
5444 // code. Fetch it from the context.
5445 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5447 DCHECK(declaration_scope->is_function_scope());
5448 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5454 // ----------------------------------------------------------------------------
5455 // Non-local control flow support.
5457 void FullCodeGenerator::EnterFinallyBlock() {
5458 DCHECK(!result_register().is(a1));
5459 // Store result register while executing finally block.
5460 __ push(result_register());
5461 // Cook return address in link register to stack (smi encoded Code* delta).
5462 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
5465 // Store result register while executing finally block.
5468 // Store pending message while executing finally block.
5469 ExternalReference pending_message_obj =
5470 ExternalReference::address_of_pending_message_obj(isolate());
5471 __ li(at, Operand(pending_message_obj));
5472 __ ld(a1, MemOperand(at));
5475 ClearPendingMessage();
5479 void FullCodeGenerator::ExitFinallyBlock() {
5480 DCHECK(!result_register().is(a1));
5481 // Restore pending message from stack.
5483 ExternalReference pending_message_obj =
5484 ExternalReference::address_of_pending_message_obj(isolate());
5485 __ li(at, Operand(pending_message_obj));
5486 __ sd(a1, MemOperand(at));
5488 // Restore result register from stack.
5491 // Uncook return address and return.
5492 __ pop(result_register());
5495 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5500 void FullCodeGenerator::ClearPendingMessage() {
5501 DCHECK(!result_register().is(a1));
5502 ExternalReference pending_message_obj =
5503 ExternalReference::address_of_pending_message_obj(isolate());
5504 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
5505 __ li(at, Operand(pending_message_obj));
5506 __ sd(a1, MemOperand(at));
5510 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5511 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5512 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
5513 Operand(SmiFromSlot(slot)));
5520 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5522 BackEdgeState target_state,
5523 Code* replacement_code) {
5524 static const int kInstrSize = Assembler::kInstrSize;
5525 Address branch_address = pc - 8 * kInstrSize;
5526 CodePatcher patcher(branch_address, 1);
5528 switch (target_state) {
5530 // slt at, a3, zero_reg (in case of count based interrupts)
5531 // beq at, zero_reg, ok
5532 // lui t9, <interrupt stub address> upper
5533 // ori t9, <interrupt stub address> u-middle
5535 // ori t9, <interrupt stub address> lower
5538 // ok-label ----- pc_after points here
5539 patcher.masm()->slt(at, a3, zero_reg);
5541 case ON_STACK_REPLACEMENT:
5542 case OSR_AFTER_STACK_CHECK:
5543 // addiu at, zero_reg, 1
5544 // beq at, zero_reg, ok ;; Not changed
5545 // lui t9, <on-stack replacement address> upper
5546 // ori t9, <on-stack replacement address> middle
5548 // ori t9, <on-stack replacement address> lower
5549 // jalr t9 ;; Not changed
5550 // nop ;; Not changed
5551 // ok-label ----- pc_after points here
5552 patcher.masm()->daddiu(at, zero_reg, 1);
5555 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5556 // Replace the stack check address in the load-immediate (6-instr sequence)
5557 // with the entry address of the replacement code.
5558 Assembler::set_target_address_at(pc_immediate_load_address,
5559 replacement_code->entry());
5561 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5562 unoptimized_code, pc_immediate_load_address, replacement_code);
5566 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5568 Code* unoptimized_code,
5570 static const int kInstrSize = Assembler::kInstrSize;
5571 Address branch_address = pc - 8 * kInstrSize;
5572 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5574 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5575 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5576 DCHECK(reinterpret_cast<uint64_t>(
5577 Assembler::target_address_at(pc_immediate_load_address)) ==
5578 reinterpret_cast<uint64_t>(
5579 isolate->builtins()->InterruptCheck()->entry()));
5583 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5585 if (reinterpret_cast<uint64_t>(
5586 Assembler::target_address_at(pc_immediate_load_address)) ==
5587 reinterpret_cast<uint64_t>(
5588 isolate->builtins()->OnStackReplacement()->entry())) {
5589 return ON_STACK_REPLACEMENT;
5592 DCHECK(reinterpret_cast<uint64_t>(
5593 Assembler::target_address_at(pc_immediate_load_address)) ==
5594 reinterpret_cast<uint64_t>(
5595 isolate->builtins()->OsrAfterStackCheck()->entry()));
5596 return OSR_AFTER_STACK_CHECK;
5600 } // namespace internal
5603 #endif // V8_TARGET_ARCH_MIPS64