1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #if V8_TARGET_ARCH_MIPS64
7 // Note on Mips implementation:
9 // The result_register() for mips is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first
11 // parameter to a function is defined to be 'a0'. So there are many
12 // places where we have to move a previous result in v0 to a0 for the
13 // next call: mov(a0, v0). This is not needed on the other architectures.
15 #include "src/code-factory.h"
16 #include "src/code-stubs.h"
17 #include "src/codegen.h"
18 #include "src/compiler.h"
19 #include "src/debug/debug.h"
20 #include "src/full-codegen/full-codegen.h"
21 #include "src/ic/ic.h"
22 #include "src/parser.h"
23 #include "src/scopes.h"
25 #include "src/mips64/code-stubs-mips64.h"
26 #include "src/mips64/macro-assembler-mips64.h"
31 #define __ ACCESS_MASM(masm_)
34 // A patch site is a location in the code which it is possible to patch. This
35 // class has a number of methods to emit the code which is patchable and the
36 // method EmitPatchInfo to record a marker back to the patchable code. This
37 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
38 // (raw 16 bit immediate value is used) is the delta from the pc to the first
39 // instruction of the patchable code.
40 // The marker instruction is effectively a NOP (dest is zero_reg) and will
41 // never be emitted by normal code.
42 class JumpPatchSite BASE_EMBEDDED {
44 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
46 info_emitted_ = false;
51 DCHECK(patch_site_.is_bound() == info_emitted_);
54 // When initially emitting this ensure that a jump is always generated to skip
55 // the inlined smi code.
56 void EmitJumpIfNotSmi(Register reg, Label* target) {
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
59 __ bind(&patch_site_);
61 // Always taken before patched.
62 __ BranchShort(target, eq, at, Operand(zero_reg));
65 // When initially emitting this ensure that a jump is never generated to skip
66 // the inlined smi code.
67 void EmitJumpIfSmi(Register reg, Label* target) {
68 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
69 DCHECK(!patch_site_.is_bound() && !info_emitted_);
70 __ bind(&patch_site_);
72 // Never taken before patched.
73 __ BranchShort(target, ne, at, Operand(zero_reg));
76 void EmitPatchInfo() {
77 if (patch_site_.is_bound()) {
78 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
79 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
80 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
85 __ nop(); // Signals no inlined code.
90 MacroAssembler* masm_;
98 // Generate code for a JS function. On entry to the function the receiver
99 // and arguments have been pushed on the stack left to right. The actual
100 // argument count matches the formal parameter count expected by the
103 // The live registers are:
104 // o a1: the JS function object being called (i.e. ourselves)
106 // o fp: our caller's frame pointer
107 // o sp: stack pointer
108 // o ra: return address
110 // The function builds a JS frame. Please see JavaScriptFrameConstants in
111 // frames-mips.h for its layout.
112 void FullCodeGenerator::Generate() {
113 CompilationInfo* info = info_;
114 profiling_counter_ = isolate()->factory()->NewCell(
115 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
116 SetFunctionPosition(literal());
117 Comment cmnt(masm_, "[ function compiled by full code generator");
119 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
122 if (strlen(FLAG_stop_at) > 0 &&
123 info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
128 // Sloppy mode functions and builtins need to replace the receiver with the
129 // global proxy when called as functions (without an explicit receiver
131 if (info->MustReplaceUndefinedReceiverWithGlobalProxy()) {
133 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
134 __ ld(at, MemOperand(sp, receiver_offset));
135 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
136 __ Branch(&ok, ne, a2, Operand(at));
138 __ ld(a2, GlobalObjectOperand());
139 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
141 __ sd(a2, MemOperand(sp, receiver_offset));
144 // Open a frame scope to indicate that there is a frame on the stack. The
145 // MANUAL indicates that the scope shouldn't actually generate code to set up
146 // the frame (that is done below).
147 FrameScope frame_scope(masm_, StackFrame::MANUAL);
148 info->set_prologue_offset(masm_->pc_offset());
149 __ Prologue(info->IsCodePreAgingActive());
150 info->AddNoFrameRange(0, masm_->pc_offset());
152 { Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots.
155 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
156 if (locals_count > 0) {
157 if (locals_count >= 128) {
159 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
160 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
161 __ Branch(&ok, hs, t1, Operand(a2));
162 __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
165 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
166 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
167 if (locals_count >= kMaxPushes) {
168 int loop_iterations = locals_count / kMaxPushes;
169 __ li(a2, Operand(loop_iterations));
171 __ bind(&loop_header);
173 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
174 for (int i = 0; i < kMaxPushes; i++) {
175 __ sd(t1, MemOperand(sp, i * kPointerSize));
177 // Continue loop if not done.
178 __ Dsubu(a2, a2, Operand(1));
179 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
181 int remaining = locals_count % kMaxPushes;
182 // Emit the remaining pushes.
183 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
184 for (int i = 0; i < remaining; i++) {
185 __ sd(t1, MemOperand(sp, i * kPointerSize));
190 bool function_in_register_a1 = true;
192 // Possibly allocate a local context.
193 if (info->scope()->num_heap_slots() > 0) {
194 Comment cmnt(masm_, "[ Allocate context");
195 // Argument to NewContext is the function, which is still in a1.
196 bool need_write_barrier = true;
197 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
198 if (info->scope()->is_script_scope()) {
200 __ Push(info->scope()->GetScopeInfo(info->isolate()));
201 __ CallRuntime(Runtime::kNewScriptContext, 2);
202 } else if (slots <= FastNewContextStub::kMaximumSlots) {
203 FastNewContextStub stub(isolate(), slots);
205 // Result of FastNewContextStub is always in new space.
206 need_write_barrier = false;
209 __ CallRuntime(Runtime::kNewFunctionContext, 1);
211 function_in_register_a1 = false;
212 // Context is returned in v0. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp.
215 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters();
218 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
219 for (int i = first_parameter; i < num_parameters; i++) {
220 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
221 if (var->IsContextSlot()) {
222 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223 (num_parameters - 1 - i) * kPointerSize;
224 // Load parameter from stack.
225 __ ld(a0, MemOperand(fp, parameter_offset));
226 // Store it in the context.
227 MemOperand target = ContextOperand(cp, var->index());
230 // Update the write barrier.
231 if (need_write_barrier) {
232 __ RecordWriteContextSlot(
233 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
234 } else if (FLAG_debug_code) {
236 __ JumpIfInNewSpace(cp, a0, &done);
237 __ Abort(kExpectedNewSpaceObject);
244 PrepareForBailoutForId(BailoutId::Prologue(), NO_REGISTERS);
245 // Function register is trashed in case we bailout here. But since that
246 // could happen only when we allocate a context the value of
247 // |function_in_register_a1| is correct.
249 // Possibly set up a local binding to the this function which is used in
250 // derived constructors with super calls.
251 Variable* this_function_var = scope()->this_function_var();
252 if (this_function_var != nullptr) {
253 Comment cmnt(masm_, "[ This function");
254 if (!function_in_register_a1) {
255 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
256 // The write barrier clobbers register again, keep it marked as such.
258 SetVar(this_function_var, a1, a2, a3);
261 Variable* new_target_var = scope()->new_target_var();
262 if (new_target_var != nullptr) {
263 Comment cmnt(masm_, "[ new.target");
264 // Get the frame pointer for the calling frame.
265 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
267 // Skip the arguments adaptor frame if it exists.
268 Label check_frame_marker;
269 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
270 __ Branch(&check_frame_marker, ne, a1,
271 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
272 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
274 // Check the marker in the calling frame.
275 __ bind(&check_frame_marker);
276 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
277 function_in_register_a1 = false;
279 Label non_construct_frame, done;
280 __ Branch(&non_construct_frame, ne, a1,
281 Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
284 MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
287 __ bind(&non_construct_frame);
288 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
291 SetVar(new_target_var, v0, a2, a3);
294 Variable* arguments = scope()->arguments();
295 if (arguments != NULL) {
296 // Function uses arguments object.
297 Comment cmnt(masm_, "[ Allocate arguments object");
298 if (!function_in_register_a1) {
299 // Load this again, if it's used by the local context below.
300 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
304 // Receiver is just before the parameters on the caller's stack.
305 int num_parameters = info->scope()->num_parameters();
306 int offset = num_parameters * kPointerSize;
308 Operand(StandardFrameConstants::kCallerSPOffset + offset));
309 __ li(a1, Operand(Smi::FromInt(num_parameters)));
312 // Arguments to ArgumentsAccessStub:
313 // function, receiver address, parameter count.
314 // The stub will rewrite receiever and parameter count if the previous
315 // stack frame was an arguments adapter frame.
316 ArgumentsAccessStub::Type type;
317 if (is_strict(language_mode()) || !has_simple_parameters()) {
318 type = ArgumentsAccessStub::NEW_STRICT;
319 } else if (literal()->has_duplicate_parameters()) {
320 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
322 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
324 ArgumentsAccessStub stub(isolate(), type);
327 SetVar(arguments, v0, a1, a2);
331 __ CallRuntime(Runtime::kTraceEnter, 0);
334 // Visit the declarations and body unless there is an illegal
336 if (scope()->HasIllegalRedeclaration()) {
337 Comment cmnt(masm_, "[ Declarations");
338 VisitForEffect(scope()->GetIllegalRedeclaration());
341 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
342 { Comment cmnt(masm_, "[ Declarations");
343 VisitDeclarations(scope()->declarations());
346 // Assert that the declarations do not use ICs. Otherwise the debugger
347 // won't be able to redirect a PC at an IC to the correct IC in newly
349 DCHECK_EQ(0, ic_total_count_);
351 { Comment cmnt(masm_, "[ Stack check");
352 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
354 __ LoadRoot(at, Heap::kStackLimitRootIndex);
355 __ Branch(&ok, hs, sp, Operand(at));
356 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
357 PredictableCodeSizeScope predictable(masm_,
358 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
359 __ Call(stack_check, RelocInfo::CODE_TARGET);
363 { Comment cmnt(masm_, "[ Body");
364 DCHECK(loop_depth() == 0);
366 VisitStatements(literal()->body());
368 DCHECK(loop_depth() == 0);
372 // Always emit a 'return undefined' in case control fell off the end of
374 { Comment cmnt(masm_, "[ return <undefined>;");
375 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
377 EmitReturnSequence();
381 void FullCodeGenerator::ClearAccumulator() {
382 DCHECK(Smi::FromInt(0) == 0);
383 __ mov(v0, zero_reg);
387 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
388 __ li(a2, Operand(profiling_counter_));
389 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
390 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
391 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
395 void FullCodeGenerator::EmitProfilingCounterReset() {
396 int reset_value = FLAG_interrupt_budget;
397 if (info_->is_debug()) {
398 // Detect debug break requests as soon as possible.
399 reset_value = FLAG_interrupt_budget >> 4;
401 __ li(a2, Operand(profiling_counter_));
402 __ li(a3, Operand(Smi::FromInt(reset_value)));
403 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
407 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
408 Label* back_edge_target) {
409 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
410 // to make sure it is constant. Branch may emit a skip-or-jump sequence
411 // instead of the normal Branch. It seems that the "skip" part of that
412 // sequence is about as long as this Branch would be so it is safe to ignore
414 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
415 Comment cmnt(masm_, "[ Back edge bookkeeping");
417 DCHECK(back_edge_target->is_bound());
418 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
419 int weight = Min(kMaxBackEdgeWeight,
420 Max(1, distance / kCodeSizeMultiplier));
421 EmitProfilingCounterDecrement(weight);
422 __ slt(at, a3, zero_reg);
423 __ beq(at, zero_reg, &ok);
424 // Call will emit a li t9 first, so it is safe to use the delay slot.
425 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
426 // Record a mapping of this PC offset to the OSR id. This is used to find
427 // the AST id from the unoptimized code in order to use it as a key into
428 // the deoptimization input data found in the optimized code.
429 RecordBackEdge(stmt->OsrEntryId());
430 EmitProfilingCounterReset();
433 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
434 // Record a mapping of the OSR id to this PC. This is used if the OSR
435 // entry becomes the target of a bailout. We don't expect it to be, but
436 // we want it to work if it is.
437 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
441 void FullCodeGenerator::EmitReturnSequence() {
442 Comment cmnt(masm_, "[ Return sequence");
443 if (return_label_.is_bound()) {
444 __ Branch(&return_label_);
446 __ bind(&return_label_);
448 // Push the return value on the stack as the parameter.
449 // Runtime::TraceExit returns its parameter in v0.
451 __ CallRuntime(Runtime::kTraceExit, 1);
453 // Pretend that the exit is a backwards jump to the entry.
455 if (info_->ShouldSelfOptimize()) {
456 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
458 int distance = masm_->pc_offset();
459 weight = Min(kMaxBackEdgeWeight,
460 Max(1, distance / kCodeSizeMultiplier));
462 EmitProfilingCounterDecrement(weight);
464 __ Branch(&ok, ge, a3, Operand(zero_reg));
466 __ Call(isolate()->builtins()->InterruptCheck(),
467 RelocInfo::CODE_TARGET);
469 EmitProfilingCounterReset();
472 // Make sure that the constant pool is not emitted inside of the return
474 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
475 // Here we use masm_-> instead of the __ macro to avoid the code coverage
476 // tool from instrumenting as we rely on the code size here.
477 int32_t arg_count = info_->scope()->num_parameters() + 1;
478 int32_t sp_delta = arg_count * kPointerSize;
479 SetReturnPosition(literal());
481 int no_frame_start = masm_->pc_offset();
482 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
483 masm_->Daddu(sp, sp, Operand(sp_delta));
485 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
491 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
492 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
493 codegen()->GetVar(result_register(), var);
494 __ push(result_register());
498 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
502 void FullCodeGenerator::AccumulatorValueContext::Plug(
503 Heap::RootListIndex index) const {
504 __ LoadRoot(result_register(), index);
508 void FullCodeGenerator::StackValueContext::Plug(
509 Heap::RootListIndex index) const {
510 __ LoadRoot(result_register(), index);
511 __ push(result_register());
515 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
516 codegen()->PrepareForBailoutBeforeSplit(condition(),
520 if (index == Heap::kUndefinedValueRootIndex ||
521 index == Heap::kNullValueRootIndex ||
522 index == Heap::kFalseValueRootIndex) {
523 if (false_label_ != fall_through_) __ Branch(false_label_);
524 } else if (index == Heap::kTrueValueRootIndex) {
525 if (true_label_ != fall_through_) __ Branch(true_label_);
527 __ LoadRoot(result_register(), index);
528 codegen()->DoTest(this);
533 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
537 void FullCodeGenerator::AccumulatorValueContext::Plug(
538 Handle<Object> lit) const {
539 __ li(result_register(), Operand(lit));
543 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
544 // Immediates cannot be pushed directly.
545 __ li(result_register(), Operand(lit));
546 __ push(result_register());
550 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
551 codegen()->PrepareForBailoutBeforeSplit(condition(),
555 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
556 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
557 if (false_label_ != fall_through_) __ Branch(false_label_);
558 } else if (lit->IsTrue() || lit->IsJSObject()) {
559 if (true_label_ != fall_through_) __ Branch(true_label_);
560 } else if (lit->IsString()) {
561 if (String::cast(*lit)->length() == 0) {
562 if (false_label_ != fall_through_) __ Branch(false_label_);
564 if (true_label_ != fall_through_) __ Branch(true_label_);
566 } else if (lit->IsSmi()) {
567 if (Smi::cast(*lit)->value() == 0) {
568 if (false_label_ != fall_through_) __ Branch(false_label_);
570 if (true_label_ != fall_through_) __ Branch(true_label_);
573 // For simplicity we always test the accumulator register.
574 __ li(result_register(), Operand(lit));
575 codegen()->DoTest(this);
580 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
581 Register reg) const {
587 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
589 Register reg) const {
592 __ Move(result_register(), reg);
596 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
597 Register reg) const {
599 if (count > 1) __ Drop(count - 1);
600 __ sd(reg, MemOperand(sp, 0));
604 void FullCodeGenerator::TestContext::DropAndPlug(int count,
605 Register reg) const {
607 // For simplicity we always test the accumulator register.
609 __ Move(result_register(), reg);
610 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
611 codegen()->DoTest(this);
615 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
616 Label* materialize_false) const {
617 DCHECK(materialize_true == materialize_false);
618 __ bind(materialize_true);
622 void FullCodeGenerator::AccumulatorValueContext::Plug(
623 Label* materialize_true,
624 Label* materialize_false) const {
626 __ bind(materialize_true);
627 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
629 __ bind(materialize_false);
630 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
635 void FullCodeGenerator::StackValueContext::Plug(
636 Label* materialize_true,
637 Label* materialize_false) const {
639 __ bind(materialize_true);
640 __ LoadRoot(at, Heap::kTrueValueRootIndex);
641 // Push the value as the following branch can clobber at in long branch mode.
644 __ bind(materialize_false);
645 __ LoadRoot(at, Heap::kFalseValueRootIndex);
651 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
652 Label* materialize_false) const {
653 DCHECK(materialize_true == true_label_);
654 DCHECK(materialize_false == false_label_);
658 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
659 Heap::RootListIndex value_root_index =
660 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
661 __ LoadRoot(result_register(), value_root_index);
665 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
666 Heap::RootListIndex value_root_index =
667 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
668 __ LoadRoot(at, value_root_index);
673 void FullCodeGenerator::TestContext::Plug(bool flag) const {
674 codegen()->PrepareForBailoutBeforeSplit(condition(),
679 if (true_label_ != fall_through_) __ Branch(true_label_);
681 if (false_label_ != fall_through_) __ Branch(false_label_);
686 void FullCodeGenerator::DoTest(Expression* condition,
689 Label* fall_through) {
690 __ mov(a0, result_register());
691 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
692 CallIC(ic, condition->test_id());
693 __ mov(at, zero_reg);
694 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
698 void FullCodeGenerator::Split(Condition cc,
703 Label* fall_through) {
704 if (if_false == fall_through) {
705 __ Branch(if_true, cc, lhs, rhs);
706 } else if (if_true == fall_through) {
707 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
709 __ Branch(if_true, cc, lhs, rhs);
715 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
716 DCHECK(var->IsStackAllocated());
717 // Offset is negative because higher indexes are at lower addresses.
718 int offset = -var->index() * kPointerSize;
719 // Adjust by a (parameter or local) base offset.
720 if (var->IsParameter()) {
721 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
723 offset += JavaScriptFrameConstants::kLocal0Offset;
725 return MemOperand(fp, offset);
729 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
730 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
731 if (var->IsContextSlot()) {
732 int context_chain_length = scope()->ContextChainLength(var->scope());
733 __ LoadContext(scratch, context_chain_length);
734 return ContextOperand(scratch, var->index());
736 return StackOperand(var);
741 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
742 // Use destination as scratch.
743 MemOperand location = VarOperand(var, dest);
744 __ ld(dest, location);
748 void FullCodeGenerator::SetVar(Variable* var,
752 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
753 DCHECK(!scratch0.is(src));
754 DCHECK(!scratch0.is(scratch1));
755 DCHECK(!scratch1.is(src));
756 MemOperand location = VarOperand(var, scratch0);
757 __ sd(src, location);
758 // Emit the write barrier code if the location is in the heap.
759 if (var->IsContextSlot()) {
760 __ RecordWriteContextSlot(scratch0,
770 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
771 bool should_normalize,
774 // Only prepare for bailouts before splits if we're in a test
775 // context. Otherwise, we let the Visit function deal with the
776 // preparation to avoid preparing with the same AST id twice.
777 if (!context()->IsTest()) return;
780 if (should_normalize) __ Branch(&skip);
781 PrepareForBailout(expr, TOS_REG);
782 if (should_normalize) {
783 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
784 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
790 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
791 // The variable in the declaration always resides in the current function
793 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
794 if (generate_debug_code_) {
795 // Check that we're not inside a with or catch context.
796 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
797 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
798 __ Check(ne, kDeclarationInWithContext,
800 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
801 __ Check(ne, kDeclarationInCatchContext,
807 void FullCodeGenerator::VisitVariableDeclaration(
808 VariableDeclaration* declaration) {
809 // If it was not possible to allocate the variable at compile time, we
810 // need to "declare" it at runtime to make sure it actually exists in the
812 VariableProxy* proxy = declaration->proxy();
813 VariableMode mode = declaration->mode();
814 Variable* variable = proxy->var();
815 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
816 switch (variable->location()) {
817 case VariableLocation::GLOBAL:
818 case VariableLocation::UNALLOCATED:
819 globals_->Add(variable->name(), zone());
820 globals_->Add(variable->binding_needs_init()
821 ? isolate()->factory()->the_hole_value()
822 : isolate()->factory()->undefined_value(),
826 case VariableLocation::PARAMETER:
827 case VariableLocation::LOCAL:
829 Comment cmnt(masm_, "[ VariableDeclaration");
830 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
831 __ sd(a4, StackOperand(variable));
835 case VariableLocation::CONTEXT:
837 Comment cmnt(masm_, "[ VariableDeclaration");
838 EmitDebugCheckDeclarationContext(variable);
839 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
840 __ sd(at, ContextOperand(cp, variable->index()));
841 // No write barrier since the_hole_value is in old space.
842 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
846 case VariableLocation::LOOKUP: {
847 Comment cmnt(masm_, "[ VariableDeclaration");
848 __ li(a2, Operand(variable->name()));
849 // Declaration nodes are always introduced in one of four modes.
850 DCHECK(IsDeclaredVariableMode(mode));
851 // Push initial value, if any.
852 // Note: For variables we must not push an initial value (such as
853 // 'undefined') because we may have a (legal) redeclaration and we
854 // must not destroy the current value.
856 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
858 DCHECK(Smi::FromInt(0) == 0);
859 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
862 __ CallRuntime(IsImmutableVariableMode(mode)
863 ? Runtime::kDeclareReadOnlyLookupSlot
864 : Runtime::kDeclareLookupSlot,
872 void FullCodeGenerator::VisitFunctionDeclaration(
873 FunctionDeclaration* declaration) {
874 VariableProxy* proxy = declaration->proxy();
875 Variable* variable = proxy->var();
876 switch (variable->location()) {
877 case VariableLocation::GLOBAL:
878 case VariableLocation::UNALLOCATED: {
879 globals_->Add(variable->name(), zone());
880 Handle<SharedFunctionInfo> function =
881 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
882 // Check for stack-overflow exception.
883 if (function.is_null()) return SetStackOverflow();
884 globals_->Add(function, zone());
888 case VariableLocation::PARAMETER:
889 case VariableLocation::LOCAL: {
890 Comment cmnt(masm_, "[ FunctionDeclaration");
891 VisitForAccumulatorValue(declaration->fun());
892 __ sd(result_register(), StackOperand(variable));
896 case VariableLocation::CONTEXT: {
897 Comment cmnt(masm_, "[ FunctionDeclaration");
898 EmitDebugCheckDeclarationContext(variable);
899 VisitForAccumulatorValue(declaration->fun());
900 __ sd(result_register(), ContextOperand(cp, variable->index()));
901 int offset = Context::SlotOffset(variable->index());
902 // We know that we have written a function, which is not a smi.
903 __ RecordWriteContextSlot(cp,
911 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
915 case VariableLocation::LOOKUP: {
916 Comment cmnt(masm_, "[ FunctionDeclaration");
917 __ li(a2, Operand(variable->name()));
919 // Push initial value for function declaration.
920 VisitForStackValue(declaration->fun());
921 __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
928 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
929 // Call the runtime to declare the globals.
930 __ li(a1, Operand(pairs));
931 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
933 __ CallRuntime(Runtime::kDeclareGlobals, 2);
934 // Return value is ignored.
938 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
939 // Call the runtime to declare the modules.
940 __ Push(descriptions);
941 __ CallRuntime(Runtime::kDeclareModules, 1);
942 // Return value is ignored.
946 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
947 Comment cmnt(masm_, "[ SwitchStatement");
948 Breakable nested_statement(this, stmt);
949 SetStatementPosition(stmt);
951 // Keep the switch value on the stack until a case matches.
952 VisitForStackValue(stmt->tag());
953 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
955 ZoneList<CaseClause*>* clauses = stmt->cases();
956 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
958 Label next_test; // Recycled for each test.
959 // Compile all the tests with branches to their bodies.
960 for (int i = 0; i < clauses->length(); i++) {
961 CaseClause* clause = clauses->at(i);
962 clause->body_target()->Unuse();
964 // The default is not a test, but remember it as final fall through.
965 if (clause->is_default()) {
966 default_clause = clause;
970 Comment cmnt(masm_, "[ Case comparison");
974 // Compile the label expression.
975 VisitForAccumulatorValue(clause->label());
976 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
978 // Perform the comparison as if via '==='.
979 __ ld(a1, MemOperand(sp, 0)); // Switch value.
980 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
981 JumpPatchSite patch_site(masm_);
982 if (inline_smi_code) {
985 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
987 __ Branch(&next_test, ne, a1, Operand(a0));
988 __ Drop(1); // Switch value is no longer needed.
989 __ Branch(clause->body_target());
994 // Record position before stub call for type feedback.
995 SetExpressionPosition(clause);
996 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
997 strength(language_mode())).code();
998 CallIC(ic, clause->CompareId());
999 patch_site.EmitPatchInfo();
1003 PrepareForBailout(clause, TOS_REG);
1004 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1005 __ Branch(&next_test, ne, v0, Operand(at));
1007 __ Branch(clause->body_target());
1010 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1011 __ Drop(1); // Switch value is no longer needed.
1012 __ Branch(clause->body_target());
1015 // Discard the test value and jump to the default if present, otherwise to
1016 // the end of the statement.
1017 __ bind(&next_test);
1018 __ Drop(1); // Switch value is no longer needed.
1019 if (default_clause == NULL) {
1020 __ Branch(nested_statement.break_label());
1022 __ Branch(default_clause->body_target());
1025 // Compile all the case bodies.
1026 for (int i = 0; i < clauses->length(); i++) {
1027 Comment cmnt(masm_, "[ Case body");
1028 CaseClause* clause = clauses->at(i);
1029 __ bind(clause->body_target());
1030 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1031 VisitStatements(clause->statements());
1034 __ bind(nested_statement.break_label());
1035 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1039 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1040 Comment cmnt(masm_, "[ ForInStatement");
1041 SetStatementPosition(stmt, SKIP_BREAK);
1043 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1046 ForIn loop_statement(this, stmt);
1047 increment_loop_depth();
1049 // Get the object to enumerate over. If the object is null or undefined, skip
1050 // over the loop. See ECMA-262 version 5, section 12.6.4.
1051 SetExpressionAsStatementPosition(stmt->enumerable());
1052 VisitForAccumulatorValue(stmt->enumerable());
1053 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1054 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1055 __ Branch(&exit, eq, a0, Operand(at));
1056 Register null_value = a5;
1057 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1058 __ Branch(&exit, eq, a0, Operand(null_value));
1059 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1061 // Convert the object to a JS object.
1062 Label convert, done_convert;
1063 __ JumpIfSmi(a0, &convert);
1064 __ GetObjectType(a0, a1, a1);
1065 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1067 ToObjectStub stub(isolate());
1070 __ bind(&done_convert);
1071 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1074 // Check for proxies.
1076 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1077 __ GetObjectType(a0, a1, a1);
1078 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1080 // Check cache validity in generated code. This is a fast case for
1081 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1082 // guarantee cache validity, call the runtime system to check cache
1083 // validity or get the property names in a fixed array.
1084 __ CheckEnumCache(null_value, &call_runtime);
1086 // The enum cache is valid. Load the map of the object being
1087 // iterated over and use the cache for the iteration.
1089 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1090 __ Branch(&use_cache);
1092 // Get the set of properties to enumerate.
1093 __ bind(&call_runtime);
1094 __ push(a0); // Duplicate the enumerable object on the stack.
1095 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1096 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1098 // If we got a map from the runtime call, we can do a fast
1099 // modification check. Otherwise, we got a fixed array, and we have
1100 // to do a slow check.
1102 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1103 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1104 __ Branch(&fixed_array, ne, a2, Operand(at));
1106 // We got a map in register v0. Get the enumeration cache from it.
1107 Label no_descriptors;
1108 __ bind(&use_cache);
1110 __ EnumLength(a1, v0);
1111 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1113 __ LoadInstanceDescriptors(v0, a2);
1114 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1115 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1117 // Set up the four remaining stack slots.
1118 __ li(a0, Operand(Smi::FromInt(0)));
1119 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1120 __ Push(v0, a2, a1, a0);
1123 __ bind(&no_descriptors);
1127 // We got a fixed array in register v0. Iterate through that.
1129 __ bind(&fixed_array);
1131 __ li(a1, FeedbackVector());
1132 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1133 int vector_index = FeedbackVector()->GetIndex(slot);
1134 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1136 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1137 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1138 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1139 __ GetObjectType(a2, a3, a3);
1140 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1141 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1142 __ bind(&non_proxy);
1143 __ Push(a1, v0); // Smi and array
1144 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1145 __ li(a0, Operand(Smi::FromInt(0)));
1146 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1148 // Generate code for doing the condition check.
1149 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1151 SetExpressionAsStatementPosition(stmt->each());
1153 // Load the current count to a0, load the length to a1.
1154 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1155 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1156 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1158 // Get the current entry of the array into register a3.
1159 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1160 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1161 __ SmiScale(a4, a0, kPointerSizeLog2);
1162 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1163 __ ld(a3, MemOperand(a4)); // Current entry.
1165 // Get the expected map from the stack or a smi in the
1166 // permanent slow case into register a2.
1167 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1169 // Check if the expected map still matches that of the enumerable.
1170 // If not, we may have to filter the key.
1172 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1173 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1174 __ Branch(&update_each, eq, a4, Operand(a2));
1176 // For proxies, no filtering is done.
1177 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1178 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1179 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1181 // Convert the entry to a string or (smi) 0 if it isn't a property
1182 // any more. If the property has been removed while iterating, we
1184 __ Push(a1, a3); // Enumerable and current entry.
1185 __ CallRuntime(Runtime::kForInFilter, 2);
1186 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1187 __ mov(a3, result_register());
1188 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1189 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1191 // Update the 'each' property or variable from the possibly filtered
1192 // entry in register a3.
1193 __ bind(&update_each);
1194 __ mov(result_register(), a3);
1195 // Perform the assignment as if via '='.
1196 { EffectContext context(this);
1197 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1198 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1201 // Generate code for the body of the loop.
1202 Visit(stmt->body());
1204 // Generate code for the going to the next element by incrementing
1205 // the index (smi) stored on top of the stack.
1206 __ bind(loop_statement.continue_label());
1208 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1211 EmitBackEdgeBookkeeping(stmt, &loop);
1214 // Remove the pointers stored on the stack.
1215 __ bind(loop_statement.break_label());
1218 // Exit and decrement the loop depth.
1219 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1221 decrement_loop_depth();
1225 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1227 // Use the fast case closure allocation code that allocates in new
1228 // space for nested functions that don't need literals cloning. If
1229 // we're running with the --always-opt or the --prepare-always-opt
1230 // flag, we need to use the runtime function so that the new function
1231 // we are creating here gets a chance to have its code optimized and
1232 // doesn't just get a copy of the existing unoptimized code.
1233 if (!FLAG_always_opt &&
1234 !FLAG_prepare_always_opt &&
1236 scope()->is_function_scope() &&
1237 info->num_literals() == 0) {
1238 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1239 __ li(a2, Operand(info));
1244 pretenure ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure, 1);
1246 context()->Plug(v0);
1250 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1252 FeedbackVectorICSlot slot) {
1253 if (NeedsHomeObject(initializer)) {
1254 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1255 __ li(StoreDescriptor::NameRegister(),
1256 Operand(isolate()->factory()->home_object_symbol()));
1257 __ ld(StoreDescriptor::ValueRegister(),
1258 MemOperand(sp, offset * kPointerSize));
1259 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1265 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1266 TypeofMode typeof_mode,
1268 Register current = cp;
1274 if (s->num_heap_slots() > 0) {
1275 if (s->calls_sloppy_eval()) {
1276 // Check that extension is NULL.
1277 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1278 __ Branch(slow, ne, temp, Operand(zero_reg));
1280 // Load next context in chain.
1281 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1282 // Walk the rest of the chain without clobbering cp.
1285 // If no outer scope calls eval, we do not need to check more
1286 // context extensions.
1287 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1288 s = s->outer_scope();
1291 if (s->is_eval_scope()) {
1293 if (!current.is(next)) {
1294 __ Move(next, current);
1297 // Terminate at native context.
1298 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1299 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1300 __ Branch(&fast, eq, temp, Operand(a4));
1301 // Check that extension is NULL.
1302 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1303 __ Branch(slow, ne, temp, Operand(zero_reg));
1304 // Load next context in chain.
1305 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1310 // All extension objects were empty and it is safe to use a normal global
1312 EmitGlobalVariableLoad(proxy, typeof_mode);
1316 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1318 DCHECK(var->IsContextSlot());
1319 Register context = cp;
1323 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1324 if (s->num_heap_slots() > 0) {
1325 if (s->calls_sloppy_eval()) {
1326 // Check that extension is NULL.
1327 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1328 __ Branch(slow, ne, temp, Operand(zero_reg));
1330 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1331 // Walk the rest of the chain without clobbering cp.
1335 // Check that last extension is NULL.
1336 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1337 __ Branch(slow, ne, temp, Operand(zero_reg));
1339 // This function is used only for loads, not stores, so it's safe to
1340 // return an cp-based operand (the write barrier cannot be allowed to
1341 // destroy the cp register).
1342 return ContextOperand(context, var->index());
1346 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1347 TypeofMode typeof_mode,
1348 Label* slow, Label* done) {
1349 // Generate fast-case code for variables that might be shadowed by
1350 // eval-introduced variables. Eval is used a lot without
1351 // introducing variables. In those cases, we do not want to
1352 // perform a runtime call for all variables in the scope
1353 // containing the eval.
1354 Variable* var = proxy->var();
1355 if (var->mode() == DYNAMIC_GLOBAL) {
1356 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1358 } else if (var->mode() == DYNAMIC_LOCAL) {
1359 Variable* local = var->local_if_not_shadowed();
1360 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1361 if (local->mode() == LET || local->mode() == CONST ||
1362 local->mode() == CONST_LEGACY) {
1363 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1364 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1365 if (local->mode() == CONST_LEGACY) {
1366 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1367 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1368 } else { // LET || CONST
1369 __ Branch(done, ne, at, Operand(zero_reg));
1370 __ li(a0, Operand(var->name()));
1372 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1380 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1381 TypeofMode typeof_mode) {
1382 Variable* var = proxy->var();
1383 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1384 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1385 if (var->IsGlobalSlot()) {
1386 DCHECK(var->index() > 0);
1387 DCHECK(var->IsStaticGlobalObjectProperty());
1388 int const slot = var->index();
1389 int const depth = scope()->ContextChainLength(var->scope());
1390 if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
1391 __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
1392 LoadGlobalViaContextStub stub(isolate(), depth);
1395 __ Push(Smi::FromInt(slot));
1396 __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
1400 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1401 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1402 __ li(LoadDescriptor::SlotRegister(),
1403 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1404 CallLoadIC(typeof_mode);
1409 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1410 TypeofMode typeof_mode) {
1411 // Record position before possible IC call.
1412 SetExpressionPosition(proxy);
1413 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1414 Variable* var = proxy->var();
1416 // Three cases: global variables, lookup variables, and all other types of
1418 switch (var->location()) {
1419 case VariableLocation::GLOBAL:
1420 case VariableLocation::UNALLOCATED: {
1421 Comment cmnt(masm_, "[ Global variable");
1422 EmitGlobalVariableLoad(proxy, typeof_mode);
1423 context()->Plug(v0);
1427 case VariableLocation::PARAMETER:
1428 case VariableLocation::LOCAL:
1429 case VariableLocation::CONTEXT: {
1430 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1431 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1432 : "[ Stack variable");
1433 if (NeedsHoleCheckForLoad(proxy)) {
1434 // Let and const need a read barrier.
1436 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1437 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1438 if (var->mode() == LET || var->mode() == CONST) {
1439 // Throw a reference error when using an uninitialized let/const
1440 // binding in harmony mode.
1442 __ Branch(&done, ne, at, Operand(zero_reg));
1443 __ li(a0, Operand(var->name()));
1445 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1448 // Uninitialized legacy const bindings are unholed.
1449 DCHECK(var->mode() == CONST_LEGACY);
1450 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1451 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1453 context()->Plug(v0);
1456 context()->Plug(var);
1460 case VariableLocation::LOOKUP: {
1461 Comment cmnt(masm_, "[ Lookup variable");
1463 // Generate code for loading from variables potentially shadowed
1464 // by eval-introduced variables.
1465 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1467 __ li(a1, Operand(var->name()));
1468 __ Push(cp, a1); // Context and name.
1469 Runtime::FunctionId function_id =
1470 typeof_mode == NOT_INSIDE_TYPEOF
1471 ? Runtime::kLoadLookupSlot
1472 : Runtime::kLoadLookupSlotNoReferenceError;
1473 __ CallRuntime(function_id, 2);
1475 context()->Plug(v0);
1481 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1482 Comment cmnt(masm_, "[ RegExpLiteral");
1484 // Registers will be used as follows:
1485 // a5 = materialized value (RegExp literal)
1486 // a4 = JS function, literals array
1487 // a3 = literal index
1488 // a2 = RegExp pattern
1489 // a1 = RegExp flags
1490 // a0 = RegExp literal clone
1491 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1492 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1493 int literal_offset =
1494 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1495 __ ld(a5, FieldMemOperand(a4, literal_offset));
1496 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1497 __ Branch(&materialized, ne, a5, Operand(at));
1499 // Create regexp literal using runtime function.
1500 // Result will be in v0.
1501 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1502 __ li(a2, Operand(expr->pattern()));
1503 __ li(a1, Operand(expr->flags()));
1504 __ Push(a4, a3, a2, a1);
1505 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1508 __ bind(&materialized);
1509 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1510 Label allocated, runtime_allocate;
1511 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1514 __ bind(&runtime_allocate);
1515 __ li(a0, Operand(Smi::FromInt(size)));
1517 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1520 __ bind(&allocated);
1522 // After this, registers are used as follows:
1523 // v0: Newly allocated regexp.
1524 // a5: Materialized regexp.
1526 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1527 context()->Plug(v0);
1531 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1532 if (expression == NULL) {
1533 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1536 VisitForStackValue(expression);
1541 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1542 Comment cmnt(masm_, "[ ObjectLiteral");
1544 Handle<FixedArray> constant_properties = expr->constant_properties();
1545 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1546 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1547 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1548 __ li(a1, Operand(constant_properties));
1549 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1550 if (MustCreateObjectLiteralWithRuntime(expr)) {
1551 __ Push(a3, a2, a1, a0);
1552 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1554 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1557 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1559 // If result_saved is true the result is on top of the stack. If
1560 // result_saved is false the result is in v0.
1561 bool result_saved = false;
1563 AccessorTable accessor_table(zone());
1564 int property_index = 0;
1565 // store_slot_index points to the vector IC slot for the next store IC used.
1566 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1567 // and must be updated if the number of store ICs emitted here changes.
1568 int store_slot_index = 0;
1569 for (; property_index < expr->properties()->length(); property_index++) {
1570 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1571 if (property->is_computed_name()) break;
1572 if (property->IsCompileTimeValue()) continue;
1574 Literal* key = property->key()->AsLiteral();
1575 Expression* value = property->value();
1576 if (!result_saved) {
1577 __ push(v0); // Save result on stack.
1578 result_saved = true;
1580 switch (property->kind()) {
1581 case ObjectLiteral::Property::CONSTANT:
1583 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1584 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1586 case ObjectLiteral::Property::COMPUTED:
1587 // It is safe to use [[Put]] here because the boilerplate already
1588 // contains computed properties with an uninitialized value.
1589 if (key->value()->IsInternalizedString()) {
1590 if (property->emit_store()) {
1591 VisitForAccumulatorValue(value);
1592 __ mov(StoreDescriptor::ValueRegister(), result_register());
1593 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1594 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1595 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1596 if (FLAG_vector_stores) {
1597 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1600 CallStoreIC(key->LiteralFeedbackId());
1602 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1604 if (NeedsHomeObject(value)) {
1605 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1606 __ li(StoreDescriptor::NameRegister(),
1607 Operand(isolate()->factory()->home_object_symbol()));
1608 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1609 if (FLAG_vector_stores) {
1610 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1615 VisitForEffect(value);
1619 // Duplicate receiver on stack.
1620 __ ld(a0, MemOperand(sp));
1622 VisitForStackValue(key);
1623 VisitForStackValue(value);
1624 if (property->emit_store()) {
1625 EmitSetHomeObjectIfNeeded(
1626 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1627 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1629 __ CallRuntime(Runtime::kSetProperty, 4);
1634 case ObjectLiteral::Property::PROTOTYPE:
1635 // Duplicate receiver on stack.
1636 __ ld(a0, MemOperand(sp));
1638 VisitForStackValue(value);
1639 DCHECK(property->emit_store());
1640 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1642 case ObjectLiteral::Property::GETTER:
1643 if (property->emit_store()) {
1644 accessor_table.lookup(key)->second->getter = value;
1647 case ObjectLiteral::Property::SETTER:
1648 if (property->emit_store()) {
1649 accessor_table.lookup(key)->second->setter = value;
1655 // Emit code to define accessors, using only a single call to the runtime for
1656 // each pair of corresponding getters and setters.
1657 for (AccessorTable::Iterator it = accessor_table.begin();
1658 it != accessor_table.end();
1660 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1662 VisitForStackValue(it->first);
1663 EmitAccessor(it->second->getter);
1664 EmitSetHomeObjectIfNeeded(
1665 it->second->getter, 2,
1666 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1667 EmitAccessor(it->second->setter);
1668 EmitSetHomeObjectIfNeeded(
1669 it->second->setter, 3,
1670 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1671 __ li(a0, Operand(Smi::FromInt(NONE)));
1673 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1676 // Object literals have two parts. The "static" part on the left contains no
1677 // computed property names, and so we can compute its map ahead of time; see
1678 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1679 // starts with the first computed property name, and continues with all
1680 // properties to its right. All the code from above initializes the static
1681 // component of the object literal, and arranges for the map of the result to
1682 // reflect the static order in which the keys appear. For the dynamic
1683 // properties, we compile them into a series of "SetOwnProperty" runtime
1684 // calls. This will preserve insertion order.
1685 for (; property_index < expr->properties()->length(); property_index++) {
1686 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1688 Expression* value = property->value();
1689 if (!result_saved) {
1690 __ push(v0); // Save result on the stack
1691 result_saved = true;
1694 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1697 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1698 DCHECK(!property->is_computed_name());
1699 VisitForStackValue(value);
1700 DCHECK(property->emit_store());
1701 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1703 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1704 VisitForStackValue(value);
1705 EmitSetHomeObjectIfNeeded(
1706 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1708 switch (property->kind()) {
1709 case ObjectLiteral::Property::CONSTANT:
1710 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1711 case ObjectLiteral::Property::COMPUTED:
1712 if (property->emit_store()) {
1713 __ li(a0, Operand(Smi::FromInt(NONE)));
1715 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1721 case ObjectLiteral::Property::PROTOTYPE:
1725 case ObjectLiteral::Property::GETTER:
1726 __ li(a0, Operand(Smi::FromInt(NONE)));
1728 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1731 case ObjectLiteral::Property::SETTER:
1732 __ li(a0, Operand(Smi::FromInt(NONE)));
1734 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1740 if (expr->has_function()) {
1741 DCHECK(result_saved);
1742 __ ld(a0, MemOperand(sp));
1744 __ CallRuntime(Runtime::kToFastProperties, 1);
1748 context()->PlugTOS();
1750 context()->Plug(v0);
1753 // Verify that compilation exactly consumed the number of store ic slots that
1754 // the ObjectLiteral node had to offer.
1755 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1760 Comment cmnt(masm_, "[ ArrayLiteral");
1762 expr->BuildConstantElements(isolate());
1764 Handle<FixedArray> constant_elements = expr->constant_elements();
1765 bool has_fast_elements =
1766 IsFastObjectElementsKind(expr->constant_elements_kind());
1768 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1769 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1770 // If the only customer of allocation sites is transitioning, then
1771 // we can turn it off if we don't have anywhere else to transition to.
1772 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1775 __ mov(a0, result_register());
1776 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1777 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1778 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1779 __ li(a1, Operand(constant_elements));
1780 if (MustCreateArrayLiteralWithRuntime(expr)) {
1781 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1782 __ Push(a3, a2, a1, a0);
1783 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1785 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1788 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1790 bool result_saved = false; // Is the result saved to the stack?
1791 ZoneList<Expression*>* subexprs = expr->values();
1792 int length = subexprs->length();
1794 // Emit code to evaluate all the non-constant subexpressions and to store
1795 // them into the newly cloned array.
1796 int array_index = 0;
1797 for (; array_index < length; array_index++) {
1798 Expression* subexpr = subexprs->at(array_index);
1799 if (subexpr->IsSpread()) break;
1801 // If the subexpression is a literal or a simple materialized literal it
1802 // is already set in the cloned array.
1803 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1805 if (!result_saved) {
1806 __ push(v0); // array literal
1807 __ Push(Smi::FromInt(expr->literal_index()));
1808 result_saved = true;
1811 VisitForAccumulatorValue(subexpr);
1813 if (has_fast_elements) {
1814 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1815 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1816 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1817 __ sd(result_register(), FieldMemOperand(a1, offset));
1818 // Update the write barrier for the array store.
1819 __ RecordWriteField(a1, offset, result_register(), a2,
1820 kRAHasBeenSaved, kDontSaveFPRegs,
1821 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1823 __ li(a3, Operand(Smi::FromInt(array_index)));
1824 __ mov(a0, result_register());
1825 StoreArrayLiteralElementStub stub(isolate());
1829 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1832 // In case the array literal contains spread expressions it has two parts. The
1833 // first part is the "static" array which has a literal index is handled
1834 // above. The second part is the part after the first spread expression
1835 // (inclusive) and these elements gets appended to the array. Note that the
1836 // number elements an iterable produces is unknown ahead of time.
1837 if (array_index < length && result_saved) {
1838 __ Pop(); // literal index
1840 result_saved = false;
1842 for (; array_index < length; array_index++) {
1843 Expression* subexpr = subexprs->at(array_index);
1846 if (subexpr->IsSpread()) {
1847 VisitForStackValue(subexpr->AsSpread()->expression());
1848 __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1851 VisitForStackValue(subexpr);
1852 __ CallRuntime(Runtime::kAppendElement, 2);
1855 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1859 __ Pop(); // literal index
1860 context()->PlugTOS();
1862 context()->Plug(v0);
1867 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1868 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1870 Comment cmnt(masm_, "[ Assignment");
1871 SetExpressionPosition(expr, INSERT_BREAK);
1873 Property* property = expr->target()->AsProperty();
1874 LhsKind assign_type = Property::GetAssignType(property);
1876 // Evaluate LHS expression.
1877 switch (assign_type) {
1879 // Nothing to do here.
1881 case NAMED_PROPERTY:
1882 if (expr->is_compound()) {
1883 // We need the receiver both on the stack and in the register.
1884 VisitForStackValue(property->obj());
1885 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1887 VisitForStackValue(property->obj());
1890 case NAMED_SUPER_PROPERTY:
1892 property->obj()->AsSuperPropertyReference()->this_var());
1893 VisitForAccumulatorValue(
1894 property->obj()->AsSuperPropertyReference()->home_object());
1895 __ Push(result_register());
1896 if (expr->is_compound()) {
1897 const Register scratch = a1;
1898 __ ld(scratch, MemOperand(sp, kPointerSize));
1899 __ Push(scratch, result_register());
1902 case KEYED_SUPER_PROPERTY: {
1903 const Register scratch = a1;
1905 property->obj()->AsSuperPropertyReference()->this_var());
1906 VisitForAccumulatorValue(
1907 property->obj()->AsSuperPropertyReference()->home_object());
1908 __ Move(scratch, result_register());
1909 VisitForAccumulatorValue(property->key());
1910 __ Push(scratch, result_register());
1911 if (expr->is_compound()) {
1912 const Register scratch1 = a4;
1913 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1914 __ Push(scratch1, scratch, result_register());
1918 case KEYED_PROPERTY:
1919 // We need the key and receiver on both the stack and in v0 and a1.
1920 if (expr->is_compound()) {
1921 VisitForStackValue(property->obj());
1922 VisitForStackValue(property->key());
1923 __ ld(LoadDescriptor::ReceiverRegister(),
1924 MemOperand(sp, 1 * kPointerSize));
1925 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1927 VisitForStackValue(property->obj());
1928 VisitForStackValue(property->key());
1933 // For compound assignments we need another deoptimization point after the
1934 // variable/property load.
1935 if (expr->is_compound()) {
1936 { AccumulatorValueContext context(this);
1937 switch (assign_type) {
1939 EmitVariableLoad(expr->target()->AsVariableProxy());
1940 PrepareForBailout(expr->target(), TOS_REG);
1942 case NAMED_PROPERTY:
1943 EmitNamedPropertyLoad(property);
1944 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1946 case NAMED_SUPER_PROPERTY:
1947 EmitNamedSuperPropertyLoad(property);
1948 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1950 case KEYED_SUPER_PROPERTY:
1951 EmitKeyedSuperPropertyLoad(property);
1952 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1954 case KEYED_PROPERTY:
1955 EmitKeyedPropertyLoad(property);
1956 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1961 Token::Value op = expr->binary_op();
1962 __ push(v0); // Left operand goes on the stack.
1963 VisitForAccumulatorValue(expr->value());
1965 AccumulatorValueContext context(this);
1966 if (ShouldInlineSmiCase(op)) {
1967 EmitInlineSmiBinaryOp(expr->binary_operation(),
1972 EmitBinaryOp(expr->binary_operation(), op);
1975 // Deoptimization point in case the binary operation may have side effects.
1976 PrepareForBailout(expr->binary_operation(), TOS_REG);
1978 VisitForAccumulatorValue(expr->value());
1981 SetExpressionPosition(expr);
1984 switch (assign_type) {
1986 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1987 expr->op(), expr->AssignmentSlot());
1988 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1989 context()->Plug(v0);
1991 case NAMED_PROPERTY:
1992 EmitNamedPropertyAssignment(expr);
1994 case NAMED_SUPER_PROPERTY:
1995 EmitNamedSuperPropertyStore(property);
1996 context()->Plug(v0);
1998 case KEYED_SUPER_PROPERTY:
1999 EmitKeyedSuperPropertyStore(property);
2000 context()->Plug(v0);
2002 case KEYED_PROPERTY:
2003 EmitKeyedPropertyAssignment(expr);
2009 void FullCodeGenerator::VisitYield(Yield* expr) {
2010 Comment cmnt(masm_, "[ Yield");
2011 SetExpressionPosition(expr);
2013 // Evaluate yielded value first; the initial iterator definition depends on
2014 // this. It stays on the stack while we update the iterator.
2015 VisitForStackValue(expr->expression());
2017 switch (expr->yield_kind()) {
2018 case Yield::kSuspend:
2019 // Pop value from top-of-stack slot; box result into result register.
2020 EmitCreateIteratorResult(false);
2021 __ push(result_register());
2023 case Yield::kInitial: {
2024 Label suspend, continuation, post_runtime, resume;
2027 __ bind(&continuation);
2028 __ RecordGeneratorContinuation();
2032 VisitForAccumulatorValue(expr->generator_object());
2033 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2034 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2035 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2036 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2038 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2039 kRAHasBeenSaved, kDontSaveFPRegs);
2040 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2041 __ Branch(&post_runtime, eq, sp, Operand(a1));
2042 __ push(v0); // generator object
2043 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2044 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2045 __ bind(&post_runtime);
2046 __ pop(result_register());
2047 EmitReturnSequence();
2050 context()->Plug(result_register());
2054 case Yield::kFinal: {
2055 VisitForAccumulatorValue(expr->generator_object());
2056 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2057 __ sd(a1, FieldMemOperand(result_register(),
2058 JSGeneratorObject::kContinuationOffset));
2059 // Pop value from top-of-stack slot, box result into result register.
2060 EmitCreateIteratorResult(true);
2061 EmitUnwindBeforeReturn();
2062 EmitReturnSequence();
2066 case Yield::kDelegating: {
2067 VisitForStackValue(expr->generator_object());
2069 // Initial stack layout is as follows:
2070 // [sp + 1 * kPointerSize] iter
2071 // [sp + 0 * kPointerSize] g
2073 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2074 Label l_next, l_call;
2075 Register load_receiver = LoadDescriptor::ReceiverRegister();
2076 Register load_name = LoadDescriptor::NameRegister();
2077 // Initial send value is undefined.
2078 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2081 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2084 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2085 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2086 __ Push(a2, a3, a0); // "throw", iter, except
2089 // try { received = %yield result }
2090 // Shuffle the received result above a try handler and yield it without
2093 __ pop(a0); // result
2094 int handler_index = NewHandlerTableEntry();
2095 EnterTryBlock(handler_index, &l_catch);
2096 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2097 __ push(a0); // result
2100 __ bind(&l_continuation);
2101 __ RecordGeneratorContinuation();
2105 __ bind(&l_suspend);
2106 const int generator_object_depth = kPointerSize + try_block_size;
2107 __ ld(a0, MemOperand(sp, generator_object_depth));
2109 __ Push(Smi::FromInt(handler_index)); // handler-index
2110 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2111 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2112 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2113 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2115 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2116 kRAHasBeenSaved, kDontSaveFPRegs);
2117 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2118 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2119 __ pop(v0); // result
2120 EmitReturnSequence();
2122 __ bind(&l_resume); // received in a0
2123 ExitTryBlock(handler_index);
2125 // receiver = iter; f = 'next'; arg = received;
2127 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2128 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2129 __ Push(load_name, a3, a0); // "next", iter, received
2131 // result = receiver[f](arg);
2133 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2134 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2135 __ li(LoadDescriptor::SlotRegister(),
2136 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2137 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2138 CallIC(ic, TypeFeedbackId::None());
2141 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2142 SetCallPosition(expr, 1);
2143 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2146 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2147 __ Drop(1); // The function is still on the stack; drop it.
2149 // if (!result.done) goto l_try;
2150 __ Move(load_receiver, v0);
2152 __ push(load_receiver); // save result
2153 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2154 __ li(LoadDescriptor::SlotRegister(),
2155 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2156 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.done
2158 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2160 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2163 __ pop(load_receiver); // result
2164 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2165 __ li(LoadDescriptor::SlotRegister(),
2166 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2167 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.value
2168 context()->DropAndPlug(2, v0); // drop iter and g
2175 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2177 JSGeneratorObject::ResumeMode resume_mode) {
2178 // The value stays in a0, and is ultimately read by the resumed generator, as
2179 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2180 // is read to throw the value when the resumed generator is already closed.
2181 // a1 will hold the generator object until the activation has been resumed.
2182 VisitForStackValue(generator);
2183 VisitForAccumulatorValue(value);
2186 // Load suspended function and context.
2187 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2188 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2190 // Load receiver and store as the first argument.
2191 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2194 // Push holes for the rest of the arguments to the generator function.
2195 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2196 // The argument count is stored as int32_t on 64-bit platforms.
2197 // TODO(plind): Smi on 32-bit platforms.
2199 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2200 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2201 Label push_argument_holes, push_frame;
2202 __ bind(&push_argument_holes);
2203 __ Dsubu(a3, a3, Operand(1));
2204 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2206 __ jmp(&push_argument_holes);
2208 // Enter a new JavaScript frame, and initialize its slots as they were when
2209 // the generator was suspended.
2210 Label resume_frame, done;
2211 __ bind(&push_frame);
2212 __ Call(&resume_frame);
2214 __ bind(&resume_frame);
2215 // ra = return address.
2216 // fp = caller's frame pointer.
2217 // cp = callee's context,
2218 // a4 = callee's JS function.
2219 __ Push(ra, fp, cp, a4);
2220 // Adjust FP to point to saved FP.
2221 __ Daddu(fp, sp, 2 * kPointerSize);
2223 // Load the operand stack size.
2224 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2225 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2228 // If we are sending a value and there is no operand stack, we can jump back
2230 if (resume_mode == JSGeneratorObject::NEXT) {
2232 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2233 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2234 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2236 __ Daddu(a3, a3, Operand(a2));
2237 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2238 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2240 __ bind(&slow_resume);
2243 // Otherwise, we push holes for the operand stack and call the runtime to fix
2244 // up the stack and the handlers.
2245 Label push_operand_holes, call_resume;
2246 __ bind(&push_operand_holes);
2247 __ Dsubu(a3, a3, Operand(1));
2248 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2250 __ Branch(&push_operand_holes);
2251 __ bind(&call_resume);
2252 DCHECK(!result_register().is(a1));
2253 __ Push(a1, result_register());
2254 __ Push(Smi::FromInt(resume_mode));
2255 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2256 // Not reached: the runtime call returns elsewhere.
2257 __ stop("not-reached");
2260 context()->Plug(result_register());
2264 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2265 Label allocate, done_allocate;
2267 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2268 __ jmp(&done_allocate);
2271 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2272 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2274 __ bind(&done_allocate);
2275 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2276 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2277 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2280 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2281 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2282 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2283 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2284 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2285 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2286 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2287 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2291 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2292 SetExpressionPosition(prop);
2293 Literal* key = prop->key()->AsLiteral();
2294 DCHECK(!prop->IsSuperAccess());
2296 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2297 __ li(LoadDescriptor::SlotRegister(),
2298 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2299 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2303 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2304 // Stack: receiver, home_object.
2305 SetExpressionPosition(prop);
2307 Literal* key = prop->key()->AsLiteral();
2308 DCHECK(!key->value()->IsSmi());
2309 DCHECK(prop->IsSuperAccess());
2311 __ Push(key->value());
2312 __ Push(Smi::FromInt(language_mode()));
2313 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2317 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2318 // Call keyed load IC. It has register arguments receiver and key.
2319 SetExpressionPosition(prop);
2321 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2322 __ li(LoadDescriptor::SlotRegister(),
2323 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2328 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2329 // Stack: receiver, home_object, key.
2330 SetExpressionPosition(prop);
2331 __ Push(Smi::FromInt(language_mode()));
2332 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2336 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2338 Expression* left_expr,
2339 Expression* right_expr) {
2340 Label done, smi_case, stub_call;
2342 Register scratch1 = a2;
2343 Register scratch2 = a3;
2345 // Get the arguments.
2347 Register right = a0;
2349 __ mov(a0, result_register());
2351 // Perform combined smi check on both operands.
2352 __ Or(scratch1, left, Operand(right));
2353 STATIC_ASSERT(kSmiTag == 0);
2354 JumpPatchSite patch_site(masm_);
2355 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2357 __ bind(&stub_call);
2359 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2360 CallIC(code, expr->BinaryOperationFeedbackId());
2361 patch_site.EmitPatchInfo();
2365 // Smi case. This code works the same way as the smi-smi case in the type
2366 // recording binary operation stub, see
2369 __ GetLeastBitsFromSmi(scratch1, right, 5);
2370 __ dsrav(right, left, scratch1);
2371 __ And(v0, right, Operand(0xffffffff00000000L));
2374 __ SmiUntag(scratch1, left);
2375 __ GetLeastBitsFromSmi(scratch2, right, 5);
2376 __ dsllv(scratch1, scratch1, scratch2);
2377 __ SmiTag(v0, scratch1);
2381 __ SmiUntag(scratch1, left);
2382 __ GetLeastBitsFromSmi(scratch2, right, 5);
2383 __ dsrlv(scratch1, scratch1, scratch2);
2384 __ And(scratch2, scratch1, 0x80000000);
2385 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2386 __ SmiTag(v0, scratch1);
2390 __ DadduAndCheckForOverflow(v0, left, right, scratch1);
2391 __ BranchOnOverflow(&stub_call, scratch1);
2394 __ DsubuAndCheckForOverflow(v0, left, right, scratch1);
2395 __ BranchOnOverflow(&stub_call, scratch1);
2398 __ Dmulh(v0, left, right);
2399 __ dsra32(scratch2, v0, 0);
2400 __ sra(scratch1, v0, 31);
2401 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2403 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2404 __ Daddu(scratch2, right, left);
2405 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2406 DCHECK(Smi::FromInt(0) == 0);
2407 __ mov(v0, zero_reg);
2411 __ Or(v0, left, Operand(right));
2413 case Token::BIT_AND:
2414 __ And(v0, left, Operand(right));
2416 case Token::BIT_XOR:
2417 __ Xor(v0, left, Operand(right));
2424 context()->Plug(v0);
2428 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2429 int* used_store_slots) {
2430 // Constructor is in v0.
2431 DCHECK(lit != NULL);
2434 // No access check is needed here since the constructor is created by the
2436 Register scratch = a1;
2438 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2441 for (int i = 0; i < lit->properties()->length(); i++) {
2442 ObjectLiteral::Property* property = lit->properties()->at(i);
2443 Expression* value = property->value();
2445 if (property->is_static()) {
2446 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2448 __ ld(scratch, MemOperand(sp, 0)); // prototype
2451 EmitPropertyKey(property, lit->GetIdForProperty(i));
2453 // The static prototype property is read only. We handle the non computed
2454 // property name case in the parser. Since this is the only case where we
2455 // need to check for an own read only property we special case this so we do
2456 // not need to do this for every property.
2457 if (property->is_static() && property->is_computed_name()) {
2458 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2462 VisitForStackValue(value);
2463 EmitSetHomeObjectIfNeeded(value, 2,
2464 lit->SlotForHomeObject(value, used_store_slots));
2466 switch (property->kind()) {
2467 case ObjectLiteral::Property::CONSTANT:
2468 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2469 case ObjectLiteral::Property::PROTOTYPE:
2471 case ObjectLiteral::Property::COMPUTED:
2472 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2475 case ObjectLiteral::Property::GETTER:
2476 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2478 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2481 case ObjectLiteral::Property::SETTER:
2482 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2484 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2492 // Set both the prototype and constructor to have fast properties, and also
2493 // freeze them in strong mode.
2494 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2498 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2499 __ mov(a0, result_register());
2502 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2503 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2504 CallIC(code, expr->BinaryOperationFeedbackId());
2505 patch_site.EmitPatchInfo();
2506 context()->Plug(v0);
2510 void FullCodeGenerator::EmitAssignment(Expression* expr,
2511 FeedbackVectorICSlot slot) {
2512 DCHECK(expr->IsValidReferenceExpressionOrThis());
2514 Property* prop = expr->AsProperty();
2515 LhsKind assign_type = Property::GetAssignType(prop);
2517 switch (assign_type) {
2519 Variable* var = expr->AsVariableProxy()->var();
2520 EffectContext context(this);
2521 EmitVariableAssignment(var, Token::ASSIGN, slot);
2524 case NAMED_PROPERTY: {
2525 __ push(result_register()); // Preserve value.
2526 VisitForAccumulatorValue(prop->obj());
2527 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2528 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2529 __ li(StoreDescriptor::NameRegister(),
2530 Operand(prop->key()->AsLiteral()->value()));
2531 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2535 case NAMED_SUPER_PROPERTY: {
2537 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2538 VisitForAccumulatorValue(
2539 prop->obj()->AsSuperPropertyReference()->home_object());
2540 // stack: value, this; v0: home_object
2541 Register scratch = a2;
2542 Register scratch2 = a3;
2543 __ mov(scratch, result_register()); // home_object
2544 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2545 __ ld(scratch2, MemOperand(sp, 0)); // this
2546 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2547 __ sd(scratch, MemOperand(sp, 0)); // home_object
2548 // stack: this, home_object; v0: value
2549 EmitNamedSuperPropertyStore(prop);
2552 case KEYED_SUPER_PROPERTY: {
2554 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2556 prop->obj()->AsSuperPropertyReference()->home_object());
2557 VisitForAccumulatorValue(prop->key());
2558 Register scratch = a2;
2559 Register scratch2 = a3;
2560 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2561 // stack: value, this, home_object; v0: key, a3: value
2562 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2563 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2564 __ ld(scratch, MemOperand(sp, 0)); // home_object
2565 __ sd(scratch, MemOperand(sp, kPointerSize));
2566 __ sd(v0, MemOperand(sp, 0));
2567 __ Move(v0, scratch2);
2568 // stack: this, home_object, key; v0: value.
2569 EmitKeyedSuperPropertyStore(prop);
2572 case KEYED_PROPERTY: {
2573 __ push(result_register()); // Preserve value.
2574 VisitForStackValue(prop->obj());
2575 VisitForAccumulatorValue(prop->key());
2576 __ Move(StoreDescriptor::NameRegister(), result_register());
2577 __ Pop(StoreDescriptor::ValueRegister(),
2578 StoreDescriptor::ReceiverRegister());
2579 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2581 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2586 context()->Plug(v0);
2590 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2591 Variable* var, MemOperand location) {
2592 __ sd(result_register(), location);
2593 if (var->IsContextSlot()) {
2594 // RecordWrite may destroy all its register arguments.
2595 __ Move(a3, result_register());
2596 int offset = Context::SlotOffset(var->index());
2597 __ RecordWriteContextSlot(
2598 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2603 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2604 FeedbackVectorICSlot slot) {
2605 if (var->IsUnallocated()) {
2606 // Global var, const, or let.
2607 __ mov(StoreDescriptor::ValueRegister(), result_register());
2608 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2609 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2610 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2613 } else if (var->IsGlobalSlot()) {
2614 // Global var, const, or let.
2615 DCHECK(var->index() > 0);
2616 DCHECK(var->IsStaticGlobalObjectProperty());
2617 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
2618 __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
2619 int const slot = var->index();
2620 int const depth = scope()->ContextChainLength(var->scope());
2621 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2622 __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2623 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2626 __ Push(Smi::FromInt(slot));
2628 __ CallRuntime(is_strict(language_mode())
2629 ? Runtime::kStoreGlobalViaContext_Strict
2630 : Runtime::kStoreGlobalViaContext_Sloppy,
2634 } else if (var->mode() == LET && op != Token::INIT_LET) {
2635 // Non-initializing assignment to let variable needs a write barrier.
2636 DCHECK(!var->IsLookupSlot());
2637 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2639 MemOperand location = VarOperand(var, a1);
2640 __ ld(a3, location);
2641 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2642 __ Branch(&assign, ne, a3, Operand(a4));
2643 __ li(a3, Operand(var->name()));
2645 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2646 // Perform the assignment.
2648 EmitStoreToStackLocalOrContextSlot(var, location);
2650 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2651 // Assignment to const variable needs a write barrier.
2652 DCHECK(!var->IsLookupSlot());
2653 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2655 MemOperand location = VarOperand(var, a1);
2656 __ ld(a3, location);
2657 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2658 __ Branch(&const_error, ne, a3, Operand(at));
2659 __ li(a3, Operand(var->name()));
2661 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2662 __ bind(&const_error);
2663 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2665 } else if (var->is_this() && op == Token::INIT_CONST) {
2666 // Initializing assignment to const {this} needs a write barrier.
2667 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2668 Label uninitialized_this;
2669 MemOperand location = VarOperand(var, a1);
2670 __ ld(a3, location);
2671 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2672 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2673 __ li(a0, Operand(var->name()));
2675 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2676 __ bind(&uninitialized_this);
2677 EmitStoreToStackLocalOrContextSlot(var, location);
2679 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2680 if (var->IsLookupSlot()) {
2681 // Assignment to var.
2682 __ li(a4, Operand(var->name()));
2683 __ li(a3, Operand(Smi::FromInt(language_mode())));
2684 // jssp[0] : language mode.
2686 // jssp[16] : context.
2687 // jssp[24] : value.
2688 __ Push(v0, cp, a4, a3);
2689 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2691 // Assignment to var or initializing assignment to let/const in harmony
2693 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2694 MemOperand location = VarOperand(var, a1);
2695 if (generate_debug_code_ && op == Token::INIT_LET) {
2696 // Check for an uninitialized let binding.
2697 __ ld(a2, location);
2698 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2699 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2701 EmitStoreToStackLocalOrContextSlot(var, location);
2704 } else if (op == Token::INIT_CONST_LEGACY) {
2705 // Const initializers need a write barrier.
2706 DCHECK(!var->IsParameter()); // No const parameters.
2707 if (var->IsLookupSlot()) {
2708 __ li(a0, Operand(var->name()));
2709 __ Push(v0, cp, a0); // Context and name.
2710 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2712 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2714 MemOperand location = VarOperand(var, a1);
2715 __ ld(a2, location);
2716 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2717 __ Branch(&skip, ne, a2, Operand(at));
2718 EmitStoreToStackLocalOrContextSlot(var, location);
2723 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2724 if (is_strict(language_mode())) {
2725 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2727 // Silently ignore store in sloppy mode.
2732 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2733 // Assignment to a property, using a named store IC.
2734 Property* prop = expr->target()->AsProperty();
2735 DCHECK(prop != NULL);
2736 DCHECK(prop->key()->IsLiteral());
2738 __ mov(StoreDescriptor::ValueRegister(), result_register());
2739 __ li(StoreDescriptor::NameRegister(),
2740 Operand(prop->key()->AsLiteral()->value()));
2741 __ pop(StoreDescriptor::ReceiverRegister());
2742 if (FLAG_vector_stores) {
2743 EmitLoadStoreICSlot(expr->AssignmentSlot());
2746 CallStoreIC(expr->AssignmentFeedbackId());
2749 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2750 context()->Plug(v0);
2754 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2755 // Assignment to named property of super.
2757 // stack : receiver ('this'), home_object
2758 DCHECK(prop != NULL);
2759 Literal* key = prop->key()->AsLiteral();
2760 DCHECK(key != NULL);
2762 __ Push(key->value());
2764 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2765 : Runtime::kStoreToSuper_Sloppy),
2770 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2771 // Assignment to named property of super.
2773 // stack : receiver ('this'), home_object, key
2774 DCHECK(prop != NULL);
2778 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2779 : Runtime::kStoreKeyedToSuper_Sloppy),
2784 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2785 // Assignment to a property, using a keyed store IC.
2786 // Call keyed store IC.
2787 // The arguments are:
2788 // - a0 is the value,
2790 // - a2 is the receiver.
2791 __ mov(StoreDescriptor::ValueRegister(), result_register());
2792 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2793 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2796 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2797 if (FLAG_vector_stores) {
2798 EmitLoadStoreICSlot(expr->AssignmentSlot());
2801 CallIC(ic, expr->AssignmentFeedbackId());
2804 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2805 context()->Plug(v0);
2809 void FullCodeGenerator::VisitProperty(Property* expr) {
2810 Comment cmnt(masm_, "[ Property");
2811 SetExpressionPosition(expr);
2813 Expression* key = expr->key();
2815 if (key->IsPropertyName()) {
2816 if (!expr->IsSuperAccess()) {
2817 VisitForAccumulatorValue(expr->obj());
2818 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2819 EmitNamedPropertyLoad(expr);
2821 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2823 expr->obj()->AsSuperPropertyReference()->home_object());
2824 EmitNamedSuperPropertyLoad(expr);
2827 if (!expr->IsSuperAccess()) {
2828 VisitForStackValue(expr->obj());
2829 VisitForAccumulatorValue(expr->key());
2830 __ Move(LoadDescriptor::NameRegister(), v0);
2831 __ pop(LoadDescriptor::ReceiverRegister());
2832 EmitKeyedPropertyLoad(expr);
2834 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2836 expr->obj()->AsSuperPropertyReference()->home_object());
2837 VisitForStackValue(expr->key());
2838 EmitKeyedSuperPropertyLoad(expr);
2841 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2842 context()->Plug(v0);
2846 void FullCodeGenerator::CallIC(Handle<Code> code,
2847 TypeFeedbackId id) {
2849 __ Call(code, RelocInfo::CODE_TARGET, id);
2853 // Code common for calls using the IC.
2854 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2855 Expression* callee = expr->expression();
2857 CallICState::CallType call_type =
2858 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2860 // Get the target function.
2861 if (call_type == CallICState::FUNCTION) {
2862 { StackValueContext context(this);
2863 EmitVariableLoad(callee->AsVariableProxy());
2864 PrepareForBailout(callee, NO_REGISTERS);
2866 // Push undefined as receiver. This is patched in the method prologue if it
2867 // is a sloppy mode method.
2868 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2871 // Load the function from the receiver.
2872 DCHECK(callee->IsProperty());
2873 DCHECK(!callee->AsProperty()->IsSuperAccess());
2874 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2875 EmitNamedPropertyLoad(callee->AsProperty());
2876 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2877 // Push the target function under the receiver.
2878 __ ld(at, MemOperand(sp, 0));
2880 __ sd(v0, MemOperand(sp, kPointerSize));
2883 EmitCall(expr, call_type);
2887 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2888 SetExpressionPosition(expr);
2889 Expression* callee = expr->expression();
2890 DCHECK(callee->IsProperty());
2891 Property* prop = callee->AsProperty();
2892 DCHECK(prop->IsSuperAccess());
2894 Literal* key = prop->key()->AsLiteral();
2895 DCHECK(!key->value()->IsSmi());
2896 // Load the function from the receiver.
2897 const Register scratch = a1;
2898 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2899 VisitForAccumulatorValue(super_ref->home_object());
2900 __ mov(scratch, v0);
2901 VisitForAccumulatorValue(super_ref->this_var());
2902 __ Push(scratch, v0, v0, scratch);
2903 __ Push(key->value());
2904 __ Push(Smi::FromInt(language_mode()));
2908 // - this (receiver)
2909 // - this (receiver) <-- LoadFromSuper will pop here and below.
2913 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2915 // Replace home_object with target function.
2916 __ sd(v0, MemOperand(sp, kPointerSize));
2919 // - target function
2920 // - this (receiver)
2921 EmitCall(expr, CallICState::METHOD);
2925 // Code common for calls using the IC.
2926 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2929 VisitForAccumulatorValue(key);
2931 Expression* callee = expr->expression();
2933 // Load the function from the receiver.
2934 DCHECK(callee->IsProperty());
2935 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2936 __ Move(LoadDescriptor::NameRegister(), v0);
2937 EmitKeyedPropertyLoad(callee->AsProperty());
2938 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2940 // Push the target function under the receiver.
2941 __ ld(at, MemOperand(sp, 0));
2943 __ sd(v0, MemOperand(sp, kPointerSize));
2945 EmitCall(expr, CallICState::METHOD);
2949 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2950 Expression* callee = expr->expression();
2951 DCHECK(callee->IsProperty());
2952 Property* prop = callee->AsProperty();
2953 DCHECK(prop->IsSuperAccess());
2955 SetExpressionPosition(prop);
2956 // Load the function from the receiver.
2957 const Register scratch = a1;
2958 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2959 VisitForAccumulatorValue(super_ref->home_object());
2960 __ Move(scratch, v0);
2961 VisitForAccumulatorValue(super_ref->this_var());
2962 __ Push(scratch, v0, v0, scratch);
2963 VisitForStackValue(prop->key());
2964 __ Push(Smi::FromInt(language_mode()));
2968 // - this (receiver)
2969 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2973 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2975 // Replace home_object with target function.
2976 __ sd(v0, MemOperand(sp, kPointerSize));
2979 // - target function
2980 // - this (receiver)
2981 EmitCall(expr, CallICState::METHOD);
2985 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2986 // Load the arguments.
2987 ZoneList<Expression*>* args = expr->arguments();
2988 int arg_count = args->length();
2989 for (int i = 0; i < arg_count; i++) {
2990 VisitForStackValue(args->at(i));
2993 // Record source position of the IC call.
2994 SetCallPosition(expr, arg_count);
2995 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2996 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2997 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2998 // Don't assign a type feedback id to the IC, since type feedback is provided
2999 // by the vector above.
3001 RecordJSReturnSite(expr);
3002 // Restore context register.
3003 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3004 context()->DropAndPlug(1, v0);
3008 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3009 // a6: copy of the first argument or undefined if it doesn't exist.
3010 if (arg_count > 0) {
3011 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
3013 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
3016 // a5: the receiver of the enclosing function.
3017 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3019 // a4: the language mode.
3020 __ li(a4, Operand(Smi::FromInt(language_mode())));
3022 // a1: the start position of the scope the calls resides in.
3023 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3025 // Do the runtime call.
3026 __ Push(a6, a5, a4, a1);
3027 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3031 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3032 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3033 VariableProxy* callee = expr->expression()->AsVariableProxy();
3034 if (callee->var()->IsLookupSlot()) {
3037 SetExpressionPosition(callee);
3038 // Generate code for loading from variables potentially shadowed by
3039 // eval-introduced variables.
3040 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3043 // Call the runtime to find the function to call (returned in v0)
3044 // and the object holding it (returned in v1).
3045 DCHECK(!context_register().is(a2));
3046 __ li(a2, Operand(callee->name()));
3047 __ Push(context_register(), a2);
3048 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3049 __ Push(v0, v1); // Function, receiver.
3050 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3052 // If fast case code has been generated, emit code to push the
3053 // function and receiver and have the slow path jump around this
3055 if (done.is_linked()) {
3061 // The receiver is implicitly the global receiver. Indicate this
3062 // by passing the hole to the call function stub.
3063 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3068 VisitForStackValue(callee);
3069 // refEnv.WithBaseObject()
3070 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3071 __ push(a2); // Reserved receiver slot.
3076 void FullCodeGenerator::VisitCall(Call* expr) {
3078 // We want to verify that RecordJSReturnSite gets called on all paths
3079 // through this function. Avoid early returns.
3080 expr->return_is_recorded_ = false;
3083 Comment cmnt(masm_, "[ Call");
3084 Expression* callee = expr->expression();
3085 Call::CallType call_type = expr->GetCallType(isolate());
3087 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3088 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3089 // to resolve the function we need to call. Then we call the resolved
3090 // function using the given arguments.
3091 ZoneList<Expression*>* args = expr->arguments();
3092 int arg_count = args->length();
3093 PushCalleeAndWithBaseObject(expr);
3095 // Push the arguments.
3096 for (int i = 0; i < arg_count; i++) {
3097 VisitForStackValue(args->at(i));
3100 // Push a copy of the function (found below the arguments) and
3102 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3104 EmitResolvePossiblyDirectEval(arg_count);
3106 // Touch up the stack with the resolved function.
3107 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3109 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3110 // Record source position for debugger.
3111 SetCallPosition(expr, arg_count);
3112 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3113 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3115 RecordJSReturnSite(expr);
3116 // Restore context register.
3117 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3118 context()->DropAndPlug(1, v0);
3119 } else if (call_type == Call::GLOBAL_CALL) {
3120 EmitCallWithLoadIC(expr);
3121 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3122 // Call to a lookup slot (dynamically introduced variable).
3123 PushCalleeAndWithBaseObject(expr);
3125 } else if (call_type == Call::PROPERTY_CALL) {
3126 Property* property = callee->AsProperty();
3127 bool is_named_call = property->key()->IsPropertyName();
3128 if (property->IsSuperAccess()) {
3129 if (is_named_call) {
3130 EmitSuperCallWithLoadIC(expr);
3132 EmitKeyedSuperCallWithLoadIC(expr);
3135 VisitForStackValue(property->obj());
3136 if (is_named_call) {
3137 EmitCallWithLoadIC(expr);
3139 EmitKeyedCallWithLoadIC(expr, property->key());
3142 } else if (call_type == Call::SUPER_CALL) {
3143 EmitSuperConstructorCall(expr);
3145 DCHECK(call_type == Call::OTHER_CALL);
3146 // Call to an arbitrary expression not handled specially above.
3147 VisitForStackValue(callee);
3148 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3150 // Emit function call.
3155 // RecordJSReturnSite should have been called.
3156 DCHECK(expr->return_is_recorded_);
3161 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3162 Comment cmnt(masm_, "[ CallNew");
3163 // According to ECMA-262, section 11.2.2, page 44, the function
3164 // expression in new calls must be evaluated before the
3167 // Push constructor on the stack. If it's not a function it's used as
3168 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3170 DCHECK(!expr->expression()->IsSuperPropertyReference());
3171 VisitForStackValue(expr->expression());
3173 // Push the arguments ("left-to-right") on the stack.
3174 ZoneList<Expression*>* args = expr->arguments();
3175 int arg_count = args->length();
3176 for (int i = 0; i < arg_count; i++) {
3177 VisitForStackValue(args->at(i));
3180 // Call the construct call builtin that handles allocation and
3181 // constructor invocation.
3182 SetConstructCallPosition(expr);
3184 // Load function and argument count into a1 and a0.
3185 __ li(a0, Operand(arg_count));
3186 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3188 // Record call targets in unoptimized code.
3189 if (FLAG_pretenuring_call_new) {
3190 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3191 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3192 expr->CallNewFeedbackSlot().ToInt() + 1);
3195 __ li(a2, FeedbackVector());
3196 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3198 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3199 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3200 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3201 context()->Plug(v0);
3205 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3206 SuperCallReference* super_call_ref =
3207 expr->expression()->AsSuperCallReference();
3208 DCHECK_NOT_NULL(super_call_ref);
3210 EmitLoadSuperConstructor(super_call_ref);
3211 __ push(result_register());
3213 // Push the arguments ("left-to-right") on the stack.
3214 ZoneList<Expression*>* args = expr->arguments();
3215 int arg_count = args->length();
3216 for (int i = 0; i < arg_count; i++) {
3217 VisitForStackValue(args->at(i));
3220 // Call the construct call builtin that handles allocation and
3221 // constructor invocation.
3222 SetConstructCallPosition(expr);
3224 // Load original constructor into a4.
3225 VisitForAccumulatorValue(super_call_ref->new_target_var());
3226 __ mov(a4, result_register());
3228 // Load function and argument count into a1 and a0.
3229 __ li(a0, Operand(arg_count));
3230 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3232 // Record call targets in unoptimized code.
3233 if (FLAG_pretenuring_call_new) {
3235 /* TODO(dslomov): support pretenuring.
3236 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3237 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3238 expr->CallNewFeedbackSlot().ToInt() + 1);
3242 __ li(a2, FeedbackVector());
3243 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3245 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3246 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3248 RecordJSReturnSite(expr);
3250 context()->Plug(v0);
3254 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3255 ZoneList<Expression*>* args = expr->arguments();
3256 DCHECK(args->length() == 1);
3258 VisitForAccumulatorValue(args->at(0));
3260 Label materialize_true, materialize_false;
3261 Label* if_true = NULL;
3262 Label* if_false = NULL;
3263 Label* fall_through = NULL;
3264 context()->PrepareTest(&materialize_true, &materialize_false,
3265 &if_true, &if_false, &fall_through);
3267 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3271 context()->Plug(if_true, if_false);
3275 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 DCHECK(args->length() == 1);
3279 VisitForAccumulatorValue(args->at(0));
3281 Label materialize_true, materialize_false;
3282 Label* if_true = NULL;
3283 Label* if_false = NULL;
3284 Label* fall_through = NULL;
3285 context()->PrepareTest(&materialize_true, &materialize_false,
3286 &if_true, &if_false, &fall_through);
3288 __ JumpIfSmi(v0, if_false);
3289 __ GetObjectType(v0, a1, a1);
3290 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3291 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3292 if_true, if_false, fall_through);
3294 context()->Plug(if_true, if_false);
3298 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3299 ZoneList<Expression*>* args = expr->arguments();
3300 DCHECK(args->length() == 1);
3302 VisitForAccumulatorValue(args->at(0));
3304 Label materialize_true, materialize_false;
3305 Label* if_true = NULL;
3306 Label* if_false = NULL;
3307 Label* fall_through = NULL;
3308 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3309 &if_false, &fall_through);
3311 __ JumpIfSmi(v0, if_false);
3312 __ GetObjectType(v0, a1, a1);
3313 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3314 Split(eq, a1, Operand(SIMD128_VALUE_TYPE), if_true, if_false, fall_through);
3316 context()->Plug(if_true, if_false);
3320 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3321 CallRuntime* expr) {
3322 ZoneList<Expression*>* args = expr->arguments();
3323 DCHECK(args->length() == 1);
3325 VisitForAccumulatorValue(args->at(0));
3327 Label materialize_true, materialize_false, skip_lookup;
3328 Label* if_true = NULL;
3329 Label* if_false = NULL;
3330 Label* fall_through = NULL;
3331 context()->PrepareTest(&materialize_true, &materialize_false,
3332 &if_true, &if_false, &fall_through);
3334 __ AssertNotSmi(v0);
3336 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3337 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3338 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3339 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3341 // Check for fast case object. Generate false result for slow case object.
3342 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3343 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3344 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3345 __ Branch(if_false, eq, a2, Operand(a4));
3347 // Look for valueOf name in the descriptor array, and indicate false if
3348 // found. Since we omit an enumeration index check, if it is added via a
3349 // transition that shares its descriptor array, this is a false positive.
3350 Label entry, loop, done;
3352 // Skip loop if no descriptors are valid.
3353 __ NumberOfOwnDescriptors(a3, a1);
3354 __ Branch(&done, eq, a3, Operand(zero_reg));
3356 __ LoadInstanceDescriptors(a1, a4);
3357 // a4: descriptor array.
3358 // a3: valid entries in the descriptor array.
3359 STATIC_ASSERT(kSmiTag == 0);
3360 STATIC_ASSERT(kSmiTagSize == 1);
3362 // STATIC_ASSERT(kPointerSize == 4);
3363 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3364 __ Dmul(a3, a3, at);
3365 // Calculate location of the first key name.
3366 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3367 // Calculate the end of the descriptor array.
3369 __ dsll(a5, a3, kPointerSizeLog2);
3370 __ Daddu(a2, a2, a5);
3372 // Loop through all the keys in the descriptor array. If one of these is the
3373 // string "valueOf" the result is false.
3374 // The use of a6 to store the valueOf string assumes that it is not otherwise
3375 // used in the loop below.
3376 __ LoadRoot(a6, Heap::kvalueOf_stringRootIndex);
3379 __ ld(a3, MemOperand(a4, 0));
3380 __ Branch(if_false, eq, a3, Operand(a6));
3381 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3383 __ Branch(&loop, ne, a4, Operand(a2));
3387 // Set the bit in the map to indicate that there is no local valueOf field.
3388 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3389 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3390 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3392 __ bind(&skip_lookup);
3394 // If a valueOf property is not found on the object check that its
3395 // prototype is the un-modified String prototype. If not result is false.
3396 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3397 __ JumpIfSmi(a2, if_false);
3398 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3399 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3400 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3401 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3402 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3403 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3405 context()->Plug(if_true, if_false);
3409 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3410 ZoneList<Expression*>* args = expr->arguments();
3411 DCHECK(args->length() == 1);
3413 VisitForAccumulatorValue(args->at(0));
3415 Label materialize_true, materialize_false;
3416 Label* if_true = NULL;
3417 Label* if_false = NULL;
3418 Label* fall_through = NULL;
3419 context()->PrepareTest(&materialize_true, &materialize_false,
3420 &if_true, &if_false, &fall_through);
3422 __ JumpIfSmi(v0, if_false);
3423 __ GetObjectType(v0, a1, a2);
3424 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3425 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3426 __ Branch(if_false);
3428 context()->Plug(if_true, if_false);
3432 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3433 ZoneList<Expression*>* args = expr->arguments();
3434 DCHECK(args->length() == 1);
3436 VisitForAccumulatorValue(args->at(0));
3438 Label materialize_true, materialize_false;
3439 Label* if_true = NULL;
3440 Label* if_false = NULL;
3441 Label* fall_through = NULL;
3442 context()->PrepareTest(&materialize_true, &materialize_false,
3443 &if_true, &if_false, &fall_through);
3445 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3446 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3447 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3448 __ li(a4, 0x80000000);
3450 __ Branch(¬_nan, ne, a2, Operand(a4));
3451 __ mov(a4, zero_reg);
3455 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3456 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3458 context()->Plug(if_true, if_false);
3462 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3463 ZoneList<Expression*>* args = expr->arguments();
3464 DCHECK(args->length() == 1);
3466 VisitForAccumulatorValue(args->at(0));
3468 Label materialize_true, materialize_false;
3469 Label* if_true = NULL;
3470 Label* if_false = NULL;
3471 Label* fall_through = NULL;
3472 context()->PrepareTest(&materialize_true, &materialize_false,
3473 &if_true, &if_false, &fall_through);
3475 __ JumpIfSmi(v0, if_false);
3476 __ GetObjectType(v0, a1, a1);
3477 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3478 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3479 if_true, if_false, fall_through);
3481 context()->Plug(if_true, if_false);
3485 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3486 ZoneList<Expression*>* args = expr->arguments();
3487 DCHECK(args->length() == 1);
3489 VisitForAccumulatorValue(args->at(0));
3491 Label materialize_true, materialize_false;
3492 Label* if_true = NULL;
3493 Label* if_false = NULL;
3494 Label* fall_through = NULL;
3495 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3496 &if_false, &fall_through);
3498 __ JumpIfSmi(v0, if_false);
3499 __ GetObjectType(v0, a1, a1);
3500 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3501 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3503 context()->Plug(if_true, if_false);
3507 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3508 ZoneList<Expression*>* args = expr->arguments();
3509 DCHECK(args->length() == 1);
3511 VisitForAccumulatorValue(args->at(0));
3513 Label materialize_true, materialize_false;
3514 Label* if_true = NULL;
3515 Label* if_false = NULL;
3516 Label* fall_through = NULL;
3517 context()->PrepareTest(&materialize_true, &materialize_false,
3518 &if_true, &if_false, &fall_through);
3520 __ JumpIfSmi(v0, if_false);
3521 __ GetObjectType(v0, a1, a1);
3522 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3523 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3525 context()->Plug(if_true, if_false);
3529 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3530 ZoneList<Expression*>* args = expr->arguments();
3531 DCHECK(args->length() == 1);
3533 VisitForAccumulatorValue(args->at(0));
3535 Label materialize_true, materialize_false;
3536 Label* if_true = NULL;
3537 Label* if_false = NULL;
3538 Label* fall_through = NULL;
3539 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3540 &if_false, &fall_through);
3542 __ JumpIfSmi(v0, if_false);
3544 Register type_reg = a2;
3545 __ GetObjectType(v0, map, type_reg);
3546 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3547 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3548 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3549 if_true, if_false, fall_through);
3551 context()->Plug(if_true, if_false);
3555 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3556 DCHECK(expr->arguments()->length() == 0);
3558 Label materialize_true, materialize_false;
3559 Label* if_true = NULL;
3560 Label* if_false = NULL;
3561 Label* fall_through = NULL;
3562 context()->PrepareTest(&materialize_true, &materialize_false,
3563 &if_true, &if_false, &fall_through);
3565 // Get the frame pointer for the calling frame.
3566 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3568 // Skip the arguments adaptor frame if it exists.
3569 Label check_frame_marker;
3570 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3571 __ Branch(&check_frame_marker, ne,
3572 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3573 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3575 // Check the marker in the calling frame.
3576 __ bind(&check_frame_marker);
3577 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3578 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3579 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3580 if_true, if_false, fall_through);
3582 context()->Plug(if_true, if_false);
3586 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3587 ZoneList<Expression*>* args = expr->arguments();
3588 DCHECK(args->length() == 2);
3590 // Load the two objects into registers and perform the comparison.
3591 VisitForStackValue(args->at(0));
3592 VisitForAccumulatorValue(args->at(1));
3594 Label materialize_true, materialize_false;
3595 Label* if_true = NULL;
3596 Label* if_false = NULL;
3597 Label* fall_through = NULL;
3598 context()->PrepareTest(&materialize_true, &materialize_false,
3599 &if_true, &if_false, &fall_through);
3602 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3603 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3605 context()->Plug(if_true, if_false);
3609 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3610 ZoneList<Expression*>* args = expr->arguments();
3611 DCHECK(args->length() == 1);
3613 // ArgumentsAccessStub expects the key in a1 and the formal
3614 // parameter count in a0.
3615 VisitForAccumulatorValue(args->at(0));
3617 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3618 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3620 context()->Plug(v0);
3624 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3625 DCHECK(expr->arguments()->length() == 0);
3627 // Get the number of formal parameters.
3628 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3630 // Check if the calling frame is an arguments adaptor frame.
3631 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3632 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3633 __ Branch(&exit, ne, a3,
3634 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3636 // Arguments adaptor case: Read the arguments length from the
3638 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3641 context()->Plug(v0);
3645 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 DCHECK(args->length() == 1);
3648 Label done, null, function, non_function_constructor;
3650 VisitForAccumulatorValue(args->at(0));
3652 // If the object is a smi, we return null.
3653 __ JumpIfSmi(v0, &null);
3655 // Check that the object is a JS object but take special care of JS
3656 // functions to make sure they have 'Function' as their class.
3657 // Assume that there are only two callable types, and one of them is at
3658 // either end of the type range for JS object types. Saves extra comparisons.
3659 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3660 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3661 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3663 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3664 FIRST_SPEC_OBJECT_TYPE + 1);
3665 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3667 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3668 LAST_SPEC_OBJECT_TYPE - 1);
3669 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3670 // Assume that there is no larger type.
3671 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3673 // Check if the constructor in the map is a JS function.
3674 Register instance_type = a2;
3675 __ GetMapConstructor(v0, v0, a1, instance_type);
3676 __ Branch(&non_function_constructor, ne, instance_type,
3677 Operand(JS_FUNCTION_TYPE));
3679 // v0 now contains the constructor function. Grab the
3680 // instance class name from there.
3681 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3682 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3685 // Functions have class 'Function'.
3687 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3690 // Objects with a non-function constructor have class 'Object'.
3691 __ bind(&non_function_constructor);
3692 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3695 // Non-JS objects have class null.
3697 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3702 context()->Plug(v0);
3706 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3707 ZoneList<Expression*>* args = expr->arguments();
3708 DCHECK(args->length() == 1);
3710 VisitForAccumulatorValue(args->at(0)); // Load the object.
3713 // If the object is a smi return the object.
3714 __ JumpIfSmi(v0, &done);
3715 // If the object is not a value type, return the object.
3716 __ GetObjectType(v0, a1, a1);
3717 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3719 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3722 context()->Plug(v0);
3726 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3727 ZoneList<Expression*>* args = expr->arguments();
3728 DCHECK_EQ(1, args->length());
3730 VisitForAccumulatorValue(args->at(0));
3732 Label materialize_true, materialize_false;
3733 Label* if_true = nullptr;
3734 Label* if_false = nullptr;
3735 Label* fall_through = nullptr;
3736 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3737 &if_false, &fall_through);
3739 __ JumpIfSmi(v0, if_false);
3740 __ GetObjectType(v0, a1, a1);
3741 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3742 Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);
3744 context()->Plug(if_true, if_false);
3748 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3749 ZoneList<Expression*>* args = expr->arguments();
3750 DCHECK(args->length() == 2);
3751 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3752 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3754 VisitForAccumulatorValue(args->at(0)); // Load the object.
3756 Register object = v0;
3757 Register result = v0;
3758 Register scratch0 = t1;
3759 Register scratch1 = a1;
3761 if (index->value() == 0) {
3762 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3764 Label runtime, done;
3765 if (index->value() < JSDate::kFirstUncachedField) {
3766 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3767 __ li(scratch1, Operand(stamp));
3768 __ ld(scratch1, MemOperand(scratch1));
3769 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3770 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3771 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3772 kPointerSize * index->value()));
3776 __ PrepareCallCFunction(2, scratch1);
3777 __ li(a1, Operand(index));
3778 __ Move(a0, object);
3779 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3783 context()->Plug(result);
3787 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3788 ZoneList<Expression*>* args = expr->arguments();
3789 DCHECK_EQ(3, args->length());
3791 Register string = v0;
3792 Register index = a1;
3793 Register value = a2;
3795 VisitForStackValue(args->at(0)); // index
3796 VisitForStackValue(args->at(1)); // value
3797 VisitForAccumulatorValue(args->at(2)); // string
3798 __ Pop(index, value);
3800 if (FLAG_debug_code) {
3801 __ SmiTst(value, at);
3802 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3803 __ SmiTst(index, at);
3804 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3805 __ SmiUntag(index, index);
3806 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3807 Register scratch = t1;
3808 __ EmitSeqStringSetCharCheck(
3809 string, index, value, scratch, one_byte_seq_type);
3810 __ SmiTag(index, index);
3813 __ SmiUntag(value, value);
3816 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3818 __ Daddu(at, at, index);
3819 __ sb(value, MemOperand(at));
3820 context()->Plug(string);
3824 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments();
3826 DCHECK_EQ(3, args->length());
3828 Register string = v0;
3829 Register index = a1;
3830 Register value = a2;
3832 VisitForStackValue(args->at(0)); // index
3833 VisitForStackValue(args->at(1)); // value
3834 VisitForAccumulatorValue(args->at(2)); // string
3835 __ Pop(index, value);
3837 if (FLAG_debug_code) {
3838 __ SmiTst(value, at);
3839 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3840 __ SmiTst(index, at);
3841 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3842 __ SmiUntag(index, index);
3843 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3844 Register scratch = t1;
3845 __ EmitSeqStringSetCharCheck(
3846 string, index, value, scratch, two_byte_seq_type);
3847 __ SmiTag(index, index);
3850 __ SmiUntag(value, value);
3853 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3854 __ dsra(index, index, 32 - 1);
3855 __ Daddu(at, at, index);
3856 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3857 __ sh(value, MemOperand(at));
3858 context()->Plug(string);
3862 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3863 ZoneList<Expression*>* args = expr->arguments();
3864 DCHECK(args->length() == 2);
3866 VisitForStackValue(args->at(0)); // Load the object.
3867 VisitForAccumulatorValue(args->at(1)); // Load the value.
3868 __ pop(a1); // v0 = value. a1 = object.
3871 // If the object is a smi, return the value.
3872 __ JumpIfSmi(a1, &done);
3874 // If the object is not a value type, return the value.
3875 __ GetObjectType(a1, a2, a2);
3876 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3879 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3880 // Update the write barrier. Save the value as it will be
3881 // overwritten by the write barrier code and is needed afterward.
3883 __ RecordWriteField(
3884 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3887 context()->Plug(v0);
3891 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3892 ZoneList<Expression*>* args = expr->arguments();
3893 DCHECK_EQ(args->length(), 1);
3895 // Load the argument into a0 and call the stub.
3896 VisitForAccumulatorValue(args->at(0));
3897 __ mov(a0, result_register());
3899 NumberToStringStub stub(isolate());
3901 context()->Plug(v0);
3905 void FullCodeGenerator::EmitToString(CallRuntime* expr) {
3906 ZoneList<Expression*>* args = expr->arguments();
3907 DCHECK_EQ(1, args->length());
3909 // Load the argument into a0 and convert it.
3910 VisitForAccumulatorValue(args->at(0));
3911 __ mov(a0, result_register());
3913 ToStringStub stub(isolate());
3915 context()->Plug(v0);
3919 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3920 ZoneList<Expression*>* args = expr->arguments();
3921 DCHECK_EQ(1, args->length());
3923 // Load the argument into v0 and convert it.
3924 VisitForAccumulatorValue(args->at(0));
3926 Label convert, done_convert;
3927 __ JumpIfSmi(v0, &convert);
3928 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3929 __ GetObjectType(v0, a1, a1);
3930 __ Branch(&done_convert, le, a1, Operand(LAST_NAME_TYPE));
3932 ToStringStub stub(isolate());
3935 __ bind(&done_convert);
3936 context()->Plug(v0);
3940 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3941 ZoneList<Expression*>* args = expr->arguments();
3942 DCHECK_EQ(1, args->length());
3944 // Load the argument into a0 and convert it.
3945 VisitForAccumulatorValue(args->at(0));
3946 __ mov(a0, result_register());
3948 ToObjectStub stub(isolate());
3950 context()->Plug(v0);
3954 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3955 ZoneList<Expression*>* args = expr->arguments();
3956 DCHECK(args->length() == 1);
3958 VisitForAccumulatorValue(args->at(0));
3961 StringCharFromCodeGenerator generator(v0, a1);
3962 generator.GenerateFast(masm_);
3965 NopRuntimeCallHelper call_helper;
3966 generator.GenerateSlow(masm_, call_helper);
3969 context()->Plug(a1);
3973 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3974 ZoneList<Expression*>* args = expr->arguments();
3975 DCHECK(args->length() == 2);
3977 VisitForStackValue(args->at(0));
3978 VisitForAccumulatorValue(args->at(1));
3979 __ mov(a0, result_register());
3981 Register object = a1;
3982 Register index = a0;
3983 Register result = v0;
3987 Label need_conversion;
3988 Label index_out_of_range;
3990 StringCharCodeAtGenerator generator(object,
3995 &index_out_of_range,
3996 STRING_INDEX_IS_NUMBER);
3997 generator.GenerateFast(masm_);
4000 __ bind(&index_out_of_range);
4001 // When the index is out of range, the spec requires us to return
4003 __ LoadRoot(result, Heap::kNanValueRootIndex);
4006 __ bind(&need_conversion);
4007 // Load the undefined value into the result register, which will
4008 // trigger conversion.
4009 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4012 NopRuntimeCallHelper call_helper;
4013 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4016 context()->Plug(result);
4020 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4021 ZoneList<Expression*>* args = expr->arguments();
4022 DCHECK(args->length() == 2);
4024 VisitForStackValue(args->at(0));
4025 VisitForAccumulatorValue(args->at(1));
4026 __ mov(a0, result_register());
4028 Register object = a1;
4029 Register index = a0;
4030 Register scratch = a3;
4031 Register result = v0;
4035 Label need_conversion;
4036 Label index_out_of_range;
4038 StringCharAtGenerator generator(object,
4044 &index_out_of_range,
4045 STRING_INDEX_IS_NUMBER);
4046 generator.GenerateFast(masm_);
4049 __ bind(&index_out_of_range);
4050 // When the index is out of range, the spec requires us to return
4051 // the empty string.
4052 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4055 __ bind(&need_conversion);
4056 // Move smi zero into the result register, which will trigger
4058 __ li(result, Operand(Smi::FromInt(0)));
4061 NopRuntimeCallHelper call_helper;
4062 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4065 context()->Plug(result);
4069 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4070 ZoneList<Expression*>* args = expr->arguments();
4071 DCHECK_EQ(2, args->length());
4072 VisitForStackValue(args->at(0));
4073 VisitForAccumulatorValue(args->at(1));
4076 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4077 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4079 context()->Plug(v0);
4083 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
4084 ZoneList<Expression*>* args = expr->arguments();
4085 DCHECK_LE(2, args->length());
4086 // Push target, receiver and arguments onto the stack.
4087 for (Expression* const arg : *args) {
4088 VisitForStackValue(arg);
4090 // Move target to a1.
4091 int const argc = args->length() - 2;
4092 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
4094 __ li(a0, Operand(argc));
4095 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
4096 // Restore context register.
4097 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4098 // Discard the function left on TOS.
4099 context()->DropAndPlug(1, v0);
4103 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4104 ZoneList<Expression*>* args = expr->arguments();
4105 DCHECK(args->length() >= 2);
4107 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4108 for (int i = 0; i < arg_count + 1; i++) {
4109 VisitForStackValue(args->at(i));
4111 VisitForAccumulatorValue(args->last()); // Function.
4113 Label runtime, done;
4114 // Check for non-function argument (including proxy).
4115 __ JumpIfSmi(v0, &runtime);
4116 __ GetObjectType(v0, a1, a1);
4117 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4119 // InvokeFunction requires the function in a1. Move it in there.
4120 __ mov(a1, result_register());
4121 ParameterCount count(arg_count);
4122 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4123 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4128 __ CallRuntime(Runtime::kCallFunction, args->length());
4131 context()->Plug(v0);
4135 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4136 ZoneList<Expression*>* args = expr->arguments();
4137 DCHECK(args->length() == 2);
4139 // Evaluate new.target and super constructor.
4140 VisitForStackValue(args->at(0));
4141 VisitForStackValue(args->at(1));
4143 // Load original constructor into a4.
4144 __ ld(a4, MemOperand(sp, 1 * kPointerSize));
4146 // Check if the calling frame is an arguments adaptor frame.
4147 Label adaptor_frame, args_set_up, runtime;
4148 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4149 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4150 __ Branch(&adaptor_frame, eq, a3,
4151 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4152 // default constructor has no arguments, so no adaptor frame means no args.
4153 __ mov(a0, zero_reg);
4154 __ Branch(&args_set_up);
4156 // Copy arguments from adaptor frame.
4158 __ bind(&adaptor_frame);
4159 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4160 __ SmiUntag(a1, a1);
4164 // Get arguments pointer in a2.
4165 __ dsll(at, a1, kPointerSizeLog2);
4166 __ Daddu(a2, a2, Operand(at));
4167 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4170 // Pre-decrement a2 with kPointerSize on each iteration.
4171 // Pre-decrement in order to skip receiver.
4172 __ Daddu(a2, a2, Operand(-kPointerSize));
4173 __ ld(a3, MemOperand(a2));
4175 __ Daddu(a1, a1, Operand(-1));
4176 __ Branch(&loop, ne, a1, Operand(zero_reg));
4179 __ bind(&args_set_up);
4180 __ dsll(at, a0, kPointerSizeLog2);
4181 __ Daddu(at, at, Operand(sp));
4182 __ ld(a1, MemOperand(at, 0));
4183 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4185 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4186 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4190 context()->Plug(result_register());
4194 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4195 RegExpConstructResultStub stub(isolate());
4196 ZoneList<Expression*>* args = expr->arguments();
4197 DCHECK(args->length() == 3);
4198 VisitForStackValue(args->at(0));
4199 VisitForStackValue(args->at(1));
4200 VisitForAccumulatorValue(args->at(2));
4201 __ mov(a0, result_register());
4205 context()->Plug(v0);
4209 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4210 ZoneList<Expression*>* args = expr->arguments();
4211 VisitForAccumulatorValue(args->at(0));
4213 Label materialize_true, materialize_false;
4214 Label* if_true = NULL;
4215 Label* if_false = NULL;
4216 Label* fall_through = NULL;
4217 context()->PrepareTest(&materialize_true, &materialize_false,
4218 &if_true, &if_false, &fall_through);
4220 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4221 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4223 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4224 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4226 context()->Plug(if_true, if_false);
4230 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4231 ZoneList<Expression*>* args = expr->arguments();
4232 DCHECK(args->length() == 1);
4233 VisitForAccumulatorValue(args->at(0));
4235 __ AssertString(v0);
4237 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4238 __ IndexFromHash(v0, v0);
4240 context()->Plug(v0);
4244 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4245 Label bailout, done, one_char_separator, long_separator,
4246 non_trivial_array, not_size_one_array, loop,
4247 empty_separator_loop, one_char_separator_loop,
4248 one_char_separator_loop_entry, long_separator_loop;
4249 ZoneList<Expression*>* args = expr->arguments();
4250 DCHECK(args->length() == 2);
4251 VisitForStackValue(args->at(1));
4252 VisitForAccumulatorValue(args->at(0));
4254 // All aliases of the same register have disjoint lifetimes.
4255 Register array = v0;
4256 Register elements = no_reg; // Will be v0.
4257 Register result = no_reg; // Will be v0.
4258 Register separator = a1;
4259 Register array_length = a2;
4260 Register result_pos = no_reg; // Will be a2.
4261 Register string_length = a3;
4262 Register string = a4;
4263 Register element = a5;
4264 Register elements_end = a6;
4265 Register scratch1 = a7;
4266 Register scratch2 = t1;
4267 Register scratch3 = t0;
4269 // Separator operand is on the stack.
4272 // Check that the array is a JSArray.
4273 __ JumpIfSmi(array, &bailout);
4274 __ GetObjectType(array, scratch1, scratch2);
4275 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4277 // Check that the array has fast elements.
4278 __ CheckFastElements(scratch1, scratch2, &bailout);
4280 // If the array has length zero, return the empty string.
4281 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4282 __ SmiUntag(array_length);
4283 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4284 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4287 __ bind(&non_trivial_array);
4289 // Get the FixedArray containing array's elements.
4291 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4292 array = no_reg; // End of array's live range.
4294 // Check that all array elements are sequential one-byte strings, and
4295 // accumulate the sum of their lengths, as a smi-encoded value.
4296 __ mov(string_length, zero_reg);
4298 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4299 __ dsll(elements_end, array_length, kPointerSizeLog2);
4300 __ Daddu(elements_end, element, elements_end);
4301 // Loop condition: while (element < elements_end).
4302 // Live values in registers:
4303 // elements: Fixed array of strings.
4304 // array_length: Length of the fixed array of strings (not smi)
4305 // separator: Separator string
4306 // string_length: Accumulated sum of string lengths (smi).
4307 // element: Current array element.
4308 // elements_end: Array end.
4309 if (generate_debug_code_) {
4310 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4314 __ ld(string, MemOperand(element));
4315 __ Daddu(element, element, kPointerSize);
4316 __ JumpIfSmi(string, &bailout);
4317 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4318 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4319 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4320 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4321 __ DadduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4322 __ BranchOnOverflow(&bailout, scratch3);
4323 __ Branch(&loop, lt, element, Operand(elements_end));
4325 // If array_length is 1, return elements[0], a string.
4326 __ Branch(¬_size_one_array, ne, array_length, Operand(1));
4327 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4330 __ bind(¬_size_one_array);
4332 // Live values in registers:
4333 // separator: Separator string
4334 // array_length: Length of the array.
4335 // string_length: Sum of string lengths (smi).
4336 // elements: FixedArray of strings.
4338 // Check that the separator is a flat one-byte string.
4339 __ JumpIfSmi(separator, &bailout);
4340 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4341 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4342 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4344 // Add (separator length times array_length) - separator length to the
4345 // string_length to get the length of the result string. array_length is not
4346 // smi but the other values are, so the result is a smi.
4347 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4348 __ Dsubu(string_length, string_length, Operand(scratch1));
4349 __ SmiUntag(scratch1);
4350 __ Dmul(scratch2, array_length, scratch1);
4351 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4353 __ dsra32(scratch1, scratch2, 0);
4354 __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
4355 __ SmiUntag(string_length);
4356 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4357 __ BranchOnOverflow(&bailout, scratch3);
4359 // Get first element in the array to free up the elements register to be used
4362 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4363 result = elements; // End of live range for elements.
4365 // Live values in registers:
4366 // element: First array element
4367 // separator: Separator string
4368 // string_length: Length of result string (not smi)
4369 // array_length: Length of the array.
4370 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4371 elements_end, &bailout);
4372 // Prepare for looping. Set up elements_end to end of the array. Set
4373 // result_pos to the position of the result where to write the first
4375 __ dsll(elements_end, array_length, kPointerSizeLog2);
4376 __ Daddu(elements_end, element, elements_end);
4377 result_pos = array_length; // End of live range for array_length.
4378 array_length = no_reg;
4379 __ Daddu(result_pos,
4381 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4383 // Check the length of the separator.
4384 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4385 __ li(at, Operand(Smi::FromInt(1)));
4386 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4387 __ Branch(&long_separator, gt, scratch1, Operand(at));
4389 // Empty separator case.
4390 __ bind(&empty_separator_loop);
4391 // Live values in registers:
4392 // result_pos: the position to which we are currently copying characters.
4393 // element: Current array element.
4394 // elements_end: Array end.
4396 // Copy next array element to the result.
4397 __ ld(string, MemOperand(element));
4398 __ Daddu(element, element, kPointerSize);
4399 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4400 __ SmiUntag(string_length);
4401 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4402 __ CopyBytes(string, result_pos, string_length, scratch1);
4403 // End while (element < elements_end).
4404 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4405 DCHECK(result.is(v0));
4408 // One-character separator case.
4409 __ bind(&one_char_separator);
4410 // Replace separator with its one-byte character value.
4411 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4412 // Jump into the loop after the code that copies the separator, so the first
4413 // element is not preceded by a separator.
4414 __ jmp(&one_char_separator_loop_entry);
4416 __ bind(&one_char_separator_loop);
4417 // Live values in registers:
4418 // result_pos: the position to which we are currently copying characters.
4419 // element: Current array element.
4420 // elements_end: Array end.
4421 // separator: Single separator one-byte char (in lower byte).
4423 // Copy the separator character to the result.
4424 __ sb(separator, MemOperand(result_pos));
4425 __ Daddu(result_pos, result_pos, 1);
4427 // Copy next array element to the result.
4428 __ bind(&one_char_separator_loop_entry);
4429 __ ld(string, MemOperand(element));
4430 __ Daddu(element, element, kPointerSize);
4431 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4432 __ SmiUntag(string_length);
4433 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4434 __ CopyBytes(string, result_pos, string_length, scratch1);
4435 // End while (element < elements_end).
4436 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4437 DCHECK(result.is(v0));
4440 // Long separator case (separator is more than one character). Entry is at the
4441 // label long_separator below.
4442 __ bind(&long_separator_loop);
4443 // Live values in registers:
4444 // result_pos: the position to which we are currently copying characters.
4445 // element: Current array element.
4446 // elements_end: Array end.
4447 // separator: Separator string.
4449 // Copy the separator to the result.
4450 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4451 __ SmiUntag(string_length);
4454 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4455 __ CopyBytes(string, result_pos, string_length, scratch1);
4457 __ bind(&long_separator);
4458 __ ld(string, MemOperand(element));
4459 __ Daddu(element, element, kPointerSize);
4460 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4461 __ SmiUntag(string_length);
4462 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4463 __ CopyBytes(string, result_pos, string_length, scratch1);
4464 // End while (element < elements_end).
4465 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4466 DCHECK(result.is(v0));
4470 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4472 context()->Plug(v0);
4476 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4477 DCHECK(expr->arguments()->length() == 0);
4478 ExternalReference debug_is_active =
4479 ExternalReference::debug_is_active_address(isolate());
4480 __ li(at, Operand(debug_is_active));
4481 __ lbu(v0, MemOperand(at));
4483 context()->Plug(v0);
4487 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
4488 ZoneList<Expression*>* args = expr->arguments();
4489 DCHECK_EQ(2, args->length());
4490 VisitForStackValue(args->at(0));
4491 VisitForStackValue(args->at(1));
4493 Label runtime, done;
4495 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
4496 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4497 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
4498 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
4500 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
4501 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
4502 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4503 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
4504 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
4505 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
4506 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
4510 __ CallRuntime(Runtime::kCreateIterResultObject, 2);
4513 context()->Plug(v0);
4517 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4518 // Push undefined as the receiver.
4519 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4522 __ ld(v0, GlobalObjectOperand());
4523 __ ld(v0, FieldMemOperand(v0, GlobalObject::kNativeContextOffset));
4524 __ ld(v0, ContextOperand(v0, expr->context_index()));
4528 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4529 ZoneList<Expression*>* args = expr->arguments();
4530 int arg_count = args->length();
4532 SetCallPosition(expr, arg_count);
4533 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4534 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4539 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4540 ZoneList<Expression*>* args = expr->arguments();
4541 int arg_count = args->length();
4543 if (expr->is_jsruntime()) {
4544 Comment cmnt(masm_, "[ CallRuntime");
4545 EmitLoadJSRuntimeFunction(expr);
4547 // Push the target function under the receiver.
4548 __ ld(at, MemOperand(sp, 0));
4550 __ sd(v0, MemOperand(sp, kPointerSize));
4552 // Push the arguments ("left-to-right").
4553 for (int i = 0; i < arg_count; i++) {
4554 VisitForStackValue(args->at(i));
4557 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4558 EmitCallJSRuntimeFunction(expr);
4560 // Restore context register.
4561 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4563 context()->DropAndPlug(1, v0);
4565 const Runtime::Function* function = expr->function();
4566 switch (function->function_id) {
4567 #define CALL_INTRINSIC_GENERATOR(Name) \
4568 case Runtime::kInline##Name: { \
4569 Comment cmnt(masm_, "[ Inline" #Name); \
4570 return Emit##Name(expr); \
4572 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4573 #undef CALL_INTRINSIC_GENERATOR
4575 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4576 // Push the arguments ("left-to-right").
4577 for (int i = 0; i < arg_count; i++) {
4578 VisitForStackValue(args->at(i));
4581 // Call the C runtime function.
4582 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4583 __ CallRuntime(expr->function(), arg_count);
4584 context()->Plug(v0);
4591 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4592 switch (expr->op()) {
4593 case Token::DELETE: {
4594 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4595 Property* property = expr->expression()->AsProperty();
4596 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4598 if (property != NULL) {
4599 VisitForStackValue(property->obj());
4600 VisitForStackValue(property->key());
4601 __ CallRuntime(is_strict(language_mode())
4602 ? Runtime::kDeleteProperty_Strict
4603 : Runtime::kDeleteProperty_Sloppy,
4605 context()->Plug(v0);
4606 } else if (proxy != NULL) {
4607 Variable* var = proxy->var();
4608 // Delete of an unqualified identifier is disallowed in strict mode but
4609 // "delete this" is allowed.
4610 bool is_this = var->HasThisName(isolate());
4611 DCHECK(is_sloppy(language_mode()) || is_this);
4612 if (var->IsUnallocatedOrGlobalSlot()) {
4613 __ ld(a2, GlobalObjectOperand());
4614 __ li(a1, Operand(var->name()));
4616 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4617 context()->Plug(v0);
4618 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4619 // Result of deleting non-global, non-dynamic variables is false.
4620 // The subexpression does not have side effects.
4621 context()->Plug(is_this);
4623 // Non-global variable. Call the runtime to try to delete from the
4624 // context where the variable was introduced.
4625 DCHECK(!context_register().is(a2));
4626 __ li(a2, Operand(var->name()));
4627 __ Push(context_register(), a2);
4628 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4629 context()->Plug(v0);
4632 // Result of deleting non-property, non-variable reference is true.
4633 // The subexpression may have side effects.
4634 VisitForEffect(expr->expression());
4635 context()->Plug(true);
4641 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4642 VisitForEffect(expr->expression());
4643 context()->Plug(Heap::kUndefinedValueRootIndex);
4648 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4649 if (context()->IsEffect()) {
4650 // Unary NOT has no side effects so it's only necessary to visit the
4651 // subexpression. Match the optimizing compiler by not branching.
4652 VisitForEffect(expr->expression());
4653 } else if (context()->IsTest()) {
4654 const TestContext* test = TestContext::cast(context());
4655 // The labels are swapped for the recursive call.
4656 VisitForControl(expr->expression(),
4657 test->false_label(),
4659 test->fall_through());
4660 context()->Plug(test->true_label(), test->false_label());
4662 // We handle value contexts explicitly rather than simply visiting
4663 // for control and plugging the control flow into the context,
4664 // because we need to prepare a pair of extra administrative AST ids
4665 // for the optimizing compiler.
4666 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4667 Label materialize_true, materialize_false, done;
4668 VisitForControl(expr->expression(),
4672 __ bind(&materialize_true);
4673 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4674 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4675 if (context()->IsStackValue()) __ push(v0);
4677 __ bind(&materialize_false);
4678 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4679 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4680 if (context()->IsStackValue()) __ push(v0);
4686 case Token::TYPEOF: {
4687 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4689 AccumulatorValueContext context(this);
4690 VisitForTypeofValue(expr->expression());
4693 TypeofStub typeof_stub(isolate());
4694 __ CallStub(&typeof_stub);
4695 context()->Plug(v0);
4705 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4706 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4708 Comment cmnt(masm_, "[ CountOperation");
4710 Property* prop = expr->expression()->AsProperty();
4711 LhsKind assign_type = Property::GetAssignType(prop);
4713 // Evaluate expression and get value.
4714 if (assign_type == VARIABLE) {
4715 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4716 AccumulatorValueContext context(this);
4717 EmitVariableLoad(expr->expression()->AsVariableProxy());
4719 // Reserve space for result of postfix operation.
4720 if (expr->is_postfix() && !context()->IsEffect()) {
4721 __ li(at, Operand(Smi::FromInt(0)));
4724 switch (assign_type) {
4725 case NAMED_PROPERTY: {
4726 // Put the object both on the stack and in the register.
4727 VisitForStackValue(prop->obj());
4728 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4729 EmitNamedPropertyLoad(prop);
4733 case NAMED_SUPER_PROPERTY: {
4734 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4735 VisitForAccumulatorValue(
4736 prop->obj()->AsSuperPropertyReference()->home_object());
4737 __ Push(result_register());
4738 const Register scratch = a1;
4739 __ ld(scratch, MemOperand(sp, kPointerSize));
4740 __ Push(scratch, result_register());
4741 EmitNamedSuperPropertyLoad(prop);
4745 case KEYED_SUPER_PROPERTY: {
4746 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4747 VisitForAccumulatorValue(
4748 prop->obj()->AsSuperPropertyReference()->home_object());
4749 const Register scratch = a1;
4750 const Register scratch1 = a4;
4751 __ Move(scratch, result_register());
4752 VisitForAccumulatorValue(prop->key());
4753 __ Push(scratch, result_register());
4754 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
4755 __ Push(scratch1, scratch, result_register());
4756 EmitKeyedSuperPropertyLoad(prop);
4760 case KEYED_PROPERTY: {
4761 VisitForStackValue(prop->obj());
4762 VisitForStackValue(prop->key());
4763 __ ld(LoadDescriptor::ReceiverRegister(),
4764 MemOperand(sp, 1 * kPointerSize));
4765 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4766 EmitKeyedPropertyLoad(prop);
4775 // We need a second deoptimization point after loading the value
4776 // in case evaluating the property load my have a side effect.
4777 if (assign_type == VARIABLE) {
4778 PrepareForBailout(expr->expression(), TOS_REG);
4780 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4783 // Inline smi case if we are in a loop.
4784 Label stub_call, done;
4785 JumpPatchSite patch_site(masm_);
4787 int count_value = expr->op() == Token::INC ? 1 : -1;
4789 if (ShouldInlineSmiCase(expr->op())) {
4791 patch_site.EmitJumpIfNotSmi(v0, &slow);
4793 // Save result for postfix expressions.
4794 if (expr->is_postfix()) {
4795 if (!context()->IsEffect()) {
4796 // Save the result on the stack. If we have a named or keyed property
4797 // we store the result under the receiver that is currently on top
4799 switch (assign_type) {
4803 case NAMED_PROPERTY:
4804 __ sd(v0, MemOperand(sp, kPointerSize));
4806 case NAMED_SUPER_PROPERTY:
4807 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4809 case KEYED_PROPERTY:
4810 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4812 case KEYED_SUPER_PROPERTY:
4813 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4819 Register scratch1 = a1;
4820 Register scratch2 = a4;
4821 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4822 __ DadduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4823 __ BranchOnNoOverflow(&done, scratch2);
4824 // Call stub. Undo operation first.
4829 if (!is_strong(language_mode())) {
4830 ToNumberStub convert_stub(isolate());
4831 __ CallStub(&convert_stub);
4832 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4835 // Save result for postfix expressions.
4836 if (expr->is_postfix()) {
4837 if (!context()->IsEffect()) {
4838 // Save the result on the stack. If we have a named or keyed property
4839 // we store the result under the receiver that is currently on top
4841 switch (assign_type) {
4845 case NAMED_PROPERTY:
4846 __ sd(v0, MemOperand(sp, kPointerSize));
4848 case NAMED_SUPER_PROPERTY:
4849 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4851 case KEYED_PROPERTY:
4852 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4854 case KEYED_SUPER_PROPERTY:
4855 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4861 __ bind(&stub_call);
4863 __ li(a0, Operand(Smi::FromInt(count_value)));
4865 SetExpressionPosition(expr);
4868 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4869 strength(language_mode())).code();
4870 CallIC(code, expr->CountBinOpFeedbackId());
4871 patch_site.EmitPatchInfo();
4874 if (is_strong(language_mode())) {
4875 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4877 // Store the value returned in v0.
4878 switch (assign_type) {
4880 if (expr->is_postfix()) {
4881 { EffectContext context(this);
4882 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4883 Token::ASSIGN, expr->CountSlot());
4884 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4887 // For all contexts except EffectConstant we have the result on
4888 // top of the stack.
4889 if (!context()->IsEffect()) {
4890 context()->PlugTOS();
4893 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4894 Token::ASSIGN, expr->CountSlot());
4895 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4896 context()->Plug(v0);
4899 case NAMED_PROPERTY: {
4900 __ mov(StoreDescriptor::ValueRegister(), result_register());
4901 __ li(StoreDescriptor::NameRegister(),
4902 Operand(prop->key()->AsLiteral()->value()));
4903 __ pop(StoreDescriptor::ReceiverRegister());
4904 if (FLAG_vector_stores) {
4905 EmitLoadStoreICSlot(expr->CountSlot());
4908 CallStoreIC(expr->CountStoreFeedbackId());
4910 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4911 if (expr->is_postfix()) {
4912 if (!context()->IsEffect()) {
4913 context()->PlugTOS();
4916 context()->Plug(v0);
4920 case NAMED_SUPER_PROPERTY: {
4921 EmitNamedSuperPropertyStore(prop);
4922 if (expr->is_postfix()) {
4923 if (!context()->IsEffect()) {
4924 context()->PlugTOS();
4927 context()->Plug(v0);
4931 case KEYED_SUPER_PROPERTY: {
4932 EmitKeyedSuperPropertyStore(prop);
4933 if (expr->is_postfix()) {
4934 if (!context()->IsEffect()) {
4935 context()->PlugTOS();
4938 context()->Plug(v0);
4942 case KEYED_PROPERTY: {
4943 __ mov(StoreDescriptor::ValueRegister(), result_register());
4944 __ Pop(StoreDescriptor::ReceiverRegister(),
4945 StoreDescriptor::NameRegister());
4947 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4948 if (FLAG_vector_stores) {
4949 EmitLoadStoreICSlot(expr->CountSlot());
4952 CallIC(ic, expr->CountStoreFeedbackId());
4954 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4955 if (expr->is_postfix()) {
4956 if (!context()->IsEffect()) {
4957 context()->PlugTOS();
4960 context()->Plug(v0);
4968 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4969 Expression* sub_expr,
4970 Handle<String> check) {
4971 Label materialize_true, materialize_false;
4972 Label* if_true = NULL;
4973 Label* if_false = NULL;
4974 Label* fall_through = NULL;
4975 context()->PrepareTest(&materialize_true, &materialize_false,
4976 &if_true, &if_false, &fall_through);
4978 { AccumulatorValueContext context(this);
4979 VisitForTypeofValue(sub_expr);
4981 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4983 Factory* factory = isolate()->factory();
4984 if (String::Equals(check, factory->number_string())) {
4985 __ JumpIfSmi(v0, if_true);
4986 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4987 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4988 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4989 } else if (String::Equals(check, factory->string_string())) {
4990 __ JumpIfSmi(v0, if_false);
4991 __ GetObjectType(v0, v0, a1);
4992 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
4994 } else if (String::Equals(check, factory->symbol_string())) {
4995 __ JumpIfSmi(v0, if_false);
4996 __ GetObjectType(v0, v0, a1);
4997 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4998 } else if (String::Equals(check, factory->boolean_string())) {
4999 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5000 __ Branch(if_true, eq, v0, Operand(at));
5001 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5002 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5003 } else if (String::Equals(check, factory->undefined_string())) {
5004 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5005 __ Branch(if_true, eq, v0, Operand(at));
5006 __ JumpIfSmi(v0, if_false);
5007 // Check for undetectable objects => true.
5008 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5009 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5010 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5011 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5012 } else if (String::Equals(check, factory->function_string())) {
5013 __ JumpIfSmi(v0, if_false);
5014 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5015 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5017 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5018 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
5020 } else if (String::Equals(check, factory->object_string())) {
5021 __ JumpIfSmi(v0, if_false);
5022 __ LoadRoot(at, Heap::kNullValueRootIndex);
5023 __ Branch(if_true, eq, v0, Operand(at));
5024 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
5025 __ GetObjectType(v0, v0, a1);
5026 __ Branch(if_false, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
5027 // Check for callable or undetectable objects => false.
5028 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5030 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5031 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5033 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
5034 } else if (String::Equals(check, factory->type##_string())) { \
5035 __ JumpIfSmi(v0, if_false); \
5036 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
5037 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
5038 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5039 SIMD128_TYPES(SIMD128_TYPE)
5043 if (if_false != fall_through) __ jmp(if_false);
5045 context()->Plug(if_true, if_false);
5049 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5050 Comment cmnt(masm_, "[ CompareOperation");
5051 SetExpressionPosition(expr);
5053 // First we try a fast inlined version of the compare when one of
5054 // the operands is a literal.
5055 if (TryLiteralCompare(expr)) return;
5057 // Always perform the comparison for its control flow. Pack the result
5058 // into the expression's context after the comparison is performed.
5059 Label materialize_true, materialize_false;
5060 Label* if_true = NULL;
5061 Label* if_false = NULL;
5062 Label* fall_through = NULL;
5063 context()->PrepareTest(&materialize_true, &materialize_false,
5064 &if_true, &if_false, &fall_through);
5066 Token::Value op = expr->op();
5067 VisitForStackValue(expr->left());
5070 VisitForStackValue(expr->right());
5071 __ CallRuntime(Runtime::kHasProperty, 2);
5072 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5073 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5074 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5077 case Token::INSTANCEOF: {
5078 VisitForAccumulatorValue(expr->right());
5079 __ mov(a0, result_register());
5081 InstanceOfStub stub(isolate());
5083 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5084 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5085 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5090 VisitForAccumulatorValue(expr->right());
5091 Condition cc = CompareIC::ComputeCondition(op);
5092 __ mov(a0, result_register());
5095 bool inline_smi_code = ShouldInlineSmiCase(op);
5096 JumpPatchSite patch_site(masm_);
5097 if (inline_smi_code) {
5099 __ Or(a2, a0, Operand(a1));
5100 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5101 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5102 __ bind(&slow_case);
5105 Handle<Code> ic = CodeFactory::CompareIC(
5106 isolate(), op, strength(language_mode())).code();
5107 CallIC(ic, expr->CompareOperationFeedbackId());
5108 patch_site.EmitPatchInfo();
5109 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5110 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5114 // Convert the result of the comparison into one expected for this
5115 // expression's context.
5116 context()->Plug(if_true, if_false);
5120 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5121 Expression* sub_expr,
5123 Label materialize_true, materialize_false;
5124 Label* if_true = NULL;
5125 Label* if_false = NULL;
5126 Label* fall_through = NULL;
5127 context()->PrepareTest(&materialize_true, &materialize_false,
5128 &if_true, &if_false, &fall_through);
5130 VisitForAccumulatorValue(sub_expr);
5131 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5132 __ mov(a0, result_register());
5133 if (expr->op() == Token::EQ_STRICT) {
5134 Heap::RootListIndex nil_value = nil == kNullValue ?
5135 Heap::kNullValueRootIndex :
5136 Heap::kUndefinedValueRootIndex;
5137 __ LoadRoot(a1, nil_value);
5138 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5140 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5141 CallIC(ic, expr->CompareOperationFeedbackId());
5142 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5144 context()->Plug(if_true, if_false);
5148 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5149 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5150 context()->Plug(v0);
5154 Register FullCodeGenerator::result_register() {
5159 Register FullCodeGenerator::context_register() {
5164 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5165 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5166 DCHECK(IsAligned(frame_offset, kPointerSize));
5167 // __ sw(value, MemOperand(fp, frame_offset));
5168 __ sd(value, MemOperand(fp, frame_offset));
5172 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5173 __ ld(dst, ContextOperand(cp, context_index));
5177 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5178 Scope* closure_scope = scope()->ClosureScope();
5179 if (closure_scope->is_script_scope() ||
5180 closure_scope->is_module_scope()) {
5181 // Contexts nested in the native context have a canonical empty function
5182 // as their closure, not the anonymous closure containing the global
5183 // code. Pass a smi sentinel and let the runtime look up the empty
5185 __ li(at, Operand(Smi::FromInt(0)));
5186 } else if (closure_scope->is_eval_scope()) {
5187 // Contexts created by a call to eval have the same closure as the
5188 // context calling eval, not the anonymous closure containing the eval
5189 // code. Fetch it from the context.
5190 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5192 DCHECK(closure_scope->is_function_scope());
5193 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5199 // ----------------------------------------------------------------------------
5200 // Non-local control flow support.
5202 void FullCodeGenerator::EnterFinallyBlock() {
5203 DCHECK(!result_register().is(a1));
5204 // Store result register while executing finally block.
5205 __ push(result_register());
5206 // Cook return address in link register to stack (smi encoded Code* delta).
5207 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
5210 // Store result register while executing finally block.
5213 // Store pending message while executing finally block.
5214 ExternalReference pending_message_obj =
5215 ExternalReference::address_of_pending_message_obj(isolate());
5216 __ li(at, Operand(pending_message_obj));
5217 __ ld(a1, MemOperand(at));
5220 ClearPendingMessage();
5224 void FullCodeGenerator::ExitFinallyBlock() {
5225 DCHECK(!result_register().is(a1));
5226 // Restore pending message from stack.
5228 ExternalReference pending_message_obj =
5229 ExternalReference::address_of_pending_message_obj(isolate());
5230 __ li(at, Operand(pending_message_obj));
5231 __ sd(a1, MemOperand(at));
5233 // Restore result register from stack.
5236 // Uncook return address and return.
5237 __ pop(result_register());
5240 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5245 void FullCodeGenerator::ClearPendingMessage() {
5246 DCHECK(!result_register().is(a1));
5247 ExternalReference pending_message_obj =
5248 ExternalReference::address_of_pending_message_obj(isolate());
5249 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
5250 __ li(at, Operand(pending_message_obj));
5251 __ sd(a1, MemOperand(at));
5255 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5256 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5257 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
5258 Operand(SmiFromSlot(slot)));
5265 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5267 BackEdgeState target_state,
5268 Code* replacement_code) {
5269 static const int kInstrSize = Assembler::kInstrSize;
5270 Address branch_address = pc - 8 * kInstrSize;
5271 CodePatcher patcher(branch_address, 1);
5273 switch (target_state) {
5275 // slt at, a3, zero_reg (in case of count based interrupts)
5276 // beq at, zero_reg, ok
5277 // lui t9, <interrupt stub address> upper
5278 // ori t9, <interrupt stub address> u-middle
5280 // ori t9, <interrupt stub address> lower
5283 // ok-label ----- pc_after points here
5284 patcher.masm()->slt(at, a3, zero_reg);
5286 case ON_STACK_REPLACEMENT:
5287 case OSR_AFTER_STACK_CHECK:
5288 // addiu at, zero_reg, 1
5289 // beq at, zero_reg, ok ;; Not changed
5290 // lui t9, <on-stack replacement address> upper
5291 // ori t9, <on-stack replacement address> middle
5293 // ori t9, <on-stack replacement address> lower
5294 // jalr t9 ;; Not changed
5295 // nop ;; Not changed
5296 // ok-label ----- pc_after points here
5297 patcher.masm()->daddiu(at, zero_reg, 1);
5300 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5301 // Replace the stack check address in the load-immediate (6-instr sequence)
5302 // with the entry address of the replacement code.
5303 Assembler::set_target_address_at(pc_immediate_load_address,
5304 replacement_code->entry());
5306 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5307 unoptimized_code, pc_immediate_load_address, replacement_code);
5311 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5313 Code* unoptimized_code,
5315 static const int kInstrSize = Assembler::kInstrSize;
5316 Address branch_address = pc - 8 * kInstrSize;
5317 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5319 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5320 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5321 DCHECK(reinterpret_cast<uint64_t>(
5322 Assembler::target_address_at(pc_immediate_load_address)) ==
5323 reinterpret_cast<uint64_t>(
5324 isolate->builtins()->InterruptCheck()->entry()));
5328 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5330 if (reinterpret_cast<uint64_t>(
5331 Assembler::target_address_at(pc_immediate_load_address)) ==
5332 reinterpret_cast<uint64_t>(
5333 isolate->builtins()->OnStackReplacement()->entry())) {
5334 return ON_STACK_REPLACEMENT;
5337 DCHECK(reinterpret_cast<uint64_t>(
5338 Assembler::target_address_at(pc_immediate_load_address)) ==
5339 reinterpret_cast<uint64_t>(
5340 isolate->builtins()->OsrAfterStackCheck()->entry()));
5341 return OSR_AFTER_STACK_CHECK;
5345 } // namespace internal
5348 #endif // V8_TARGET_ARCH_MIPS64