// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
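// (Illustrative example, not from the original source: a delta of 0x12345
// instructions would be recorded as andi(zero_reg, at, 0x2346), since
// 0x12345 / 0xffff == 1, the code of 'at', and 0x12345 % 0xffff == 0x2346.)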
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->MustReplaceUndefinedReceiverWithGlobalProxy()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  PrepareForBailoutForId(BailoutId::Prologue(), NO_REGISTERS);
  // Function register is trashed in case we bailout here. But since that
  // could happen only when we allocate a context the value of
  // |function_in_register_a1| is correct.

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, a1, a2, a3);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    Label check_frame_marker;
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
    __ Branch(&check_frame_marker, ne, a1,
              Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

    // Check the marker in the calling frame.
    __ bind(&check_frame_marker);
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
    function_in_register_a1 = false;

    Label non_construct_frame, done;
    __ Branch(&non_construct_frame, ne, a1,
              Operand(Smi::FromInt(StackFrame::CONSTRUCT)));

    __ lw(v0,
          MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ Branch(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, v0, a2, a3);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (literal()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


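// Decrements the profiling counter cell by |delta|, leaving the new counter
// value in a3; the callers below (back edge bookkeeping and the return
// sequence) branch on a3 to decide whether to call the InterruptCheck
// builtin.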
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


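// Reloads the profiling counter cell with the full interrupt budget (or a
// 16th of it in debug mode, so that debug break requests are detected
// sooner).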
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


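// Emits the common return sequence: bind the shared return label, optionally
// trace the exit, decrement the profiling counter, and tear down the frame,
// dropping the receiver and arguments.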
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


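// Converts the value in the result register to a boolean using the ToBoolean
// stub, then splits control flow on the (non-zero means true) result in v0.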
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


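// Branches to if_true or if_false depending on condition cc applied to
// lhs/rhs. Whichever label equals fall_through is reached by falling
// through, so at most two branches are emitted.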
void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


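// Returns the fp-relative operand of a stack-allocated parameter or local.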
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


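// Returns the operand of |var|, walking the context chain into |scratch| for
// context slots and delegating to StackOperand for stack-allocated variables.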
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


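// Stores |src| into |var|, using the two scratch registers for the write
// barrier when the target is a context slot.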
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
      }
      __ Push(a2, a0);
      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,
                     2);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(
        pretenure ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure, 1);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


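// Walks the context chain and jumps to |slow| if any intervening context has
// an extension object that could shadow the global variable, then performs a
// normal global load.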
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


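// Like EmitLoadGlobalCheckExtensions, but for a context slot: checks every
// extension between the current scope and the variable's scope and returns
// the slot's operand.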
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


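// Loads a global variable, either via the LoadGlobalViaContext stub or
// runtime call (for static global object properties) or via a load IC keyed
// by the proxy's feedback vector slot.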
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    }
  } else {
    __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ li(LoadDescriptor::SlotRegister(),
          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ push(a0);
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ bind(&done);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
          __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
        }
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), v0);
              __ li(StoreDescriptor::NameRegister(),
                    Operand(isolate()->factory()->home_object_symbol()));
              __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on the stack
      result_saved = true;
    }

    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ li(a0, Operand(Smi::FromInt(NONE)));
            __ push(a0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1792 bool result_saved = false; // Is the result saved to the stack?
1793 ZoneList<Expression*>* subexprs = expr->values();
1794 int length = subexprs->length();
1796 // Emit code to evaluate all the non-constant subexpressions and to store
1797 // them into the newly cloned array.
1798 int array_index = 0;
1799 for (; array_index < length; array_index++) {
1800 Expression* subexpr = subexprs->at(array_index);
1801 if (subexpr->IsSpread()) break;
1803 // If the subexpression is a literal or a simple materialized literal it
1804 // is already set in the cloned array.
1805 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1807 if (!result_saved) {
1808 __ push(v0); // array literal
1809 __ Push(Smi::FromInt(expr->literal_index()));
1810 result_saved = true;
1813 VisitForAccumulatorValue(subexpr);
1815 if (has_fast_elements) {
1816 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1817 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1818 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1819 __ sw(result_register(), FieldMemOperand(a1, offset));
1820 // Update the write barrier for the array store.
1821 __ RecordWriteField(a1, offset, result_register(), a2,
1822 kRAHasBeenSaved, kDontSaveFPRegs,
1823 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1825 __ li(a3, Operand(Smi::FromInt(array_index)));
1826 __ mov(a0, result_register());
1827 StoreArrayLiteralElementStub stub(isolate());
1831 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1834 // If the array literal contains spread expressions, it has two parts. The
1835 // first part is the "static" array, which has a literal index and is handled
1836 // above. The second part is everything after the first spread expression
1837 // (inclusive); these elements get appended to the array. Note that the
1838 // number of elements an iterable produces is unknown ahead of time.
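// Illustrative example (informal): for
//   var a = [1, 2, ...xs, 3];
// the elements 1 and 2 belong to the "static" part handled above, while
// ...xs and 3 are handled below: the spread is appended via the
// CONCAT_ITERABLE_TO_ARRAY builtin and the trailing 3 via
// Runtime::kAppendElement, since the number of elements xs produces is only
// known at runtime.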
1839 if (array_index < length && result_saved) {
1840 __ Pop(); // literal index
1842 result_saved = false;
1844 for (; array_index < length; array_index++) {
1845 Expression* subexpr = subexprs->at(array_index);
1848 if (subexpr->IsSpread()) {
1849 VisitForStackValue(subexpr->AsSpread()->expression());
1850 __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1853 VisitForStackValue(subexpr);
1854 __ CallRuntime(Runtime::kAppendElement, 2);
1857 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1861 __ Pop(); // literal index
1862 context()->PlugTOS();
1864 context()->Plug(v0);
1869 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1870 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1872 Comment cmnt(masm_, "[ Assignment");
1873 SetExpressionPosition(expr, INSERT_BREAK);
1875 Property* property = expr->target()->AsProperty();
1876 LhsKind assign_type = Property::GetAssignType(property);
1878 // Evaluate LHS expression.
1879 switch (assign_type) {
1881 // Nothing to do here.
1883 case NAMED_PROPERTY:
1884 if (expr->is_compound()) {
1885 // We need the receiver both on the stack and in the register.
1886 VisitForStackValue(property->obj());
1887 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1889 VisitForStackValue(property->obj());
1892 case NAMED_SUPER_PROPERTY:
1894 property->obj()->AsSuperPropertyReference()->this_var());
1895 VisitForAccumulatorValue(
1896 property->obj()->AsSuperPropertyReference()->home_object());
1897 __ Push(result_register());
1898 if (expr->is_compound()) {
1899 const Register scratch = a1;
1900 __ lw(scratch, MemOperand(sp, kPointerSize));
1901 __ Push(scratch, result_register());
1904 case KEYED_SUPER_PROPERTY: {
1905 const Register scratch = a1;
1907 property->obj()->AsSuperPropertyReference()->this_var());
1908 VisitForAccumulatorValue(
1909 property->obj()->AsSuperPropertyReference()->home_object());
1910 __ Move(scratch, result_register());
1911 VisitForAccumulatorValue(property->key());
1912 __ Push(scratch, result_register());
1913 if (expr->is_compound()) {
1914 const Register scratch1 = t0;
1915 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
1916 __ Push(scratch1, scratch, result_register());
1920 case KEYED_PROPERTY:
1921 // We need the key and receiver on both the stack and in v0 and a1.
1922 if (expr->is_compound()) {
1923 VisitForStackValue(property->obj());
1924 VisitForStackValue(property->key());
1925 __ lw(LoadDescriptor::ReceiverRegister(),
1926 MemOperand(sp, 1 * kPointerSize));
1927 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1929 VisitForStackValue(property->obj());
1930 VisitForStackValue(property->key());
1935 // For compound assignments we need another deoptimization point after the
1936 // variable/property load.
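// Illustrative example (informal): for a compound assignment such as
//   o.x += 1;
// the current value of o.x is loaded first, so a deoptimization at the load
// must be able to resume with that value on top of the stack; the bailout
// points registered below record exactly that state.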
1937 if (expr->is_compound()) {
1938 { AccumulatorValueContext context(this);
1939 switch (assign_type) {
1941 EmitVariableLoad(expr->target()->AsVariableProxy());
1942 PrepareForBailout(expr->target(), TOS_REG);
1944 case NAMED_PROPERTY:
1945 EmitNamedPropertyLoad(property);
1946 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1948 case NAMED_SUPER_PROPERTY:
1949 EmitNamedSuperPropertyLoad(property);
1950 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1952 case KEYED_SUPER_PROPERTY:
1953 EmitKeyedSuperPropertyLoad(property);
1954 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1956 case KEYED_PROPERTY:
1957 EmitKeyedPropertyLoad(property);
1958 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1963 Token::Value op = expr->binary_op();
1964 __ push(v0); // Left operand goes on the stack.
1965 VisitForAccumulatorValue(expr->value());
1967 AccumulatorValueContext context(this);
1968 if (ShouldInlineSmiCase(op)) {
1969 EmitInlineSmiBinaryOp(expr->binary_operation(),
1974 EmitBinaryOp(expr->binary_operation(), op);
1977 // Deoptimization point in case the binary operation may have side effects.
1978 PrepareForBailout(expr->binary_operation(), TOS_REG);
1980 VisitForAccumulatorValue(expr->value());
1983 SetExpressionPosition(expr);
1986 switch (assign_type) {
1988 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1989 expr->op(), expr->AssignmentSlot());
1990 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1991 context()->Plug(v0);
1993 case NAMED_PROPERTY:
1994 EmitNamedPropertyAssignment(expr);
1996 case NAMED_SUPER_PROPERTY:
1997 EmitNamedSuperPropertyStore(property);
1998 context()->Plug(v0);
2000 case KEYED_SUPER_PROPERTY:
2001 EmitKeyedSuperPropertyStore(property);
2002 context()->Plug(v0);
2004 case KEYED_PROPERTY:
2005 EmitKeyedPropertyAssignment(expr);
2011 void FullCodeGenerator::VisitYield(Yield* expr) {
2012 Comment cmnt(masm_, "[ Yield");
2013 SetExpressionPosition(expr);
2015 // Evaluate yielded value first; the initial iterator definition depends on
2016 // this. It stays on the stack while we update the iterator.
2017 VisitForStackValue(expr->expression());
2019 switch (expr->yield_kind()) {
2020 case Yield::kSuspend:
2021 // Pop value from top-of-stack slot; box result into result register.
2022 EmitCreateIteratorResult(false);
2023 __ push(result_register());
2025 case Yield::kInitial: {
2026 Label suspend, continuation, post_runtime, resume;
2029 __ bind(&continuation);
2030 __ RecordGeneratorContinuation();
2034 VisitForAccumulatorValue(expr->generator_object());
2035 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2036 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2037 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2038 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2040 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2041 kRAHasBeenSaved, kDontSaveFPRegs);
2042 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2043 __ Branch(&post_runtime, eq, sp, Operand(a1));
2044 __ push(v0); // generator object
2045 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2046 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2047 __ bind(&post_runtime);
2048 __ pop(result_register());
2049 EmitReturnSequence();
2052 context()->Plug(result_register());
2056 case Yield::kFinal: {
2057 VisitForAccumulatorValue(expr->generator_object());
2058 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2059 __ sw(a1, FieldMemOperand(result_register(),
2060 JSGeneratorObject::kContinuationOffset));
2061 // Pop value from top-of-stack slot, box result into result register.
2062 EmitCreateIteratorResult(true);
2063 EmitUnwindBeforeReturn();
2064 EmitReturnSequence();
2068 case Yield::kDelegating: {
2069 VisitForStackValue(expr->generator_object());
2071 // Initial stack layout is as follows:
2072 // [sp + 1 * kPointerSize] iter
2073 // [sp + 0 * kPointerSize] g
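// The control flow below loosely corresponds to the following informal JS
// sketch of delegating yield (names and structure are illustrative, not
// taken from the original source):
//
//   var received = undefined, f = 'next';
//   var result = iter[f](received);
//   while (!result.done) {
//     try {
//       received = yield result;  // note: yields the result object itself
//       f = 'next';               // resumed via next(v)
//     } catch (e) {
//       received = e;             // resumed via throw(e)
//       f = 'throw';
//     }
//     result = iter[f](received);
//   }
//   // 'yield* iterable' evaluates to result.value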
2075 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2076 Label l_next, l_call;
2077 Register load_receiver = LoadDescriptor::ReceiverRegister();
2078 Register load_name = LoadDescriptor::NameRegister();
2080 // Initial send value is undefined.
2081 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2084 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2087 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2088 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2089 __ Push(load_name, a3, a0); // "throw", iter, except
2092 // try { received = %yield result }
2093 // Shuffle the received result above a try handler and yield it without
2094 // re-boxing.
2096 __ pop(a0); // result
2097 int handler_index = NewHandlerTableEntry();
2098 EnterTryBlock(handler_index, &l_catch);
2099 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2100 __ push(a0); // result
2103 __ bind(&l_continuation);
2104 __ RecordGeneratorContinuation();
2108 __ bind(&l_suspend);
2109 const int generator_object_depth = kPointerSize + try_block_size;
2110 __ lw(a0, MemOperand(sp, generator_object_depth));
2112 __ Push(Smi::FromInt(handler_index)); // handler-index
2113 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2114 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2115 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2116 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2118 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2119 kRAHasBeenSaved, kDontSaveFPRegs);
2120 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2121 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2122 __ pop(v0); // result
2123 EmitReturnSequence();
2125 __ bind(&l_resume); // received in a0
2126 ExitTryBlock(handler_index);
2128 // receiver = iter; f = 'next'; arg = received;
2131 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2132 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2133 __ Push(load_name, a3, a0); // "next", iter, received
2135 // result = receiver[f](arg);
2137 __ lw(load_receiver, MemOperand(sp, kPointerSize));
2138 __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
2139 __ li(LoadDescriptor::SlotRegister(),
2140 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2141 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2142 CallIC(ic, TypeFeedbackId::None());
2145 __ sw(a1, MemOperand(sp, 2 * kPointerSize));
2146 SetCallPosition(expr, 1);
2147 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2150 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2151 __ Drop(1); // The function is still on the stack; drop it.
2153 // if (!result.done) goto l_try;
2154 __ Move(load_receiver, v0);
2156 __ push(load_receiver); // save result
2157 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2158 __ li(LoadDescriptor::SlotRegister(),
2159 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2160 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.done
2162 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2164 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2167 __ pop(load_receiver); // result
2168 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2169 __ li(LoadDescriptor::SlotRegister(),
2170 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2171 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.value
2172 context()->DropAndPlug(2, v0); // drop iter and g
2179 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2181 JSGeneratorObject::ResumeMode resume_mode) {
2182 // The value stays in a0, and is ultimately read by the resumed generator, as
2183 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it, or it
2184 // is read to throw the value when the resumed generator is already closed.
2185 // a1 will hold the generator object until the activation has been resumed.
2186 VisitForStackValue(generator);
2187 VisitForAccumulatorValue(value);
2190 // Load suspended function and context.
2191 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2192 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2194 // Load receiver and store as the first argument.
2195 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2198 // Push holes for the rest of the arguments to the generator function.
2199 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
2201 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2202 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2203 Label push_argument_holes, push_frame;
2204 __ bind(&push_argument_holes);
2205 __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2206 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2208 __ jmp(&push_argument_holes);
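// (Illustrative) the loop above pushes the-hole once per formal parameter,
// counting the smi-tagged parameter count in a3 down until it goes negative;
// e.g. resuming 'function* g(a, b)' pushes two holes.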
2210 // Enter a new JavaScript frame, and initialize its slots as they were when
2211 // the generator was suspended.
2212 Label resume_frame, done;
2213 __ bind(&push_frame);
2214 __ Call(&resume_frame);
2216 __ bind(&resume_frame);
2217 // ra = return address.
2218 // fp = caller's frame pointer.
2219 // cp = callee's context,
2220 // t0 = callee's JS function.
2221 __ Push(ra, fp, cp, t0);
2222 // Adjust FP to point to saved FP.
2223 __ Addu(fp, sp, 2 * kPointerSize);
2225 // Load the operand stack size.
2226 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2227 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2230 // If we are sending a value and there is no operand stack, we can jump back
2231 // in directly.
2232 if (resume_mode == JSGeneratorObject::NEXT) {
2234 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2235 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2236 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2238 __ Addu(a3, a3, Operand(a2));
2239 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2240 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2242 __ bind(&slow_resume);
2245 // Otherwise, we push holes for the operand stack and call the runtime to fix
2246 // up the stack and the handlers.
2247 Label push_operand_holes, call_resume;
2248 __ bind(&push_operand_holes);
2249 __ Subu(a3, a3, Operand(1));
2250 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2252 __ Branch(&push_operand_holes);
2253 __ bind(&call_resume);
2254 DCHECK(!result_register().is(a1));
2255 __ Push(a1, result_register());
2256 __ Push(Smi::FromInt(resume_mode));
2257 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2258 // Not reached: the runtime call returns elsewhere.
2259 __ stop("not-reached");
2262 context()->Plug(result_register());
2266 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2267 Label allocate, done_allocate;
2269 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2270 __ jmp(&done_allocate);
2273 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2274 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2276 __ bind(&done_allocate);
2277 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2278 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2279 __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2282 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2283 __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
2284 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2285 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2286 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2287 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2288 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2289 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
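// (Illustrative) the object built above has the canonical iterator result
// shape { value: <popped TOS value>, done: true|false }, using the iterator
// result map from the native context.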
2293 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2294 SetExpressionPosition(prop);
2295 Literal* key = prop->key()->AsLiteral();
2296 DCHECK(!prop->IsSuperAccess());
2298 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2299 __ li(LoadDescriptor::SlotRegister(),
2300 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2301 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2305 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2306 // Stack: receiver, home_object.
2307 SetExpressionPosition(prop);
2309 Literal* key = prop->key()->AsLiteral();
2310 DCHECK(!key->value()->IsSmi());
2311 DCHECK(prop->IsSuperAccess());
2313 __ Push(key->value());
2314 __ Push(Smi::FromInt(language_mode()));
2315 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2319 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2320 SetExpressionPosition(prop);
2321 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2322 __ li(LoadDescriptor::SlotRegister(),
2323 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2328 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2329 // Stack: receiver, home_object, key.
2330 SetExpressionPosition(prop);
2331 __ Push(Smi::FromInt(language_mode()));
2332 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2336 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2338 Expression* left_expr,
2339 Expression* right_expr) {
2340 Label done, smi_case, stub_call;
2342 Register scratch1 = a2;
2343 Register scratch2 = a3;
2345 // Get the arguments.
2347 Register right = a0;
2349 __ mov(a0, result_register());
2351 // Perform combined smi check on both operands.
2352 __ Or(scratch1, left, Operand(right));
2353 STATIC_ASSERT(kSmiTag == 0);
2354 JumpPatchSite patch_site(masm_);
2355 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2357 __ bind(&stub_call);
2359 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2360 CallIC(code, expr->BinaryOperationFeedbackId());
2361 patch_site.EmitPatchInfo();
2365 // Smi case. This code works the same way as the smi-smi case in the type
2366 // recording binary operation stub.
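// A worked example (informal): with kSmiTag == 0 and kSmiTagSize == 1, a smi
// is the 31-bit value shifted left by one, so addition and subtraction can
// operate directly on the tagged words, since
//   (a << 1) + (b << 1) == (a + b) << 1,
// and only an overflow check is needed; shifts and multiplication instead
// untag, operate, and retag, falling back to the stub whenever the result
// does not fit a smi.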
2369 __ GetLeastBitsFromSmi(scratch1, right, 5);
2370 __ srav(right, left, scratch1);
2371 __ And(v0, right, Operand(~kSmiTagMask));
2374 __ SmiUntag(scratch1, left);
2375 __ GetLeastBitsFromSmi(scratch2, right, 5);
2376 __ sllv(scratch1, scratch1, scratch2);
2377 __ Addu(scratch2, scratch1, Operand(0x40000000));
2378 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2379 __ SmiTag(v0, scratch1);
2383 __ SmiUntag(scratch1, left);
2384 __ GetLeastBitsFromSmi(scratch2, right, 5);
2385 __ srlv(scratch1, scratch1, scratch2);
2386 __ And(scratch2, scratch1, 0xc0000000);
2387 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2388 __ SmiTag(v0, scratch1);
2392 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2393 __ BranchOnOverflow(&stub_call, scratch1);
2396 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2397 __ BranchOnOverflow(&stub_call, scratch1);
2400 __ SmiUntag(scratch1, right);
2401 __ Mul(scratch2, v0, left, scratch1);
2402 __ sra(scratch1, v0, 31);
2403 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2404 __ Branch(&done, ne, v0, Operand(zero_reg));
2405 __ Addu(scratch2, right, left);
2406 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2407 DCHECK(Smi::FromInt(0) == 0);
2408 __ mov(v0, zero_reg);
2412 __ Or(v0, left, Operand(right));
2414 case Token::BIT_AND:
2415 __ And(v0, left, Operand(right));
2417 case Token::BIT_XOR:
2418 __ Xor(v0, left, Operand(right));
2425 context()->Plug(v0);
2429 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2430 int* used_store_slots) {
2431 // Constructor is in v0.
2432 DCHECK(lit != NULL);
2435 // No access check is needed here since the constructor is created by the
2437 Register scratch = a1;
2439 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2442 for (int i = 0; i < lit->properties()->length(); i++) {
2443 ObjectLiteral::Property* property = lit->properties()->at(i);
2444 Expression* value = property->value();
2446 if (property->is_static()) {
2447 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor
2449 __ lw(scratch, MemOperand(sp, 0)); // prototype
2452 EmitPropertyKey(property, lit->GetIdForProperty(i));
2454 // The static prototype property is read-only. We handle the non-computed
2455 // property name case in the parser. Since this is the only case where we
2456 // need to check for an own read-only property, we special-case it here so
2457 // we do not need to do the check for every property.
2458 if (property->is_static() && property->is_computed_name()) {
2459 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2463 VisitForStackValue(value);
2464 EmitSetHomeObjectIfNeeded(value, 2,
2465 lit->SlotForHomeObject(value, used_store_slots));
2467 switch (property->kind()) {
2468 case ObjectLiteral::Property::CONSTANT:
2469 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2470 case ObjectLiteral::Property::PROTOTYPE:
2472 case ObjectLiteral::Property::COMPUTED:
2473 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2476 case ObjectLiteral::Property::GETTER:
2477 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2479 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2482 case ObjectLiteral::Property::SETTER:
2483 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2485 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2493 // Set both the prototype and constructor to have fast properties, and also
2494 // freeze them in strong mode.
2495 __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
2499 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2500 __ mov(a0, result_register());
2503 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2504 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2505 CallIC(code, expr->BinaryOperationFeedbackId());
2506 patch_site.EmitPatchInfo();
2507 context()->Plug(v0);
2511 void FullCodeGenerator::EmitAssignment(Expression* expr,
2512 FeedbackVectorICSlot slot) {
2513 DCHECK(expr->IsValidReferenceExpressionOrThis());
2515 Property* prop = expr->AsProperty();
2516 LhsKind assign_type = Property::GetAssignType(prop);
2518 switch (assign_type) {
2520 Variable* var = expr->AsVariableProxy()->var();
2521 EffectContext context(this);
2522 EmitVariableAssignment(var, Token::ASSIGN, slot);
2525 case NAMED_PROPERTY: {
2526 __ push(result_register()); // Preserve value.
2527 VisitForAccumulatorValue(prop->obj());
2528 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2529 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2530 __ li(StoreDescriptor::NameRegister(),
2531 Operand(prop->key()->AsLiteral()->value()));
2532 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2536 case NAMED_SUPER_PROPERTY: {
2538 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2539 VisitForAccumulatorValue(
2540 prop->obj()->AsSuperPropertyReference()->home_object());
2541 // stack: value, this; v0: home_object
2542 Register scratch = a2;
2543 Register scratch2 = a3;
2544 __ mov(scratch, result_register()); // home_object
2545 __ lw(v0, MemOperand(sp, kPointerSize)); // value
2546 __ lw(scratch2, MemOperand(sp, 0)); // this
2547 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this
2548 __ sw(scratch, MemOperand(sp, 0)); // home_object
2549 // stack: this, home_object; v0: value
2550 EmitNamedSuperPropertyStore(prop);
2553 case KEYED_SUPER_PROPERTY: {
2555 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2557 prop->obj()->AsSuperPropertyReference()->home_object());
2558 VisitForAccumulatorValue(prop->key());
2559 Register scratch = a2;
2560 Register scratch2 = a3;
2561 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2562 // stack: value, this, home_object; v0: key, a3: value
2563 __ lw(scratch, MemOperand(sp, kPointerSize)); // this
2564 __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2565 __ lw(scratch, MemOperand(sp, 0)); // home_object
2566 __ sw(scratch, MemOperand(sp, kPointerSize));
2567 __ sw(v0, MemOperand(sp, 0));
2568 __ Move(v0, scratch2);
2569 // stack: this, home_object, key; v0: value.
2570 EmitKeyedSuperPropertyStore(prop);
2573 case KEYED_PROPERTY: {
2574 __ push(result_register()); // Preserve value.
2575 VisitForStackValue(prop->obj());
2576 VisitForAccumulatorValue(prop->key());
2577 __ mov(StoreDescriptor::NameRegister(), result_register());
2578 __ Pop(StoreDescriptor::ValueRegister(),
2579 StoreDescriptor::ReceiverRegister());
2580 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2582 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2587 context()->Plug(v0);
2591 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2592 Variable* var, MemOperand location) {
2593 __ sw(result_register(), location);
2594 if (var->IsContextSlot()) {
2595 // RecordWrite may destroy all its register arguments.
2596 __ Move(a3, result_register());
2597 int offset = Context::SlotOffset(var->index());
2598 __ RecordWriteContextSlot(
2599 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2604 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2605 FeedbackVectorICSlot slot) {
2606 if (var->IsUnallocated()) {
2607 // Global var, const, or let.
2608 __ mov(StoreDescriptor::ValueRegister(), result_register());
2609 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2610 __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2611 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2614 } else if (var->IsGlobalSlot()) {
2615 // Global var, const, or let.
2616 DCHECK(var->index() > 0);
2617 DCHECK(var->IsStaticGlobalObjectProperty());
2618 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
2619 __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
2620 int const slot = var->index();
2621 int const depth = scope()->ContextChainLength(var->scope());
2622 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2623 __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2624 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2627 __ Push(Smi::FromInt(slot));
2629 __ CallRuntime(is_strict(language_mode())
2630 ? Runtime::kStoreGlobalViaContext_Strict
2631 : Runtime::kStoreGlobalViaContext_Sloppy,
2635 } else if (var->mode() == LET && op != Token::INIT_LET) {
2636 // Non-initializing assignment to let variable needs a write barrier.
2637 DCHECK(!var->IsLookupSlot());
2638 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2640 MemOperand location = VarOperand(var, a1);
2641 __ lw(a3, location);
2642 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2643 __ Branch(&assign, ne, a3, Operand(t0));
2644 __ li(a3, Operand(var->name()));
2646 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2647 // Perform the assignment.
2649 EmitStoreToStackLocalOrContextSlot(var, location);
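// Illustrative JS behavior (informal): in
//   { x = 1; let x; }
// the assignment runs while the binding still holds the hole, so the check
// above throws a ReferenceError instead of storing.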
2651 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2652 // Assignment to const variable needs a write barrier.
2653 DCHECK(!var->IsLookupSlot());
2654 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2656 MemOperand location = VarOperand(var, a1);
2657 __ lw(a3, location);
2658 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2659 __ Branch(&const_error, ne, a3, Operand(at));
2660 __ li(a3, Operand(var->name()));
2662 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2663 __ bind(&const_error);
2664 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
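// e.g. (illustrative): 'const c = 1; c = 2;' takes this path; an initialized
// const binding falls through to const_error and throws via
// Runtime::kThrowConstAssignError.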
2666 } else if (var->is_this() && op == Token::INIT_CONST) {
2667 // Initializing assignment to const {this} needs a write barrier.
2668 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2669 Label uninitialized_this;
2670 MemOperand location = VarOperand(var, a1);
2671 __ lw(a3, location);
2672 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2673 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2674 __ li(a0, Operand(var->name()));
2676 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2677 __ bind(&uninitialized_this);
2678 EmitStoreToStackLocalOrContextSlot(var, location);
2680 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2681 if (var->IsLookupSlot()) {
2682 // Assignment to var.
2683 __ li(a1, Operand(var->name()));
2684 __ li(a0, Operand(Smi::FromInt(language_mode())));
2685 __ Push(v0, cp, a1, a0); // Value, context, name, language mode.
2686 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2688 // Assignment to var or initializing assignment to let/const in harmony
2689 // mode.
2690 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2691 MemOperand location = VarOperand(var, a1);
2692 if (generate_debug_code_ && op == Token::INIT_LET) {
2693 // Check for an uninitialized let binding.
2694 __ lw(a2, location);
2695 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2696 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2698 EmitStoreToStackLocalOrContextSlot(var, location);
2701 } else if (op == Token::INIT_CONST_LEGACY) {
2702 // Const initializers need a write barrier.
2703 DCHECK(!var->IsParameter()); // No const parameters.
2704 if (var->IsLookupSlot()) {
2705 __ li(a0, Operand(var->name()));
2706 __ Push(v0, cp, a0); // Context and name.
2707 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2709 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2711 MemOperand location = VarOperand(var, a1);
2712 __ lw(a2, location);
2713 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2714 __ Branch(&skip, ne, a2, Operand(at));
2715 EmitStoreToStackLocalOrContextSlot(var, location);
2720 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2721 if (is_strict(language_mode())) {
2722 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2724 // Silently ignore store in sloppy mode.
2729 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2730 // Assignment to a property, using a named store IC.
2731 Property* prop = expr->target()->AsProperty();
2732 DCHECK(prop != NULL);
2733 DCHECK(prop->key()->IsLiteral());
2735 __ mov(StoreDescriptor::ValueRegister(), result_register());
2736 __ li(StoreDescriptor::NameRegister(),
2737 Operand(prop->key()->AsLiteral()->value()));
2738 __ pop(StoreDescriptor::ReceiverRegister());
2739 if (FLAG_vector_stores) {
2740 EmitLoadStoreICSlot(expr->AssignmentSlot());
2743 CallStoreIC(expr->AssignmentFeedbackId());
2746 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2747 context()->Plug(v0);
2751 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2752 // Assignment to named property of super.
2754 // stack : receiver ('this'), home_object
2755 DCHECK(prop != NULL);
2756 Literal* key = prop->key()->AsLiteral();
2757 DCHECK(key != NULL);
2759 __ Push(key->value());
2761 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2762 : Runtime::kStoreToSuper_Sloppy),
2767 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2768 // Assignment to keyed property of super.
2770 // stack : receiver ('this'), home_object, key
2771 DCHECK(prop != NULL);
2775 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2776 : Runtime::kStoreKeyedToSuper_Sloppy),
2781 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2782 // Assignment to a property, using a keyed store IC.
2783 // Call keyed store IC.
2784 // The arguments are:
2785 // - a0 is the value,
2786 // - a1 is the receiver,
2787 // - a2 is the key.
2788 __ mov(StoreDescriptor::ValueRegister(), result_register());
2789 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2790 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2793 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2794 if (FLAG_vector_stores) {
2795 EmitLoadStoreICSlot(expr->AssignmentSlot());
2798 CallIC(ic, expr->AssignmentFeedbackId());
2801 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2802 context()->Plug(v0);
2806 void FullCodeGenerator::VisitProperty(Property* expr) {
2807 Comment cmnt(masm_, "[ Property");
2808 SetExpressionPosition(expr);
2810 Expression* key = expr->key();
2812 if (key->IsPropertyName()) {
2813 if (!expr->IsSuperAccess()) {
2814 VisitForAccumulatorValue(expr->obj());
2815 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2816 EmitNamedPropertyLoad(expr);
2818 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2820 expr->obj()->AsSuperPropertyReference()->home_object());
2821 EmitNamedSuperPropertyLoad(expr);
2824 if (!expr->IsSuperAccess()) {
2825 VisitForStackValue(expr->obj());
2826 VisitForAccumulatorValue(expr->key());
2827 __ Move(LoadDescriptor::NameRegister(), v0);
2828 __ pop(LoadDescriptor::ReceiverRegister());
2829 EmitKeyedPropertyLoad(expr);
2831 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2833 expr->obj()->AsSuperPropertyReference()->home_object());
2834 VisitForStackValue(expr->key());
2835 EmitKeyedSuperPropertyLoad(expr);
2838 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2839 context()->Plug(v0);
2843 void FullCodeGenerator::CallIC(Handle<Code> code,
2844 TypeFeedbackId id) {
2846 __ Call(code, RelocInfo::CODE_TARGET, id);
2850 // Code common for calls using the IC.
2851 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2852 Expression* callee = expr->expression();
2854 CallICState::CallType call_type =
2855 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
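// (Illustrative) 'f(x)' is a FUNCTION call through a variable proxy, while
// 'o.f(x)' is a METHOD call whose target is loaded from the receiver below.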
2857 // Get the target function.
2858 if (call_type == CallICState::FUNCTION) {
2859 { StackValueContext context(this);
2860 EmitVariableLoad(callee->AsVariableProxy());
2861 PrepareForBailout(callee, NO_REGISTERS);
2863 // Push undefined as receiver. This is patched in the method prologue if it
2864 // is a sloppy mode method.
2865 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2868 // Load the function from the receiver.
2869 DCHECK(callee->IsProperty());
2870 DCHECK(!callee->AsProperty()->IsSuperAccess());
2871 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2872 EmitNamedPropertyLoad(callee->AsProperty());
2873 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2874 // Push the target function under the receiver.
2875 __ lw(at, MemOperand(sp, 0));
2877 __ sw(v0, MemOperand(sp, kPointerSize));
2880 EmitCall(expr, call_type);
2884 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2885 SetExpressionPosition(expr);
2886 Expression* callee = expr->expression();
2887 DCHECK(callee->IsProperty());
2888 Property* prop = callee->AsProperty();
2889 DCHECK(prop->IsSuperAccess());
2891 Literal* key = prop->key()->AsLiteral();
2892 DCHECK(!key->value()->IsSmi());
2893 // Load the function from the receiver.
2894 const Register scratch = a1;
2895 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2896 VisitForAccumulatorValue(super_ref->home_object());
2897 __ mov(scratch, v0);
2898 VisitForAccumulatorValue(super_ref->this_var());
2899 __ Push(scratch, v0, v0, scratch);
2900 __ Push(key->value());
2901 __ Push(Smi::FromInt(language_mode()));
2905 // - this (receiver)
2906 // - this (receiver) <-- LoadFromSuper will pop here and below.
2910 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2912 // Replace home_object with target function.
2913 __ sw(v0, MemOperand(sp, kPointerSize));
2916 // - target function
2917 // - this (receiver)
2918 EmitCall(expr, CallICState::METHOD);
2922 // Code common for calls using the IC.
2923 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2926 VisitForAccumulatorValue(key);
2928 Expression* callee = expr->expression();
2930 // Load the function from the receiver.
2931 DCHECK(callee->IsProperty());
2932 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2933 __ Move(LoadDescriptor::NameRegister(), v0);
2934 EmitKeyedPropertyLoad(callee->AsProperty());
2935 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2937 // Push the target function under the receiver.
2938 __ lw(at, MemOperand(sp, 0));
2940 __ sw(v0, MemOperand(sp, kPointerSize));
2942 EmitCall(expr, CallICState::METHOD);
2946 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2947 Expression* callee = expr->expression();
2948 DCHECK(callee->IsProperty());
2949 Property* prop = callee->AsProperty();
2950 DCHECK(prop->IsSuperAccess());
2952 SetExpressionPosition(prop);
2953 // Load the function from the receiver.
2954 const Register scratch = a1;
2955 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2956 VisitForAccumulatorValue(super_ref->home_object());
2957 __ Move(scratch, v0);
2958 VisitForAccumulatorValue(super_ref->this_var());
2959 __ Push(scratch, v0, v0, scratch);
2960 VisitForStackValue(prop->key());
2961 __ Push(Smi::FromInt(language_mode()));
2965 // - this (receiver)
2966 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2970 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2972 // Replace home_object with target function.
2973 __ sw(v0, MemOperand(sp, kPointerSize));
2976 // - target function
2977 // - this (receiver)
2978 EmitCall(expr, CallICState::METHOD);
2982 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2983 // Load the arguments.
2984 ZoneList<Expression*>* args = expr->arguments();
2985 int arg_count = args->length();
2986 for (int i = 0; i < arg_count; i++) {
2987 VisitForStackValue(args->at(i));
2990 // Record source position of the IC call.
2991 SetCallPosition(expr, arg_count);
2992 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2993 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2994 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2995 // Don't assign a type feedback id to the IC, since type feedback is provided
2996 // by the vector above.
2999 RecordJSReturnSite(expr);
3000 // Restore context register.
3001 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3002 context()->DropAndPlug(1, v0);
3006 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3007 // t3: copy of the first argument or undefined if it doesn't exist.
3008 if (arg_count > 0) {
3009 __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
3011 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
3014 // t2: the receiver of the enclosing function.
3015 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3017 // t1: the language mode.
3018 __ li(t1, Operand(Smi::FromInt(language_mode())));
3020 // t0: the start position of the scope the call resides in.
3021 __ li(t0, Operand(Smi::FromInt(scope()->start_position())));
3023 // Do the runtime call.
3024 __ Push(t3, t2, t1, t0);
3025 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
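// Illustrative shape of a direct eval (informal): for
//   eval("x + 1");
// the resolver sees the callee (already on the stack), a copy of the first
// argument, the enclosing function, the language mode and the scope start
// position, and returns the function that should actually be called.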
3029 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3030 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3031 VariableProxy* callee = expr->expression()->AsVariableProxy();
3032 if (callee->var()->IsLookupSlot()) {
3035 SetExpressionPosition(callee);
3036 // Generate code for loading from variables potentially shadowed by
3037 // eval-introduced variables.
3038 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3041 // Call the runtime to find the function to call (returned in v0)
3042 // and the object holding it (returned in v1).
3043 DCHECK(!context_register().is(a2));
3044 __ li(a2, Operand(callee->name()));
3045 __ Push(context_register(), a2);
3046 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3047 __ Push(v0, v1); // Function, receiver.
3048 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3050 // If fast case code has been generated, emit code to push the
3051 // function and receiver and have the slow path jump around this
3052 // code.
3053 if (done.is_linked()) {
3059 // The receiver is implicitly the global receiver. Indicate this
3060 // by passing undefined to the call function stub.
3061 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3066 VisitForStackValue(callee);
3067 // refEnv.WithBaseObject()
3068 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3069 __ push(a2); // Reserved receiver slot.
3074 void FullCodeGenerator::VisitCall(Call* expr) {
3076 // We want to verify that RecordJSReturnSite gets called on all paths
3077 // through this function. Avoid early returns.
3078 expr->return_is_recorded_ = false;
3081 Comment cmnt(masm_, "[ Call");
3082 Expression* callee = expr->expression();
3083 Call::CallType call_type = expr->GetCallType(isolate());
3085 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3086 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3087 // to resolve the function we need to call. Then we call the resolved
3088 // function using the given arguments.
3089 ZoneList<Expression*>* args = expr->arguments();
3090 int arg_count = args->length();
3091 PushCalleeAndWithBaseObject(expr);
3093 // Push the arguments.
3094 for (int i = 0; i < arg_count; i++) {
3095 VisitForStackValue(args->at(i));
3098 // Push a copy of the function (found below the arguments) and
3100 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3102 EmitResolvePossiblyDirectEval(arg_count);
3104 // Touch up the stack with the resolved function.
3105 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3107 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3108 // Record source position for debugger.
3109 SetCallPosition(expr, arg_count);
3110 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3111 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3113 RecordJSReturnSite(expr);
3114 // Restore context register.
3115 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3116 context()->DropAndPlug(1, v0);
3117 } else if (call_type == Call::GLOBAL_CALL) {
3118 EmitCallWithLoadIC(expr);
3119 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3120 // Call to a lookup slot (dynamically introduced variable).
3121 PushCalleeAndWithBaseObject(expr);
3123 } else if (call_type == Call::PROPERTY_CALL) {
3124 Property* property = callee->AsProperty();
3125 bool is_named_call = property->key()->IsPropertyName();
3126 if (property->IsSuperAccess()) {
3127 if (is_named_call) {
3128 EmitSuperCallWithLoadIC(expr);
3130 EmitKeyedSuperCallWithLoadIC(expr);
3133 VisitForStackValue(property->obj());
3134 if (is_named_call) {
3135 EmitCallWithLoadIC(expr);
3137 EmitKeyedCallWithLoadIC(expr, property->key());
3140 } else if (call_type == Call::SUPER_CALL) {
3141 EmitSuperConstructorCall(expr);
3143 DCHECK(call_type == Call::OTHER_CALL);
3144 // Call to an arbitrary expression not handled specially above.
3145 VisitForStackValue(callee);
3146 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3148 // Emit function call.
3153 // RecordJSReturnSite should have been called.
3154 DCHECK(expr->return_is_recorded_);
3159 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3160 Comment cmnt(masm_, "[ CallNew");
3161 // According to ECMA-262, section 11.2.2, page 44, the function
3162 // expression in new calls must be evaluated before the
3163 // arguments.
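// For instance (illustrative), in 'new F(a, b)' the expression F is
// evaluated and pushed first, and only then are a and b evaluated and
// pushed left to right.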
3165 // Push constructor on the stack. If it's not a function it's used as
3166 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3167 // discarded.
3168 DCHECK(!expr->expression()->IsSuperPropertyReference());
3169 VisitForStackValue(expr->expression());
3171 // Push the arguments ("left-to-right") on the stack.
3172 ZoneList<Expression*>* args = expr->arguments();
3173 int arg_count = args->length();
3174 for (int i = 0; i < arg_count; i++) {
3175 VisitForStackValue(args->at(i));
3178 // Call the construct call builtin that handles allocation and
3179 // constructor invocation.
3180 SetConstructCallPosition(expr);
3182 // Load function and argument count into a1 and a0.
3183 __ li(a0, Operand(arg_count));
3184 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
3186 // Record call targets in unoptimized code.
3187 if (FLAG_pretenuring_call_new) {
3188 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3189 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3190 expr->CallNewFeedbackSlot().ToInt() + 1);
3193 __ li(a2, FeedbackVector());
3194 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3196 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3197 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3198 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3199 context()->Plug(v0);
3203 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3204 SuperCallReference* super_call_ref =
3205 expr->expression()->AsSuperCallReference();
3206 DCHECK_NOT_NULL(super_call_ref);
3208 EmitLoadSuperConstructor(super_call_ref);
3209 __ push(result_register());
3211 // Push the arguments ("left-to-right") on the stack.
3212 ZoneList<Expression*>* args = expr->arguments();
3213 int arg_count = args->length();
3214 for (int i = 0; i < arg_count; i++) {
3215 VisitForStackValue(args->at(i));
3218 // Call the construct call builtin that handles allocation and
3219 // constructor invocation.
3220 SetConstructCallPosition(expr);
3222 // Load original constructor into t0.
3223 VisitForAccumulatorValue(super_call_ref->new_target_var());
3224 __ mov(t0, result_register());
3226 // Load function and argument count into a1 and a0.
3227 __ li(a0, Operand(arg_count));
3228 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
3230 // Record call targets in unoptimized code.
3231 if (FLAG_pretenuring_call_new) {
3233 /* TODO(dslomov): support pretenuring.
3234 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3235 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3236 expr->CallNewFeedbackSlot().ToInt() + 1);
3240 __ li(a2, FeedbackVector());
3241 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3243 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3244 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3246 RecordJSReturnSite(expr);
3248 context()->Plug(v0);
3252 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3253 ZoneList<Expression*>* args = expr->arguments();
3254 DCHECK(args->length() == 1);
3256 VisitForAccumulatorValue(args->at(0));
3258 Label materialize_true, materialize_false;
3259 Label* if_true = NULL;
3260 Label* if_false = NULL;
3261 Label* fall_through = NULL;
3262 context()->PrepareTest(&materialize_true, &materialize_false,
3263 &if_true, &if_false, &fall_through);
3265 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3267 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
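// (Illustrative) a value is a smi iff its low tag bit is clear, i.e.
//   (value & kSmiTagMask) == 0,
// which is the condition the split above tests (t0 holds the masked value).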
3269 context()->Plug(if_true, if_false);
3273 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3274 ZoneList<Expression*>* args = expr->arguments();
3275 DCHECK(args->length() == 1);
3277 VisitForAccumulatorValue(args->at(0));
3279 Label materialize_true, materialize_false;
3280 Label* if_true = NULL;
3281 Label* if_false = NULL;
3282 Label* fall_through = NULL;
3283 context()->PrepareTest(&materialize_true, &materialize_false,
3284 &if_true, &if_false, &fall_through);
3286 __ JumpIfSmi(v0, if_false);
3287 __ GetObjectType(v0, a1, a1);
3288 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3289 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3290 if_true, if_false, fall_through);
3292 context()->Plug(if_true, if_false);
3296 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3297 ZoneList<Expression*>* args = expr->arguments();
3298 DCHECK(args->length() == 1);
3300 VisitForAccumulatorValue(args->at(0));
3302 Label materialize_true, materialize_false;
3303 Label* if_true = NULL;
3304 Label* if_false = NULL;
3305 Label* fall_through = NULL;
3306 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3307 &if_false, &fall_through);
3309 __ JumpIfSmi(v0, if_false);
3310 __ GetObjectType(v0, a1, a1);
3311 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3312 Split(eq, a1, Operand(SIMD128_VALUE_TYPE), if_true, if_false, fall_through);
3314 context()->Plug(if_true, if_false);
3318 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3319 CallRuntime* expr) {
3320 ZoneList<Expression*>* args = expr->arguments();
3321 DCHECK(args->length() == 1);
3323 VisitForAccumulatorValue(args->at(0));
3325 Label materialize_true, materialize_false, skip_lookup;
3326 Label* if_true = NULL;
3327 Label* if_false = NULL;
3328 Label* fall_through = NULL;
3329 context()->PrepareTest(&materialize_true, &materialize_false,
3330 &if_true, &if_false, &fall_through);
3332 __ AssertNotSmi(v0);
3334 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3335 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
3336 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3337 __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3339 // Check for fast case object. Generate false result for slow case object.
3340 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3341 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3342 __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3343 __ Branch(if_false, eq, a2, Operand(t0));
3345 // Look for valueOf name in the descriptor array, and indicate false if
3346 // found. Since we omit an enumeration index check, a valueOf added via a
3347 // transition that shares this descriptor array is a false positive here.
3348 Label entry, loop, done;
3350 // Skip loop if no descriptors are valid.
3351 __ NumberOfOwnDescriptors(a3, a1);
3352 __ Branch(&done, eq, a3, Operand(zero_reg));
3354 __ LoadInstanceDescriptors(a1, t0);
3355 // t0: descriptor array.
3356 // a3: valid entries in the descriptor array.
3357 STATIC_ASSERT(kSmiTag == 0);
3358 STATIC_ASSERT(kSmiTagSize == 1);
3359 STATIC_ASSERT(kPointerSize == 4);
3360 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3362 // Calculate location of the first key name.
3363 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3364 // Calculate the end of the descriptor array.
3366 __ sll(t1, a3, kPointerSizeLog2);
3367 __ Addu(a2, a2, t1);
3369 // Loop through all the keys in the descriptor array. If one of these is the
3370 // string "valueOf", the result is false.
3371 // The use of t2 to store the valueOf string assumes that it is not otherwise
3372 // used in the loop below.
3373 __ LoadRoot(t2, Heap::kvalueOf_stringRootIndex);
3376 __ lw(a3, MemOperand(t0, 0));
3377 __ Branch(if_false, eq, a3, Operand(t2));
3378 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3380 __ Branch(&loop, ne, t0, Operand(a2));
3384 // Set the bit in the map to indicate that there is no local valueOf field.
3385 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3386 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3387 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3389 __ bind(&skip_lookup);
3391 // If a valueOf property is not found on the object, check that its
3392 // prototype is the unmodified String prototype. If not, the result is false.
3393 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3394 __ JumpIfSmi(a2, if_false);
3395 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3396 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3397 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3398 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3399 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3400 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3402 context()->Plug(if_true, if_false);
3406 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3407 ZoneList<Expression*>* args = expr->arguments();
3408 DCHECK(args->length() == 1);
3410 VisitForAccumulatorValue(args->at(0));
3412 Label materialize_true, materialize_false;
3413 Label* if_true = NULL;
3414 Label* if_false = NULL;
3415 Label* fall_through = NULL;
3416 context()->PrepareTest(&materialize_true, &materialize_false,
3417 &if_true, &if_false, &fall_through);
3419 __ JumpIfSmi(v0, if_false);
3420 __ GetObjectType(v0, a1, a2);
3421 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3422 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3423 __ Branch(if_false);
3425 context()->Plug(if_true, if_false);
3429 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3430 ZoneList<Expression*>* args = expr->arguments();
3431 DCHECK(args->length() == 1);
3433 VisitForAccumulatorValue(args->at(0));
3435 Label materialize_true, materialize_false;
3436 Label* if_true = NULL;
3437 Label* if_false = NULL;
3438 Label* fall_through = NULL;
3439 context()->PrepareTest(&materialize_true, &materialize_false,
3440 &if_true, &if_false, &fall_through);
3442 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3443 __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3444 __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3445 __ li(t0, 0x80000000);
3447 __ Branch(&not_nan, ne, a2, Operand(t0));
3448 __ mov(t0, zero_reg);
3452 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3453 Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
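// Background (illustrative): -0.0 is the IEEE-754 double with only the sign
// bit set, i.e. exponent word 0x80000000 and mantissa word 0, which is
// exactly the pair of comparisons performed above.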
3455 context()->Plug(if_true, if_false);
3459 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3460 ZoneList<Expression*>* args = expr->arguments();
3461 DCHECK(args->length() == 1);
3463 VisitForAccumulatorValue(args->at(0));
3465 Label materialize_true, materialize_false;
3466 Label* if_true = NULL;
3467 Label* if_false = NULL;
3468 Label* fall_through = NULL;
3469 context()->PrepareTest(&materialize_true, &materialize_false,
3470 &if_true, &if_false, &fall_through);
3472 __ JumpIfSmi(v0, if_false);
3473 __ GetObjectType(v0, a1, a1);
3474 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3475 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3476 if_true, if_false, fall_through);
3478 context()->Plug(if_true, if_false);
3482 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3483 ZoneList<Expression*>* args = expr->arguments();
3484 DCHECK(args->length() == 1);
3486 VisitForAccumulatorValue(args->at(0));
3488 Label materialize_true, materialize_false;
3489 Label* if_true = NULL;
3490 Label* if_false = NULL;
3491 Label* fall_through = NULL;
3492 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3493 &if_false, &fall_through);
3495 __ JumpIfSmi(v0, if_false);
3496 __ GetObjectType(v0, a1, a1);
3497 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3498 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3500 context()->Plug(if_true, if_false);
3504 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3505 ZoneList<Expression*>* args = expr->arguments();
3506 DCHECK(args->length() == 1);
3508 VisitForAccumulatorValue(args->at(0));
3510 Label materialize_true, materialize_false;
3511 Label* if_true = NULL;
3512 Label* if_false = NULL;
3513 Label* fall_through = NULL;
3514 context()->PrepareTest(&materialize_true, &materialize_false,
3515 &if_true, &if_false, &fall_through);
3517 __ JumpIfSmi(v0, if_false);
3518 __ GetObjectType(v0, a1, a1);
3519 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3520 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3522 context()->Plug(if_true, if_false);
3526 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3527 ZoneList<Expression*>* args = expr->arguments();
3528 DCHECK(args->length() == 1);
3530 VisitForAccumulatorValue(args->at(0));
3532 Label materialize_true, materialize_false;
3533 Label* if_true = NULL;
3534 Label* if_false = NULL;
3535 Label* fall_through = NULL;
3536 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3537 &if_false, &fall_through);
3539 __ JumpIfSmi(v0, if_false);
3541 Register type_reg = a2;
3542 __ GetObjectType(v0, map, type_reg);
3543 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3544 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3545 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3546 if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3552 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3553 DCHECK(expr->arguments()->length() == 0);
3555 Label materialize_true, materialize_false;
3556 Label* if_true = NULL;
3557 Label* if_false = NULL;
3558 Label* fall_through = NULL;
3559 context()->PrepareTest(&materialize_true, &materialize_false,
3560 &if_true, &if_false, &fall_through);
3562 // Get the frame pointer for the calling frame.
3563 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3565 // Skip the arguments adaptor frame if it exists.
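  // An arguments adaptor frame is inserted between caller and callee when
  // the actual argument count differs from the formal parameter count, so
  // the construct marker may sit one frame further out.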
3566 Label check_frame_marker;
3567 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3568 __ Branch(&check_frame_marker, ne,
3569 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3570 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3572 // Check the marker in the calling frame.
3573 __ bind(&check_frame_marker);
3574 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3575 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3576 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3577 if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3583 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3584 ZoneList<Expression*>* args = expr->arguments();
3585 DCHECK(args->length() == 2);
3587 // Load the two objects into registers and perform the comparison.
3588 VisitForStackValue(args->at(0));
3589 VisitForAccumulatorValue(args->at(1));
3591 Label materialize_true, materialize_false;
3592 Label* if_true = NULL;
3593 Label* if_false = NULL;
3594 Label* fall_through = NULL;
3595 context()->PrepareTest(&materialize_true, &materialize_false,
3596 &if_true, &if_false, &fall_through);
  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3600 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3606 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3607 ZoneList<Expression*>* args = expr->arguments();
3608 DCHECK(args->length() == 1);
3610 // ArgumentsAccessStub expects the key in a1 and the formal
3611 // parameter count in a0.
3612 VisitForAccumulatorValue(args->at(0));
3614 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}
3621 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3622 DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}
3642 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3643 ZoneList<Expression*>* args = expr->arguments();
3644 DCHECK(args->length() == 1);
3645 Label done, null, function, non_function_constructor;
3647 VisitForAccumulatorValue(args->at(0));
3649 // If the object is a smi, we return null.
3650 __ JumpIfSmi(v0, &null);
3652 // Check that the object is a JS object but take special care of JS
3653 // functions to make sure they have 'Function' as their class.
3654 // Assume that there are only two callable types, and one of them is at
3655 // either end of the type range for JS object types. Saves extra comparisons.
3656 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3657 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3658 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
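  // In this V8 version the two callable types are expected to sit at the
  // two ends of the spec-object range (function proxies first, JS functions
  // last); the STATIC_ASSERTs below verify that assumption.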
3660 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3661 FIRST_SPEC_OBJECT_TYPE + 1);
3662 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3664 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3665 LAST_SPEC_OBJECT_TYPE - 1);
3666 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3667 // Assume that there is no larger type.
3668 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3670 // Check if the constructor in the map is a JS function.
3671 Register instance_type = a2;
3672 __ GetMapConstructor(v0, v0, a1, instance_type);
3673 __ Branch(&non_function_constructor, ne, instance_type,
3674 Operand(JS_FUNCTION_TYPE));
3676 // v0 now contains the constructor function. Grab the
3677 // instance class name from there.
3678 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}
3703 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3704 ZoneList<Expression*>* args = expr->arguments();
3705 DCHECK(args->length() == 1);
3707 VisitForAccumulatorValue(args->at(0)); // Load the object.
  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}
3723 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3724 ZoneList<Expression*>* args = expr->arguments();
3725 DCHECK_EQ(1, args->length());
3727 VisitForAccumulatorValue(args->at(0));
3729 Label materialize_true, materialize_false;
3730 Label* if_true = nullptr;
3731 Label* if_false = nullptr;
3732 Label* fall_through = nullptr;
3733 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3734 &if_false, &fall_through);
3736 __ JumpIfSmi(v0, if_false);
3737 __ GetObjectType(v0, a1, a1);
3738 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3739 Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3745 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3746 ZoneList<Expression*>* args = expr->arguments();
3747 DCHECK(args->length() == 2);
3748 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3749 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3751 VisitForAccumulatorValue(args->at(0)); // Load the object.
3753 Register object = v0;
3754 Register result = v0;
3755 Register scratch0 = t5;
3756 Register scratch1 = a1;
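  // JSDate caches the commonly requested fields in the object itself. The
  // cache is valid only while the object's cache stamp matches the
  // isolate-wide date cache stamp, which changes when time zone or DST data
  // change; on a mismatch we fall back to the runtime below.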
  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }

    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(result);
}
3784 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3785 ZoneList<Expression*>* args = expr->arguments();
3786 DCHECK_EQ(3, args->length());
3788 Register string = v0;
3789 Register index = a1;
3790 Register value = a2;
3792 VisitForStackValue(args->at(0)); // index
3793 VisitForStackValue(args->at(1)); // value
3794 VisitForAccumulatorValue(args->at(2)); // string
3795 __ Pop(index, value);
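  // Both index and value arrive as smis; the debug-mode block below checks
  // this and validates that the receiver really is a sequential one-byte
  // string before the raw byte store further down.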
3797 if (FLAG_debug_code) {
3798 __ SmiTst(value, at);
3799 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3800 __ SmiTst(index, at);
3801 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3802 __ SmiUntag(index, index);
3803 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3804 Register scratch = t5;
3805 __ EmitSeqStringSetCharCheck(
3806 string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}
3821 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3822 ZoneList<Expression*>* args = expr->arguments();
3823 DCHECK_EQ(3, args->length());
3825 Register string = v0;
3826 Register index = a1;
3827 Register value = a2;
3829 VisitForStackValue(args->at(0)); // index
3830 VisitForStackValue(args->at(1)); // value
3831 VisitForAccumulatorValue(args->at(2)); // string
3832 __ Pop(index, value);
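  // As in the one-byte case, both operands arrive as smis. For the two-byte
  // store below the index can stay smi-tagged: with kSmiTagSize == 1 the tag
  // multiplies the value by two, which is exactly the byte offset of a
  // two-byte character.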
3834 if (FLAG_debug_code) {
3835 __ SmiTst(value, at);
3836 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3837 __ SmiTst(index, at);
3838 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3839 __ SmiUntag(index, index);
3840 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3841 Register scratch = t5;
3842 __ EmitSeqStringSetCharCheck(
3843 string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}
3858 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3859 ZoneList<Expression*>* args = expr->arguments();
3860 DCHECK(args->length() == 2);
3862 VisitForStackValue(args->at(0)); // Load the object.
3863 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}
3887 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3888 ZoneList<Expression*>* args = expr->arguments();
3889 DCHECK_EQ(args->length(), 1);
3891 // Load the argument into a0 and call the stub.
3892 VisitForAccumulatorValue(args->at(0));
3893 __ mov(a0, result_register());
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}
3901 void FullCodeGenerator::EmitToString(CallRuntime* expr) {
3902 ZoneList<Expression*>* args = expr->arguments();
3903 DCHECK_EQ(1, args->length());
3905 // Load the argument into a0 and convert it.
3906 VisitForAccumulatorValue(args->at(0));
3907 __ mov(a0, result_register());
  ToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}
3915 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3916 ZoneList<Expression*>* args = expr->arguments();
3917 DCHECK_EQ(1, args->length());
3919 // Load the argument into v0 and convert it.
3920 VisitForAccumulatorValue(args->at(0));
3922 Label convert, done_convert;
3923 __ JumpIfSmi(v0, &convert);
3924 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
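  // Symbols and strings (the Name types) occupy the lowest instance-type
  // values, so anything at or below LAST_NAME_TYPE is already a name and
  // needs no conversion.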
3925 __ GetObjectType(v0, a1, a1);
3926 __ Branch(&done_convert, le, a1, Operand(LAST_NAME_TYPE));
  __ bind(&convert);
  ToStringStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  context()->Plug(v0);
}
3936 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
3937 ZoneList<Expression*>* args = expr->arguments();
3938 DCHECK_EQ(1, args->length());
3940 // Load the argument into a0 and convert it.
3941 VisitForAccumulatorValue(args->at(0));
3942 __ mov(a0, result_register());
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}
3950 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3951 ZoneList<Expression*>* args = expr->arguments();
3952 DCHECK(args->length() == 1);
3954 VisitForAccumulatorValue(args->at(0));
  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}
3969 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3970 ZoneList<Expression*>* args = expr->arguments();
3971 DCHECK(args->length() == 2);
3973 VisitForStackValue(args->at(0));
3974 VisitForAccumulatorValue(args->at(1));
3975 __ mov(a0, result_register());
3977 Register object = a1;
3978 Register index = a0;
3979 Register result = v0;
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
4016 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4017 ZoneList<Expression*>* args = expr->arguments();
4018 DCHECK(args->length() == 2);
4020 VisitForStackValue(args->at(0));
4021 VisitForAccumulatorValue(args->at(1));
4022 __ mov(a0, result_register());
4024 Register object = a1;
4025 Register index = a0;
4026 Register scratch = a3;
4027 Register result = v0;
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
4065 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4066 ZoneList<Expression*>* args = expr->arguments();
4067 DCHECK_EQ(2, args->length());
4068 VisitForStackValue(args->at(0));
4069 VisitForAccumulatorValue(args->at(1));
  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}
4079 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
4080 ZoneList<Expression*>* args = expr->arguments();
4081 DCHECK_LE(2, args->length());
4082 // Push target, receiver and arguments onto the stack.
4083 for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  // Move target to a1.
4087 int const argc = args->length() - 2;
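  // The stack now holds target, receiver and argc arguments (pushed in that
  // order), so the target sits argc + 1 slots below the top of the stack.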
  __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
4091 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
4092 // Restore context register.
4093 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4094 // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}
4099 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4100 ZoneList<Expression*>* args = expr->arguments();
4101 DCHECK(args->length() >= 2);
4103 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4104 for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
4107 VisitForAccumulatorValue(args->last()); // Function.
4109 Label runtime, done;
4110 // Check for non-function argument (including proxy).
4111 __ JumpIfSmi(v0, &runtime);
4112 __ GetObjectType(v0, a1, a1);
4113 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4115 // InvokeFunction requires the function in a1. Move it in there.
4116 __ mov(a1, result_register());
4117 ParameterCount count(arg_count);
4118 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCallFunction, args->length());
  __ bind(&done);

  context()->Plug(v0);
}
4131 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4132 ZoneList<Expression*>* args = expr->arguments();
4133 DCHECK(args->length() == 2);
4135 // Evaluate new.target and super constructor.
4136 VisitForStackValue(args->at(0));
4137 VisitForStackValue(args->at(1));
4139 // Load original constructor into t0.
4140 __ lw(t0, MemOperand(sp, 1 * kPointerSize));
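  // new.target was pushed first and the super constructor second, so the
  // slot at sp + kPointerSize holds the original constructor.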
4142 // Check if the calling frame is an arguments adaptor frame.
4143 Label adaptor_frame, args_set_up, runtime;
4144 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4145 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4146 __ Branch(&adaptor_frame, eq, a3,
4147 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // Default constructor has no arguments, so no adaptor frame means no args.
4149 __ mov(a0, zero_reg);
4150 __ Branch(&args_set_up);
4152 // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(a1, a1);
    __ mov(a0, a1);

    // Get arguments pointer in a2.
    __ sll(at, a1, kPointerSizeLog2);
    __ addu(a2, a2, at);
    __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
    Label loop;
    __ bind(&loop);
    // Pre-decrement a2 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ Addu(a2, a2, Operand(-kPointerSize));
    __ lw(a3, MemOperand(a2));
    __ Push(a3);
    __ Addu(a1, a1, Operand(-1));
    __ Branch(&loop, ne, a1, Operand(zero_reg));
  }
4175 __ bind(&args_set_up);
4176 __ sll(at, a0, kPointerSizeLog2);
4177 __ Addu(at, at, Operand(sp));
4178 __ lw(a1, MemOperand(at, 0));
4179 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4181 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}
4190 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4191 RegExpConstructResultStub stub(isolate());
4192 ZoneList<Expression*>* args = expr->arguments();
4193 DCHECK(args->length() == 3);
4194 VisitForStackValue(args->at(0));
4195 VisitForStackValue(args->at(1));
4196 VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}
4205 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4206 ZoneList<Expression*>* args = expr->arguments();
4207 VisitForAccumulatorValue(args->at(0));
4209 Label materialize_true, materialize_false;
4210 Label* if_true = NULL;
4211 Label* if_false = NULL;
4212 Label* fall_through = NULL;
4213 context()->PrepareTest(&materialize_true, &materialize_false,
4214 &if_true, &if_false, &fall_through);
4216 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4217 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
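  // The hash field doubles as storage for a cached array index; the masked
  // bits are all zero only when such a cached index is present.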
4219 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4220 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
4226 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4227 ZoneList<Expression*>* args = expr->arguments();
4228 DCHECK(args->length() == 1);
4229 VisitForAccumulatorValue(args->at(0));
4231 __ AssertString(v0);
4233 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4234 __ IndexFromHash(v0, v0);
  context()->Plug(v0);
}
4240 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4241 Label bailout, done, one_char_separator, long_separator,
4242 non_trivial_array, not_size_one_array, loop,
4243 empty_separator_loop, one_char_separator_loop,
4244 one_char_separator_loop_entry, long_separator_loop;
4245 ZoneList<Expression*>* args = expr->arguments();
4246 DCHECK(args->length() == 2);
4247 VisitForStackValue(args->at(1));
4248 VisitForAccumulatorValue(args->at(0));
4250 // All aliases of the same register have disjoint lifetimes.
4251 Register array = v0;
4252 Register elements = no_reg; // Will be v0.
4253 Register result = no_reg; // Will be v0.
4254 Register separator = a1;
4255 Register array_length = a2;
4256 Register result_pos = no_reg; // Will be a2.
4257 Register string_length = a3;
4258 Register string = t0;
4259 Register element = t1;
4260 Register elements_end = t2;
4261 Register scratch1 = t3;
4262 Register scratch2 = t5;
4263 Register scratch3 = t4;
  // Separator operand is on the stack.
  __ lw(separator, MemOperand(sp));
4268 // Check that the array is a JSArray.
4269 __ JumpIfSmi(array, &bailout);
4270 __ GetObjectType(array, scratch1, scratch2);
4271 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4273 // Check that the array has fast elements.
4274 __ CheckFastElements(scratch1, scratch2, &bailout);
4276 // If the array has length zero, return the empty string.
4277 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4278 __ SmiUntag(array_length);
4279 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

4283 __ bind(&non_trivial_array);
4285 // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4288 array = no_reg; // End of array's live range.
4290 // Check that all array elements are sequential one-byte strings, and
4291 // accumulate the sum of their lengths, as a smi-encoded value.
4292 __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4295 __ sll(elements_end, array_length, kPointerSizeLog2);
4296 __ Addu(elements_end, element, elements_end);
4297 // Loop condition: while (element < elements_end).
4298 // Live values in registers:
4299 // elements: Fixed array of strings.
4300 // array_length: Length of the fixed array of strings (not smi)
4301 // separator: Separator string
4302 // string_length: Accumulated sum of string lengths (smi).
4303 // element: Current array element.
4304 // elements_end: Array end.
4305 if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
4310 __ lw(string, MemOperand(element));
4311 __ Addu(element, element, kPointerSize);
4312 __ JumpIfSmi(string, &bailout);
4313 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4314 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4315 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4316 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4317 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4318 __ BranchOnOverflow(&bailout, scratch3);
4319 __ Branch(&loop, lt, element, Operand(elements_end));
4321 // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);
4328 // Live values in registers:
4329 // separator: Separator string
4330 // array_length: Length of the array.
4331 // string_length: Sum of string lengths (smi).
4332 // elements: FixedArray of strings.
4334 // Check that the separator is a flat one-byte string.
4335 __ JumpIfSmi(separator, &bailout);
4336 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4337 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4338 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4340 // Add (separator length times array_length) - separator length to the
4341 // string_length to get the length of the result string. array_length is not
4342 // smi but the other values are, so the result is a smi.
4343 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4344 __ Subu(string_length, string_length, Operand(scratch1));
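  // The result length is sum(lengths) + (array_length - 1) * separator
  // length, computed here as sum - sep_len + sep_len * array_length. For
  // example, joining three strings of total length 10 with a two-character
  // separator yields 10 - 2 + 2 * 3 = 14 characters.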
4345 __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
4348 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4349 __ And(scratch3, scratch2, Operand(0x80000000));
4350 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4351 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4352 __ BranchOnOverflow(&bailout, scratch3);
4353 __ SmiUntag(string_length);
  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4359 result = elements; // End of live range for elements.
4361 // Live values in registers:
4362 // element: First array element
4363 // separator: Separator string
4364 // string_length: Length of result string (not smi)
4365 // array_length: Length of the array.
4366 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4367 elements_end, &bailout);
4368 // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
4371 __ sll(elements_end, array_length, kPointerSizeLog2);
4372 __ Addu(elements_end, element, elements_end);
4373 result_pos = array_length; // End of live range for array_length.
4374 array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4379 // Check the length of the separator.
4380 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4381 __ li(at, Operand(Smi::FromInt(1)));
4382 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4383 __ Branch(&long_separator, gt, scratch1, Operand(at));
4385 // Empty separator case.
4386 __ bind(&empty_separator_loop);
4387 // Live values in registers:
4388 // result_pos: the position to which we are currently copying characters.
4389 // element: Current array element.
4390 // elements_end: Array end.
4392 // Copy next array element to the result.
4393 __ lw(string, MemOperand(element));
4394 __ Addu(element, element, kPointerSize);
4395 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4396 __ SmiUntag(string_length);
4397 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4398 __ CopyBytes(string, result_pos, string_length, scratch1);
4399 // End while (element < elements_end).
4400 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);
4404 // One-character separator case.
4405 __ bind(&one_char_separator);
4406 // Replace separator with its one-byte character value.
4407 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4408 // Jump into the loop after the code that copies the separator, so the first
4409 // element is not preceded by a separator.
4410 __ jmp(&one_char_separator_loop_entry);
4412 __ bind(&one_char_separator_loop);
4413 // Live values in registers:
4414 // result_pos: the position to which we are currently copying characters.
4415 // element: Current array element.
4416 // elements_end: Array end.
4417 // separator: Single separator one-byte char (in lower byte).
4419 // Copy the separator character to the result.
4420 __ sb(separator, MemOperand(result_pos));
4421 __ Addu(result_pos, result_pos, 1);
4423 // Copy next array element to the result.
4424 __ bind(&one_char_separator_loop_entry);
4425 __ lw(string, MemOperand(element));
4426 __ Addu(element, element, kPointerSize);
4427 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4428 __ SmiUntag(string_length);
4429 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4430 __ CopyBytes(string, result_pos, string_length, scratch1);
4431 // End while (element < elements_end).
4432 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);
4436 // Long separator case (separator is more than one character). Entry is at the
4437 // label long_separator below.
4438 __ bind(&long_separator_loop);
4439 // Live values in registers:
4440 // result_pos: the position to which we are currently copying characters.
4441 // element: Current array element.
4442 // elements_end: Array end.
4443 // separator: Separator string.
4445 // Copy the separator to the result.
4446 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
4447 __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4451 __ CopyBytes(string, result_pos, string_length, scratch1);
4453 __ bind(&long_separator);
4454 __ lw(string, MemOperand(element));
4455 __ Addu(element, element, kPointerSize);
4456 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4457 __ SmiUntag(string_length);
4458 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4459 __ CopyBytes(string, result_pos, string_length, scratch1);
4460 // End while (element < elements_end).
4461 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}
4472 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4473 DCHECK(expr->arguments()->length() == 0);
4474 ExternalReference debug_is_active =
4475 ExternalReference::debug_is_active_address(isolate());
4476 __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}
4483 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
4484 ZoneList<Expression*>* args = expr->arguments();
4485 DCHECK_EQ(2, args->length());
4486 VisitForStackValue(args->at(0));
4487 VisitForStackValue(args->at(1));
4489 Label runtime, done;
4491 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
4492 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4493 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
  __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ Pop(a2, a3);
  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
4497 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
4498 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4499 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
4500 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
4501 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
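  // The five stored words are map, properties, elements, value and done,
  // which is what the size assertion below checks.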
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject, 2);

  __ bind(&done);
  context()->Plug(v0);
}
4513 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4514 // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ push(v0);
4518 __ lw(v0, GlobalObjectOperand());
4519 __ lw(v0, FieldMemOperand(v0, GlobalObject::kNativeContextOffset));
  __ lw(v0, ContextOperand(v0, expr->context_index()));
}
4524 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4525 ZoneList<Expression*>* args = expr->arguments();
4526 int arg_count = args->length();
4528 SetCallPosition(expr, arg_count);
4529 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}
4535 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4536 ZoneList<Expression*>* args = expr->arguments();
4537 int arg_count = args->length();
4539 if (expr->is_jsruntime()) {
4540 Comment cmnt(masm_, "[ CallRuntime");
4541 EmitLoadJSRuntimeFunction(expr);
4543 // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
4548 // Push the arguments ("left-to-right").
4549 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

4553 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4554 EmitCallJSRuntimeFunction(expr);
4556 // Restore context register.
4557 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else {
    const Runtime::Function* function = expr->function();
4563 switch (function->function_id) {
4564 #define CALL_INTRINSIC_GENERATOR(Name) \
4565 case Runtime::kInline##Name: { \
4566 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(v0);
      }
    }
  }
}
4588 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4589 switch (expr->op()) {
4590 case Token::DELETE: {
4591 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4592 Property* property = expr->expression()->AsProperty();
4593 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4595 if (property != NULL) {
4596 VisitForStackValue(property->obj());
4597 VisitForStackValue(property->key());
4598 __ CallRuntime(is_strict(language_mode())
4599 ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy,
                       2);
4602 context()->Plug(v0);
4603 } else if (proxy != NULL) {
4604 Variable* var = proxy->var();
4605 // Delete of an unqualified identifier is disallowed in strict mode but
4606 // "delete this" is allowed.
4607 bool is_this = var->HasThisName(isolate());
4608 DCHECK(is_sloppy(language_mode()) || is_this);
4609 if (var->IsUnallocatedOrGlobalSlot()) {
4610 __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
4614 context()->Plug(v0);
4615 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4616 // Result of deleting non-global, non-dynamic variables is false.
4617 // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
4621 // context where the variable was introduced.
4622 DCHECK(!context_register().is(a2));
4623 __ li(a2, Operand(var->name()));
4624 __ Push(context_register(), a2);
4625 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
4630 // The subexpression may have side effects.
4631 VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4639 VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4646 if (context()->IsEffect()) {
4647 // Unary NOT has no side effects so it's only necessary to visit the
4648 // subexpression. Match the optimizing compiler by not branching.
4649 VisitForEffect(expr->expression());
4650 } else if (context()->IsTest()) {
4651 const TestContext* test = TestContext::cast(context());
4652 // The labels are swapped for the recursive call.
4653 VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
4657 context()->Plug(test->true_label(), test->false_label());
4659 // We handle value contexts explicitly rather than simply visiting
4660 // for control and plugging the control flow into the context,
4661 // because we need to prepare a pair of extra administrative AST ids
4662 // for the optimizing compiler.
4663 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4664 Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
4670 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4671 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
4675 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4676 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

4683 case Token::TYPEOF: {
4684 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
4702 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4703 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4705 Comment cmnt(masm_, "[ CountOperation");
4707 Property* prop = expr->expression()->AsProperty();
4708 LhsKind assign_type = Property::GetAssignType(prop);
4710 // Evaluate expression and get value.
4711 if (assign_type == VARIABLE) {
4712 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4713 AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    switch (assign_type) {
4722 case NAMED_PROPERTY: {
4723 // Put the object both on the stack and in the register.
4724 VisitForStackValue(prop->obj());
4725 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
4731 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4732 VisitForAccumulatorValue(
4733 prop->obj()->AsSuperPropertyReference()->home_object());
4734 __ Push(result_register());
4735 const Register scratch = a1;
4736 __ lw(scratch, MemOperand(sp, kPointerSize));
4737 __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
4743 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4744 VisitForAccumulatorValue(
4745 prop->obj()->AsSuperPropertyReference()->home_object());
4746 const Register scratch = a1;
4747 const Register scratch1 = t0;
4748 __ Move(scratch, result_register());
4749 VisitForAccumulatorValue(prop->key());
4750 __ Push(scratch, result_register());
4751 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
4752 __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
4758 VisitForStackValue(prop->obj());
4759 VisitForStackValue(prop->key());
4760 __ lw(LoadDescriptor::ReceiverRegister(),
4761 MemOperand(sp, 1 * kPointerSize));
4762 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
4774 if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }
4780 // Inline smi case if we are in a loop.
4781 Label stub_call, done;
4782 JumpPatchSite patch_site(masm_);
4784 int count_value = expr->op() == Token::INC ? 1 : -1;
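  // The inlined fast path below adds count_value as a smi directly and
  // bails out to the BinaryOpIC stub on overflow or when the input is not
  // a smi.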
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);
4790 // Save result for postfix expressions.
4791 if (expr->is_postfix()) {
4792 if (!context()->IsEffect()) {
4793 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }
4816 Register scratch1 = a1;
4817 Register scratch2 = t0;
4818 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4819 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4820 __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Subu(v0, v0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
4826 if (!is_strong(language_mode())) {
4827 ToNumberStub convert_stub(isolate());
4828 __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
4833 if (expr->is_postfix()) {
4834 if (!context()->IsEffect()) {
4835 // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }
  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));
4862 SetExpressionPosition(expr);
4865 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4866 strength(language_mode())).code();
4867 CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
4878 { EffectContext context(this);
4879 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4880 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectConstant we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
4897 __ mov(StoreDescriptor::ValueRegister(), result_register());
4898 __ li(StoreDescriptor::NameRegister(),
4899 Operand(prop->key()->AsLiteral()->value()));
4900 __ pop(StoreDescriptor::ReceiverRegister());
4901 if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
4918 EmitNamedSuperPropertyStore(prop);
4919 if (expr->is_postfix()) {
4920 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
4929 EmitKeyedSuperPropertyStore(prop);
4930 if (expr->is_postfix()) {
4931 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
4940 __ mov(StoreDescriptor::ValueRegister(), result_register());
4941 __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4945 if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4952 if (expr->is_postfix()) {
4953 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}
4965 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4966 Expression* sub_expr,
4967 Handle<String> check) {
4968 Label materialize_true, materialize_false;
4969 Label* if_true = NULL;
4970 Label* if_false = NULL;
4971 Label* fall_through = NULL;
4972 context()->PrepareTest(&materialize_true, &materialize_false,
4973 &if_true, &if_false, &fall_through);
4975 { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4980 Factory* factory = isolate()->factory();
4981 if (String::Equals(check, factory->number_string())) {
4982 __ JumpIfSmi(v0, if_true);
4983 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4984 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4985 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4986 } else if (String::Equals(check, factory->string_string())) {
4987 __ JumpIfSmi(v0, if_false);
4988 __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
4992 __ JumpIfSmi(v0, if_false);
4993 __ GetObjectType(v0, v0, a1);
4994 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4995 } else if (String::Equals(check, factory->boolean_string())) {
4996 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4997 __ Branch(if_true, eq, v0, Operand(at));
4998 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4999 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5000 } else if (String::Equals(check, factory->undefined_string())) {
5001 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5002 __ Branch(if_true, eq, v0, Operand(at));
5003 __ JumpIfSmi(v0, if_false);
5004 // Check for undetectable objects => true.
5005 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5006 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5007 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5008 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5009 } else if (String::Equals(check, factory->function_string())) {
5010 __ JumpIfSmi(v0, if_false);
5011 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5012 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
5018 __ JumpIfSmi(v0, if_false);
5019 __ LoadRoot(at, Heap::kNullValueRootIndex);
5020 __ Branch(if_true, eq, v0, Operand(at));
5021 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
5022 __ GetObjectType(v0, v0, a1);
5023 __ Branch(if_false, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
5024 // Check for callable or undetectable objects => false.
5025 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5028 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5030 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
5031 } else if (String::Equals(check, factory->type##_string())) { \
5032 __ JumpIfSmi(v0, if_false); \
5033 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
5034 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
5035 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
5046 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5047 Comment cmnt(masm_, "[ CompareOperation");
5048 SetExpressionPosition(expr);
5050 // First we try a fast inlined version of the compare when one of
5051 // the operands is a literal.
5052 if (TryLiteralCompare(expr)) return;
5054 // Always perform the comparison for its control flow. Pack the result
5055 // into the expression's context after the comparison is performed.
5056 Label materialize_true, materialize_false;
5057 Label* if_true = NULL;
5058 Label* if_false = NULL;
5059 Label* fall_through = NULL;
5060 context()->PrepareTest(&materialize_true, &materialize_false,
5061 &if_true, &if_false, &fall_through);
5063 Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ CallRuntime(Runtime::kHasProperty, 2);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
5075 VisitForAccumulatorValue(expr->right());
      __ mov(a0, result_register());
      __ pop(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5081 __ LoadRoot(at, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(at), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
5088 Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
5093 JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
5097 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5098 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
5103 isolate(), op, strength(language_mode())).code();
5104 CallIC(ic, expr->CompareOperationFeedbackId());
5105 patch_site.EmitPatchInfo();
5106 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
5112 // expression's context.
  context()->Plug(if_true, if_false);
}
5117 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                             Expression* sub_expr,
                                             NilValue nil) {
  Label materialize_true, materialize_false;
5121 Label* if_true = NULL;
5122 Label* if_false = NULL;
5123 Label* fall_through = NULL;
5124 context()->PrepareTest(&materialize_true, &materialize_false,
5125 &if_true, &if_false, &fall_through);
5127 VisitForAccumulatorValue(sub_expr);
5128 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5129 __ mov(a0, result_register());
5130 if (expr->op() == Token::EQ_STRICT) {
5131 Heap::RootListIndex nil_value = nil == kNullValue ?
5132 Heap::kNullValueRootIndex :
5133 Heap::kUndefinedValueRootIndex;
5134 __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5138 CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
5145 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5146 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}
Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}
5161 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5162 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}
5167 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}
5172 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5173 Scope* closure_scope = scope()->ClosureScope();
5174 if (closure_scope->is_script_scope() ||
5175 closure_scope->is_module_scope()) {
5176 // Contexts nested in the native context have a canonical empty function
5177 // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
5180 __ li(at, Operand(Smi::FromInt(0)));
5181 } else if (closure_scope->is_eval_scope()) {
5182 // Contexts created by a call to eval have the same closure as the
5183 // context calling eval, not the anonymous closure containing the eval
5184 // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}
5194 // ----------------------------------------------------------------------------
5195 // Non-local control flow support.
5197 void FullCodeGenerator::EnterFinallyBlock() {
5198 DCHECK(!result_register().is(a1));
5199 // Store result register while executing finally block.
5200 __ push(result_register());
5201 // Cook return address in link register to stack (smi encoded Code* delta).
5202 __ Subu(a1, ra, Operand(masm_->CodeObject()));
5203 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5204 STATIC_ASSERT(0 == kSmiTag);
5205 __ Addu(a1, a1, Operand(a1)); // Convert to smi.
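  // Keeping the delta smi-encoded means this stack slot never looks like a
  // raw code address, so the GC can relocate the code object while the
  // finally block runs.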
  // Store result register while executing finally block.
  __ push(a1);
5210 // Store pending message while executing finally block.
5211 ExternalReference pending_message_obj =
5212 ExternalReference::address_of_pending_message_obj(isolate());
5213 __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ClearPendingMessage();
}
5221 void FullCodeGenerator::ExitFinallyBlock() {
5222 DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
5225 ExternalReference pending_message_obj =
5226 ExternalReference::address_of_pending_message_obj(isolate());
5227 __ li(at, Operand(pending_message_obj));
5228 __ sw(a1, MemOperand(at));
  // Restore result register from stack.
  __ pop(a1);

5233 // Uncook return address and return.
5234 __ pop(result_register());
5235 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5236 __ sra(a1, a1, 1); // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}
5242 void FullCodeGenerator::ClearPendingMessage() {
5243 DCHECK(!result_register().is(a1));
5244 ExternalReference pending_message_obj =
5245 ExternalReference::address_of_pending_message_obj(isolate());
5246 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
5247 __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}
5252 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5253 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5254 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
        Operand(SmiFromSlot(slot)));
}


#undef __

5262 void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
5265 Code* replacement_code) {
5266 static const int kInstrSize = Assembler::kInstrSize;
5267 Address branch_address = pc - 6 * kInstrSize;
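  // The back edge sequence is six instructions long (slt/addiu, beq, lui,
  // ori, jalr, nop -- see the per-state comments below), so the instruction
  // to patch sits 6 * kInstrSize before the returned pc.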
5268 CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg  (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
5281 case ON_STACK_REPLACEMENT:
5282 case OSR_AFTER_STACK_CHECK:
5283 // addiu at, zero_reg, 1
5284 // beq at, zero_reg, ok ;; Not changed
5285 // lui t9, <on-stack replacement address> upper
5286 // ori t9, <on-stack replacement address> lower
5287 // jalr t9 ;; Not changed
5288 // nop ;; Not changed
5289 // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }

  Address pc_immediate_load_address = pc - 4 * kInstrSize;
5294 // Replace the stack check address in the load-immediate (lui/ori pair)
5295 // with the entry address of the replacement code.
5296 Assembler::set_target_address_at(pc_immediate_load_address,
5297 replacement_code->entry());
5299 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
5304 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
5309 Address branch_address = pc - 6 * kInstrSize;
5310 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5312 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
5313 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5314 DCHECK(reinterpret_cast<uint32_t>(
5315 Assembler::target_address_at(pc_immediate_load_address)) ==
5316 reinterpret_cast<uint32_t>(
                   isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5323 if (reinterpret_cast<uint32_t>(
5324 Assembler::target_address_at(pc_immediate_load_address)) ==
5325 reinterpret_cast<uint32_t>(
5326 isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint32_t>(
5331 Assembler::target_address_at(pc_immediate_load_address)) ==
5332 reinterpret_cast<uint32_t>(
5333 isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}
}  // namespace internal
}  // namespace v8

5341 #endif // V8_TARGET_ARCH_MIPS