// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
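// For example (illustrative): a delta of 10 instructions is recorded as
// andi zero_reg, <register code 0>, 10, since 0 * 0xffff + 10 == 10.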
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
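    // When the inline smi code is patched in (see PatchInlinedSmiCode in the
    // MIPS IC code), the andi immediate above becomes the smi tag mask and
    // the branch sense is flipped, turning this into a real smi check.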
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->MustReplaceUndefinedReceiverWithGlobalProxy()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
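      // Illustrative: 70 locals with kMaxPushes == 32 emit two unrolled
      // batches of 32 stores below, then 6 remaining stores.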
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                   (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, a1, a2, a3);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    Label check_frame_marker;
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
    __ Branch(&check_frame_marker, ne, a1,
              Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

    // Check the marker in the calling frame.
    __ bind(&check_frame_marker);
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));

    Label non_construct_frame, done;
    __ Branch(&non_construct_frame, ne, a1,
              Operand(Smi::FromInt(StackFrame::CONSTRUCT)));

    __ lw(v0,
          MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ Branch(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, v0, a2, a3);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Addu(a3, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a2, Operand(Smi::FromInt(num_parameters)));
    __ li(a1, Operand(Smi::FromInt(rest_index)));
    __ li(a0, Operand(Smi::FromInt(language_mode())));
    __ Push(a3, a2, a1, a0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (literal()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
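    // That is one sixteenth of the normal budget, so the counter reaches
    // zero (and checks for interrupts) much sooner while debugging.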
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
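  // Illustrative: a back edge spanning about 10 * kCodeSizeMultiplier bytes
  // of code gets a weight of 10 (clamped to [1, kMaxBackEdgeWeight]), so
  // larger loop bodies drain the interrupt budget faster per iteration.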
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
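  // The ToBoolean stub leaves a nonzero value in v0 exactly when the
  // condition evaluates to true, so the split below tests v0 against zero.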
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
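  // (Parameters therefore resolve to positive offsets into the caller's
  // frame above fp, while locals resolve to negative offsets below it.)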
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
      }
      __ Push(a2, a0);
      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,
                     2);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());

    __ bind(&skip);
    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
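  // From here on the loop owns five stack slots, dropped at the break label
  // (offsets assume 32-bit kPointerSize):
  //   sp[0]  : current index (smi)
  //   sp[4]  : fixed array length (smi)
  //   sp[8]  : fixed array of keys
  //   sp[12] : expected map, or smi 1 (slow check) / smi 0 (proxy)
  //   sp[16] : the enumerable object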

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
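  // Illustrative example:
  //   function f() { eval("var x = 1"); return y; }
  // Here 'y' is typically DYNAMIC_GLOBAL: the fast path below loads it like
  // an ordinary global as long as no context on the chain has acquired an
  // extension object at runtime.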
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    }
  } else {
    __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ li(LoadDescriptor::SlotRegister(),
          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->has_literal() &&
                (info_->literal()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), v0);
              __ li(StoreDescriptor::NameRegister(),
                    Operand(isolate()->factory()->home_object_symbol()));
              __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
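  // For example (illustrative): in { a: 1, [b]: 2, c: 3 } the static part is
  // { a: 1 }, and the dynamic part is everything from [b] onwards,
  // including c.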
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on the stack.
      result_saved = true;
    }

    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ li(a0, Operand(Smi::FromInt(NONE)));
            __ push(a0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then we can
    // turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(v0);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (has_fast_elements) {
      int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ li(a3, Operand(Smi::FromInt(array_index)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part is the part after the first spread
  // expression (inclusive), and these elements get appended to the array.
  // Note that the number of elements an iterable produces is unknown ahead
  // of time.
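  // For instance, in [1, 2, ...iter, 7] the elements 1 and 2 live in the
  // cloned boilerplate, while ...iter and 7 are appended one by one below.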
  if (array_index < length && result_saved) {
    __ Pop();  // literal index
    __ Pop(v0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(v0);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    __ Pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      const Register scratch = a1;
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Move(scratch, result_register());
      VisitForAccumulatorValue(property->key());
      __ Push(scratch, result_register());
      if (expr->is_compound()) {
        const Register scratch1 = t0;
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
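  // For instance, "o.x += 1" must be able to deoptimize between loading o.x
  // and storing the sum back, so the load gets its own bailout id below.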
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);
      __ bind(&continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
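      //
      // The code below is, in effect, the desugared body of "yield* iter":
      //   received = undefined;
      //   loop: result = iter.next/throw(received);
      //         if (result.done) break;
      //         received = yield result;   // suspend without re-boxing
      // and result.value is the value of the whole expression.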

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));           // iter
      __ Push(load_name, a3, a0);                     // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);                                        // result
      int handler_index = NewHandlerTableEntry();
      EnterTryBlock(handler_index, &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ push(a0);                                       // result

      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ mov(a0, v0);
      __ jmp(&l_resume);

      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + try_block_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);                                       // g
      __ Push(Smi::FromInt(handler_index));              // handler-index
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);                                        // result
      EmitReturnSequence();
      __ mov(a0, v0);
      __ bind(&l_resume);                                // received in a0
      ExitTryBlock(handler_index);

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));          // iter
      __ Push(load_name, a3, a0);                      // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ lw(load_receiver, MemOperand(sp, kPointerSize));
      __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
      __ li(LoadDescriptor::SlotRegister(),
            Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      SetCallPosition(expr, 1);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Move(load_receiver, v0);

      __ push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      __ li(LoadDescriptor::SlotRegister(),
            Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);  // v0=result.done
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      __ li(LoadDescriptor::SlotRegister(),
            Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);                         // v0=result.value
      context()->DropAndPlug(2, v0);                         // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
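  // (This is the code behind e.g. gen.next(value) and gen.throw(error);
  // resume_mode distinguishes the two.)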
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to
  // fix up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  DCHECK(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
  __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in
  // the root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadFromSuper, 4);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallIC(ic);
}


void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      // The result fits in a smi only if bits 30 and 31 agree; adding
      // 0x40000000 yields a negative value exactly when they do not.
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      // An unsigned result only fits in a smi if its top two bits are clear.
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
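    // Smi multiplication below: Mul leaves the low word of the 64-bit
    // product in v0 and the high word in scratch2; the product fits in 32
    // bits only if the high word equals the sign-extension of the low word.
    // A zero product still needs the sign check that follows, because
    // 0 * -n is -0, which cannot be represented as a smi.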
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
                                                  int* used_store_slots) {
  // Constructor is in v0.
  DCHECK(lit != NULL);
  __ push(v0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = a1;
  __ lw(scratch,
        FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
  __ push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ lw(scratch, MemOperand(sp, 0));  // prototype
    }
    __ push(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here
    // instead of performing the check for every property.
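    // For instance, "class C { static ['prototype']() {} }" must throw at
    // runtime, which is what kThrowIfStaticPrototype checks for below.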
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
      __ push(v0);
    }

    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2,
                              lit->SlotForHomeObject(value, used_store_slots));

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;

      case ObjectLiteral::Property::SETTER:
        __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;

      default:
        UNREACHABLE();
    }
  }

  // Set both the prototype and constructor to have fast properties, and also
  // freeze them in strong mode.
  __ CallRuntime(Runtime::kFinalizeClassDefinition, 2);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  __ pop(a1);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorICSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ lw(v0, MemOperand(sp, kPointerSize));        // value
      __ lw(scratch2, MemOperand(sp, 0));             // this
      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
      __ lw(scratch, MemOperand(sp, 0));  // home_object
      __ sw(scratch, MemOperand(sp, kPointerSize));
      __ sw(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      __ Pop(StoreDescriptor::ValueRegister(),
             StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorICSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->IsGlobalSlot()) {
    // Global var, const, or let.
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
    __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
    int const slot = var->index();
    int const depth = scope()->ContextChainLength(var->scope());
    if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
      __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ Push(a0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy,
                     2);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
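    // A let binding accessed before initialization is in its temporal dead
    // zone; e.g. "x = 1; let x;" must throw, which is why the hole check
    // below raises a ReferenceError.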
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&assign, ne, a3, Operand(t0));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT_CONST) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&const_error, ne, a3, Operand(at));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);

  } else if (var->is_this() && op == Token::INIT_CONST) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&uninitialized_this, eq, a3, Operand(at));
    __ li(a0, Operand(var->name()));
    __ Push(a0);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, language mode.
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Value, context and name.
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, a1);
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError, 0);
    }
    // Silently ignore store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  __ pop(StoreDescriptor::ReceiverRegister());
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallStoreIC();
  } else {
    CallStoreIC(expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(v0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(v0);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  if (FLAG_vector_stores) {
    EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallIC(ic);
  } else {
    CallIC(ic, expr->AssignmentFeedbackId());
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), v0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), v0);
      __ pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if
    // it is a sloppy mode method.
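    // (In sloppy mode an undefined receiver is replaced with the global
    // proxy by the callee itself, so nothing more is needed here.)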
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ push(at);
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadFromSuper, 4);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallICState::METHOD);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}


void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Record source position of the IC call.
  SetCallPosition(expr, arg_count);
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is
  // provided by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t3: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
  }

  // t2: the enclosing function.
  __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t1: the language mode.
  __ li(t1, Operand(Smi::FromInt(language_mode())));

  // t0: the start position of the scope the call resides in.
  __ li(t0, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(t3, t2, t1, t0);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;

    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    DCHECK(!context_register().is(a2));
    __ li(a2, Operand(callee->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ push(a2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
    // to resolve the function we need to call. Then we call the resolved
    // function using the given arguments.
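    // A call that merely looks like "eval(...)" may still turn out to be an
    // indirect call at runtime (e.g. if "eval" was reassigned), so the
    // resolution has to happen here, after the callee has been pushed.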
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    PushCalleeAndWithBaseObject(expr);

    // Push the arguments.
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Push a copy of the function (found below the arguments) and
    // resolve eval.
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ push(a1);
    EmitResolvePossiblyDirectEval(arg_count);

    // Touch up the stack with the resolved function.
    __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));

    PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
    // Record source position for debugger.
    SetCallPosition(expr, arg_count);
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    PushCalleeAndWithBaseObject(expr);
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      VisitForStackValue(property->obj());
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else if (call_type == Call::SUPER_CALL) {
    EmitSuperConstructorCall(expr);
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    VisitForStackValue(callee);
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the arguments.
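  // For instance, in "new f(g())" the expression f is evaluated (and pushed
  // below) before g() runs.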

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  EmitLoadSuperConstructor(super_call_ref);
  __ push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load original constructor into t0.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(t0, result_register());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));

  CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  RecordJSReturnSite(expr);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(SIMD128_VALUE_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
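  // Such a false positive is harmless: it merely makes the check answer
  // "unsafe" conservatively, forcing the generic path.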
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of t2 to store the valueOf string assumes that it is not
  // otherwise used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

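// The check above relies on the IEEE-754 bit pattern of -0.0: it is the only
// double whose upper (exponent) word is 0x80000000 while the lower (mantissa)
// word is zero. If the upper word matches, the code substitutes a2 <- mantissa
// and t0 <- 0, so the final Split reduces to "mantissa == 0"; otherwise the
// Split compares the upper word against 0x80000000, which already failed.
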
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  Register map = a1;
  Register type_reg = a2;
  __ GetObjectType(v0, map, type_reg);
  __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}

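// Frame walk in a nutshell: fp points at our own frame, and the caller's fp
// is at [fp + kCallerFPOffset]. An arguments adaptor frame is recognized by
// the ARGUMENTS_ADAPTOR smi marker in its context slot; it records the
// actual (adapted) argument count in its length slot, which is what gets
// returned here instead of the formal parameter count.
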
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}

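// The STATIC_ASSERT-backed comparisons above exploit the instance type
// ordering: the only two callable types sit immediately at both ends of the
// spec-object range, so "lt FIRST_SPEC_OBJECT_TYPE" filters out non-objects,
// and equality with either range boundary identifies a function without a
// separate map bitfield test.
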
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NOT_NULL(args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    Label runtime, done;
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(result);
}

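// Cached date fields are only valid while the global date cache has not been
// reset; the date object snapshots the cache stamp when its fields are
// computed. Roughly (illustrative pseudocode only):
//
//   if (object->cache_stamp == isolate->date_cache_stamp)
//     result = object->fields[index];           // fast path, loaded inline
//   else
//     result = get_date_field(object, index);   // C call on the slow path
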
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}

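// Note why the one-byte variant untags the index while this one does not:
// a smi is the integer value shifted left by one (kSmiTagSize == 1), which
// is exactly the byte offset of a two-byte character, i.e.
//
//   addr = string + kHeaderSize - kHeapObjectTag + (index << 1)
//
// so the smi-tagged index can be added directly, as the STATIC_ASSERT above
// documents.
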
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into a0 and convert it.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}

void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // new.target
  VisitForStackValue(args->at(0));

  // .this_function
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kGetPrototype, 1);
  __ Push(result_register());

  // Load original constructor into t0.
  __ lw(t0, MemOperand(sp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, args_set_up, runtime;
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&adaptor_frame, eq, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // default constructor has no arguments, so no adaptor frame means no args.
  __ mov(a0, zero_reg);
  __ Branch(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(a1, a1);

    __ mov(a0, a1);

    // Get arguments pointer in a2.
    __ sll(at, a1, kPointerSizeLog2);
    __ addu(a2, a2, at);
    __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
    Label loop;
    __ bind(&loop);
    // Pre-decrement a2 with kPointerSize on each iteration.
    // Pre-decrement in order to skip receiver.
    __ Addu(a2, a2, Operand(-kPointerSize));
    __ lw(a3, MemOperand(a2));
    __ push(a3);
    __ Addu(a1, a1, Operand(-1));
    __ Branch(&loop, ne, a1, Operand(zero_reg));
  }

  __ bind(&args_set_up);
  __ sll(at, a0, kPointerSizeLog2);
  __ Addu(at, at, Operand(sp));
  __ lw(a1, MemOperand(at, 0));
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}

void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ lw(separator, MemOperand(sp));

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}

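// Result length bookkeeping above, in plain arithmetic: with n the array
// length and the accumulated lengths held as smis,
//
//   result_length = sum(len(elements[i])) + separator_length * (n - 1)
//
// computed as string_length - separator_length + separator_length * n. The
// Mul writes the 64-bit product's high word to scratch3 and low word to
// scratch2; the high word and the low word's sign bit must both be zero for
// the product to still be a valid non-negative smi.
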
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ push(v0);

  __ lw(v0, GlobalObjectOperand());
  __ lw(v0, FieldMemOperand(v0, GlobalObject::kNativeContextOffset));
  __ lw(v0, ContextOperand(v0, expr->context_index()));
  __ push(v0);
}

void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr, arg_count);
  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}

void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(v0);
      }
    }
  }
}

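// Dispatch summary: %_Foo(...) forms whose ids appear in
// FOR_EACH_FULL_CODE_INTRINSIC expand to the Emit##Name fast paths defined
// above; every other %Foo(...) (and any unhandled %_Foo) pushes its
// arguments and goes through a generic Runtime::kFoo C++ call, while
// is_jsruntime() calls load a function out of the native context and invoke
// it like an ordinary JS call.
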
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ CallRuntime(is_strict(language_mode())
                           ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy,
                       2);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = t0;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(scratch, result_register());
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;

  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Subu(v0, v0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectConstant we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}

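// Postfix stack discipline used above, sketched for a named property: a smi
// 0 placeholder is reserved under the receiver before the property load and
// later overwritten with the original value once it is known, i.e.
//
//   sp[0]: receiver             (still needed by the store IC)
//   sp[4]: smi 0 placeholder -> old value  (sw v0, MemOperand(sp, kPointerSize))
//
// so after the store IC runs, PlugTOS() can hand the saved original value to
// the surrounding expression.
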
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

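// Worth noting for the "undefined" and "object" arms above: undetectable
// objects (e.g. document.all) answer typeof as "undefined" and are excluded
// from "object", which is why both arms test the Map::kIsUndetectable bit
// with opposite polarity.
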
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Context::IN_BUILTIN_INDEX, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ mov(a0, result_register());
      __ pop(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(at, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(at), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
                            isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}

Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}

void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}

// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ClearPendingMessage();
}

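// "Cooking" above keeps the return address GC-safe: the raw ra would be an
// interior code pointer, so what is saved is the smi-encoded offset
//
//   cooked = (ra - code_object_start) << 1   // Addu(a1, a1, a1) doubles it
//
// which ExitFinallyBlock reverses with an arithmetic shift and an add, so a
// moving collector can relocate the code object while the finally block runs.
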
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address and return.
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}

void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}

void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
  DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
        Operand(SmiFromSlot(slot)));
}


#undef __

void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}

BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
          Assembler::target_address_at(pc_immediate_load_address)) ==
      reinterpret_cast<uint32_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}

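// Decoding matrix used above (branch instruction at pc - 6 * kInstrSize,
// lui/ori immediate at pc - 4 * kInstrSize):
//
//   instr at branch_address    lui/ori target        state
//   ------------------------   -------------------   ---------------------
//   slt (not an add-imm)       InterruptCheck        INTERRUPT
//   addiu at, zero_reg, 1      OnStackReplacement    ON_STACK_REPLACEMENT
//   addiu at, zero_reg, 1      OsrAfterStackCheck    OSR_AFTER_STACK_CHECK
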
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS