// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

#define __ ACCESS_MASM(masm_)
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (the raw 16-bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
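//
// Worked example (illustrative values only): a delta of 70000 instructions
// would be encoded as register code 70000 / 0xffff == 1 and immediate
// 70000 % 0xffff == 4465, i.e. as "andi zero_reg, at, 4465", since register
// code 1 is 'at' on MIPS. Decoding reverses this: 1 * 0xffff + 4465 == 70000.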
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }
  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }
  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
      info_emitted_ = true;
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
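//
// A rough sketch of the frame this prologue builds, assuming the standard
// JavaScriptFrameConstants layout (frames-mips.h is authoritative):
//
//   fp + kCallerSPOffset + n : incoming parameters and receiver
//   fp + 1 * kPointerSize    : caller's return address (ra)
//   fp + 0                   : caller's fp
//   fp - 1 * kPointerSize    : context (cp)
//   fp - 2 * kPointerSize    : the JS function (a1)
//   fp - 3 * kPointerSize    : first local / expression stack slot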
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
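      // Worked example (illustrative numbers): with 70 locals and
      // kMaxPushes == 32, the loop above runs 70 / 32 == 2 iterations
      // (64 slots) and the remainder path stores the final 70 % 32 == 6
      // undefined values.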
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Addu(a3, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a2, Operand(Smi::FromInt(num_parameters)));
    __ li(a1, Operand(Smi::FromInt(rest_index)));
    __ Push(a3, a2, a1);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::HasNewTarget has_new_target =
        IsSubclassConstructor(info->function()->kind())
            ? ArgumentsAccessStub::HAS_NEW_TARGET
            : ArgumentsAccessStub::NO_NEW_TARGET;
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be, so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
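  // Illustrative numbers only: assuming kCodeSizeMultiplier == 149 on MIPS,
  // a loop body of 1490 bytes yields a weight of 1490 / 149 == 10, clamped
  // to [1, kMaxBackEdgeWeight]. Larger loop bodies therefore drain the
  // interrupt budget faster per back edge.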
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      int32_t sp_delta = arg_count * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
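  // The ToBoolean stub leaves its result in v0; the comparison below sends
  // control to if_true exactly when that result is nonzero.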
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
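  // Emit the minimum number of branches for the surrounding control flow: if
  // one of the target labels is the fall-through position, a single
  // conditional branch (possibly with a negated condition) suffices;
  // otherwise both a conditional and an unconditional branch are needed.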
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
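  // Illustrative example (MIPS32, kPointerSize == 4): for a function with
  // two parameters, parameter #0 resolves to fp + (-0 + (2 + 1) * 4) =
  // fp + 12, just above the caller's frame, while locals resolve to
  // negative offsets below kLocal0Offset.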
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ModuleDescriptor* descriptor = declaration->module()->descriptor();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(descriptor->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->ScriptScope()));
  __ lw(a1, ContextOperand(a1, descriptor->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
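  // At this point the loop's working state occupies five stack slots
  // (derived from the pushes above):
  //   sp[0] : current index (smi)
  //   sp[1] : array length (smi)
  //   sp[2] : fixed array of keys
  //   sp[3] : map of the enumerable, or smi 1 (slow check) / 0 (proxy)
  //   sp[4] : the enumerable object itself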

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ lw(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), v0);
              __ li(StoreDescriptor::NameRegister(),
                    Operand(isolate()->factory()->home_object_symbol()));
              __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp));
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(value, 2);
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;

      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(it->second->getter, 2);
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(it->second->setter, 3);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }
1795 // Object literals have two parts. The "static" part on the left contains no
1796 // computed property names, and so we can compute its map ahead of time; see
1797 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1798 // starts with the first computed property name, and continues with all
1799 // properties to its right. All the code from above initializes the static
1800 // component of the object literal, and arranges for the map of the result to
1801 // reflect the static order in which the keys appear. For the dynamic
1802 // properties, we compile them into a series of "SetOwnProperty" runtime
1803 // calls. This will preserve insertion order.
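  // Illustration (hypothetical input, not taken from this file): in
  //   var o = {a: 1, b: 2, [f()]: 3, c: 4};
  // 'a' and 'b' form the static part whose map is baked into the boilerplate,
  // while [f()] and every property after it ('c' included) are defined below
  // with one runtime call each, preserving insertion order.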
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on the stack
      result_saved = true;
    }

    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(value, 2);

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ li(a0, Operand(Smi::FromInt(NONE)));
            __ push(a0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ li(a0, Operand(Smi::FromInt(NONE)));
          __ push(a0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
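  // For example (illustrative): in [0, 1, foo()], the constants 0 and 1 are
  // already part of the boilerplate, so only foo() is evaluated and stored
  // by the loop below.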
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(v0);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (has_fast_elements) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ li(a3, Operand(Smi::FromInt(i)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      const Register scratch = a1;
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ Move(scratch, result_register());
      VisitForAccumulatorValue(property->key());
      __ Push(scratch, result_register());
      if (expr->is_compound()) {
        const Register scratch1 = t0;
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
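  // E.g. (illustrative) for "o.x += y": the load of o.x below gets its own
  // bailout id, the binary operation another, and the final store a third,
  // so optimized code can deopt between any two of these steps.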
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
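      //
      // Rough sketch of the control flow emitted below (illustrative only;
      // the per-label comments give the exact stack protocol):
      //   received = undefined; f = 'next';
      //   loop: result = iter[f](received);           // l_next / l_call
      //         if (result.done) return result.value;
      //         received = yield result;              // l_suspend / l_resume
      //         goto loop;
      //   a throw into the generator sets f = 'throw' // l_catch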
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);  // exception
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));           // iter
      __ Push(load_name, a3, a0);                    // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);  // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(a0);  // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);  // g
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);  // result
      EmitReturnSequence();

      __ bind(&l_resume);  // received in a0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));          // iter
      __ Push(load_name, a3, a0);                    // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ lw(load_receiver, MemOperand(sp, kPointerSize));
      __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
      }
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Move(load_receiver, v0);

      __ push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);  // v0=result.done
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);     // v0=result.value
      context()->DropAndPlug(2, v0);  // drop iter and g
      break;
    }
  }
}

void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);  // generator object

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  DCHECK(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  __ bind(&done);
  context()->Plug(result_register());
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
  __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}

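// Note: the object assembled by EmitCreateIteratorResult above is the
// JS-visible iterator result, i.e. roughly (illustrative)
//   { value: <popped operand>, done: <true|false> }
// with its map and the empty properties/elements arrays taken from the
// native context.
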
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}

void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}

void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetSourcePosition(prop->position());

  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub (BinaryOpIC).
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      // Clearing the tag bit after the arithmetic shift leaves a valid smi.
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  // Constructor is in v0.
  DCHECK(lit != NULL);
  __ push(v0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = a1;
  __ lw(scratch,
        FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
  __ push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ lw(scratch, MemOperand(sp, 0));  // prototype
    }
    __ push(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));
    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2);

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;

      case ObjectLiteral::Property::SETTER:
        __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
        __ push(a0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;

      default:
        UNREACHABLE();
    }
  }

  // prototype
  __ CallRuntime(Runtime::kToFastProperties, 1);

  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  __ pop(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(prop->obj()->AsSuperReference());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ lw(v0, MemOperand(sp, kPointerSize));        // value
      __ lw(scratch2, MemOperand(sp, 0));             // this
      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(prop->obj()->AsSuperReference());
      __ Push(result_register());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
      __ lw(scratch, MemOperand(sp, 0));  // home_object
      __ sw(scratch, MemOperand(sp, kPointerSize));
      __ sw(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      __ Pop(StoreDescriptor::ValueRegister(),
             StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Context and name.
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, a1);
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&assign, ne, a3, Operand(t0));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, language mode.
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  } else if (IsSignallingAssignmentToConst(var, op, language_mode())) {
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);
  }
}

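// Note: the branches in EmitVariableAssignment above correspond to cases
// like the following (illustrative JS, not from this file):
//   x = 1;         // unallocated: global store through the store IC
//   const x = 1;   // legacy const init: store only if the slot is the hole
//   let y; y = 1;  // let, non-init: hole check throws a ReferenceError (TDZ)
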
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  __ pop(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(v0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy),
                 4);
}

void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(v0);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), v0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(expr->obj()->AsSuperReference());
      __ Push(result_register());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), v0);
      __ pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(expr->obj()->AsSuperReference());
      __ Push(result_register());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}

void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  __ Push(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}

void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}

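// Note on EmitCall above (sketch of the stack layout): the callee sits below
// the receiver and the arg_count arguments, which is why it is reloaded into
// a1 from (arg_count + 1) * kPointerSize above sp before the CallIC.
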
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t3: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
  }

  // t2: the enclosing function.
  __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t1: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));

  // t0: the language mode.
  __ li(t0, Operand(Smi::FromInt(language_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(t3);
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}

void FullCodeGenerator::EmitLoadSuperConstructor() {
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(a0);
  __ CallRuntime(Runtime::kGetPrototype, 1);
}

void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));

      PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    DCHECK(!context_register().is(a2));
    __ li(a2, Operand(proxy->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else if (call_type == Call::SUPER_CALL) {
    EmitSuperConstructorCall(expr);
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperReference());
  VisitForStackValue(expr->expression());
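  // E.g. (illustrative): for "new f(g())", f is evaluated and pushed here
  // before g() runs below, so a side effect in g cannot change which
  // constructor is invoked.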

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  if (!ValidateSuperCall(expr)) return;
  Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
  GetVar(result_register(), new_target_var);
  __ Push(result_register());

  EmitLoadSuperConstructor();
  __ push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    UNREACHABLE();
    /* TODO(dslomov): support pretenuring.
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));

  CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  RecordJSReturnSite(expr);

  SuperReference* super_ref = expr->expression()->AsSuperReference();
  Variable* this_var = super_ref->this_var()->var();
  GetVar(a1, this_var);
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  Label uninitialized_this;
  __ Branch(&uninitialized_this, eq, a1, Operand(at));
  __ li(a0, Operand(this_var->name()));
  __ Push(a0);
  __ CallRuntime(Runtime::kThrowReferenceError, 1);
  __ bind(&uninitialized_this);

  EmitVariableAssignment(this_var, Token::INIT_CONST);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of t2 to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

3597 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3598 ZoneList<Expression*>* args = expr->arguments();
3599 DCHECK(args->length() == 1);
3601 VisitForAccumulatorValue(args->at(0));
3603 Label materialize_true, materialize_false;
3604 Label* if_true = NULL;
3605 Label* if_false = NULL;
3606 Label* fall_through = NULL;
3607 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3608 &if_false, &fall_through);
3610 __ JumpIfSmi(v0, if_false);
3611 Register map = a1;
3612 Register type_reg = a2;
3613 __ GetObjectType(v0, map, type_reg);
3614 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
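// Biasing the instance type by FIRST_JS_PROXY_TYPE lets the following
// unsigned (ls) comparison test the whole [FIRST, LAST] proxy range with a
// single branch instead of two.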
3615 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3616 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3617 if_true, if_false, fall_through);
3619 context()->Plug(if_true, if_false);
3623 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3624 DCHECK(expr->arguments()->length() == 0);
3626 Label materialize_true, materialize_false;
3627 Label* if_true = NULL;
3628 Label* if_false = NULL;
3629 Label* fall_through = NULL;
3630 context()->PrepareTest(&materialize_true, &materialize_false,
3631 &if_true, &if_false, &fall_through);
3633 // Get the frame pointer for the calling frame.
3634 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3636 // Skip the arguments adaptor frame if it exists.
3637 Label check_frame_marker;
3638 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3639 __ Branch(&check_frame_marker, ne,
3640 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3641 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3643 // Check the marker in the calling frame.
3644 __ bind(&check_frame_marker);
3645 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3646 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3647 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3648 if_true, if_false, fall_through);
3650 context()->Plug(if_true, if_false);
3654 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3655 ZoneList<Expression*>* args = expr->arguments();
3656 DCHECK(args->length() == 2);
3658 // Load the two objects into registers and perform the comparison.
3659 VisitForStackValue(args->at(0));
3660 VisitForAccumulatorValue(args->at(1));
3662 Label materialize_true, materialize_false;
3663 Label* if_true = NULL;
3664 Label* if_false = NULL;
3665 Label* fall_through = NULL;
3666 context()->PrepareTest(&materialize_true, &materialize_false,
3667 &if_true, &if_false, &fall_through);
3669 __ pop(a1);
3670 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3671 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3673 context()->Plug(if_true, if_false);
3677 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3678 ZoneList<Expression*>* args = expr->arguments();
3679 DCHECK(args->length() == 1);
3681 // ArgumentsAccessStub expects the key in a1 and the formal
3682 // parameter count in a0.
3683 VisitForAccumulatorValue(args->at(0));
3684 __ mov(a1, v0);
3685 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3686 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3687 __ CallStub(&stub);
3688 context()->Plug(v0);
3692 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3693 DCHECK(expr->arguments()->length() == 0);
3694 Label exit;
3695 // Get the number of formal parameters.
3696 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3698 // Check if the calling frame is an arguments adaptor frame.
3699 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3700 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3701 __ Branch(&exit, ne, a3,
3702 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3704 // Arguments adaptor case: Read the arguments length from the
3705 // adaptor frame.
3706 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3708 __ bind(&exit);
3709 context()->Plug(v0);
3713 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3714 ZoneList<Expression*>* args = expr->arguments();
3715 DCHECK(args->length() == 1);
3716 Label done, null, function, non_function_constructor;
3718 VisitForAccumulatorValue(args->at(0));
3720 // If the object is a smi, we return null.
3721 __ JumpIfSmi(v0, &null);
3723 // Check that the object is a JS object but take special care of JS
3724 // functions to make sure they have 'Function' as their class.
3725 // Assume that there are only two callable types, and one of them is at
3726 // either end of the type range for JS object types. Saves extra comparisons.
3727 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3728 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3729 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3731 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3732 FIRST_SPEC_OBJECT_TYPE + 1);
3733 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3735 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3736 LAST_SPEC_OBJECT_TYPE - 1);
3737 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3738 // Assume that there is no larger type.
3739 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3741 // Check if the constructor in the map is a JS function.
3742 __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3743 __ GetObjectType(v0, a1, a1);
3744 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3746 // v0 now contains the constructor function. Grab the
3747 // instance class name from there.
3748 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3749 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3750 __ Branch(&done);
3752 // Functions have class 'Function'.
3753 __ bind(&function);
3754 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3755 __ jmp(&done);
3757 // Objects with a non-function constructor have class 'Object'.
3758 __ bind(&non_function_constructor);
3759 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3760 __ jmp(&done);
3762 // Non-JS objects have class null.
3763 __ bind(&null);
3764 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3766 // All done.
3767 __ bind(&done);
3769 context()->Plug(v0);
3773 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3774 // Load the arguments on the stack and call the stub.
3775 SubStringStub stub(isolate());
3776 ZoneList<Expression*>* args = expr->arguments();
3777 DCHECK(args->length() == 3);
3778 VisitForStackValue(args->at(0));
3779 VisitForStackValue(args->at(1));
3780 VisitForStackValue(args->at(2));
3781 __ CallStub(&stub);
3782 context()->Plug(v0);
3786 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3787 // Load the arguments on the stack and call the stub.
3788 RegExpExecStub stub(isolate());
3789 ZoneList<Expression*>* args = expr->arguments();
3790 DCHECK(args->length() == 4);
3791 VisitForStackValue(args->at(0));
3792 VisitForStackValue(args->at(1));
3793 VisitForStackValue(args->at(2));
3794 VisitForStackValue(args->at(3));
3795 __ CallStub(&stub);
3796 context()->Plug(v0);
3800 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3801 ZoneList<Expression*>* args = expr->arguments();
3802 DCHECK(args->length() == 1);
3804 VisitForAccumulatorValue(args->at(0)); // Load the object.
3806 Label done;
3807 // If the object is a smi, return the object.
3808 __ JumpIfSmi(v0, &done);
3809 // If the object is not a value type, return the object.
3810 __ GetObjectType(v0, a1, a1);
3811 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3813 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3815 __ bind(&done);
3816 context()->Plug(v0);
3820 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3821 ZoneList<Expression*>* args = expr->arguments();
3822 DCHECK(args->length() == 2);
3823 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3824 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3826 VisitForAccumulatorValue(args->at(0)); // Load the object.
3828 Label runtime, done, not_date_object;
3829 Register object = v0;
3830 Register result = v0;
3831 Register scratch0 = t5;
3832 Register scratch1 = a1;
3834 __ JumpIfSmi(object, &not_date_object);
3835 __ GetObjectType(object, scratch1, scratch1);
3836 __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3838 if (index->value() == 0) {
3839 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3841 } else {
3842 if (index->value() < JSDate::kFirstUncachedField) {
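// Cached date fields are only valid while the JSDate's cache stamp matches
// the isolate-wide date cache stamp, which is bumped when the cache is
// invalidated (e.g. on a timezone change); on a mismatch fall through to
// the runtime path below.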
3843 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3844 __ li(scratch1, Operand(stamp));
3845 __ lw(scratch1, MemOperand(scratch1));
3846 __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3847 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3848 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3849 kPointerSize * index->value()));
3850 __ jmp(&done);
3851 }
3852 __ bind(&runtime);
3853 __ PrepareCallCFunction(2, scratch1);
3854 __ li(a1, Operand(index));
3855 __ Move(a0, object);
3856 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3857 __ jmp(&done);
3858 }
3860 __ bind(&not_date_object);
3861 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3862 __ bind(&done);
3863 context()->Plug(v0);
3867 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3868 ZoneList<Expression*>* args = expr->arguments();
3869 DCHECK_EQ(3, args->length());
3871 Register string = v0;
3872 Register index = a1;
3873 Register value = a2;
3875 VisitForStackValue(args->at(0)); // index
3876 VisitForStackValue(args->at(1)); // value
3877 VisitForAccumulatorValue(args->at(2)); // string
3878 __ Pop(index, value);
3880 if (FLAG_debug_code) {
3881 __ SmiTst(value, at);
3882 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3883 __ SmiTst(index, at);
3884 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3885 __ SmiUntag(index, index);
3886 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3887 Register scratch = t5;
3888 __ EmitSeqStringSetCharCheck(
3889 string, index, value, scratch, one_byte_seq_type);
3890 __ SmiTag(index, index);
3891 }
3893 __ SmiUntag(value, value);
3894 __ Addu(at,
3895 string,
3896 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3897 __ SmiUntag(index);
3898 __ Addu(at, at, index);
3899 __ sb(value, MemOperand(at));
3900 context()->Plug(string);
3904 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3905 ZoneList<Expression*>* args = expr->arguments();
3906 DCHECK_EQ(3, args->length());
3908 Register string = v0;
3909 Register index = a1;
3910 Register value = a2;
3912 VisitForStackValue(args->at(0)); // index
3913 VisitForStackValue(args->at(1)); // value
3914 VisitForAccumulatorValue(args->at(2)); // string
3915 __ Pop(index, value);
3917 if (FLAG_debug_code) {
3918 __ SmiTst(value, at);
3919 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3920 __ SmiTst(index, at);
3921 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3922 __ SmiUntag(index, index);
3923 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3924 Register scratch = t5;
3925 __ EmitSeqStringSetCharCheck(
3926 string, index, value, scratch, two_byte_seq_type);
3927 __ SmiTag(index, index);
3928 }
3930 __ SmiUntag(value, value);
3931 __ Addu(at,
3932 string,
3933 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3934 __ Addu(at, at, index);
3935 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
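// With a smi tag shift of one bit (asserted above), the still smi-tagged
// index is already index * 2, which is exactly the byte offset of a
// two-byte character, so no untagging is needed here.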
3936 __ sh(value, MemOperand(at));
3937 context()->Plug(string);
3941 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3942 // Load the arguments on the stack and call the runtime function.
3943 ZoneList<Expression*>* args = expr->arguments();
3944 DCHECK(args->length() == 2);
3945 VisitForStackValue(args->at(0));
3946 VisitForStackValue(args->at(1));
3947 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3948 __ CallStub(&stub);
3949 context()->Plug(v0);
3953 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3954 ZoneList<Expression*>* args = expr->arguments();
3955 DCHECK(args->length() == 2);
3957 VisitForStackValue(args->at(0)); // Load the object.
3958 VisitForAccumulatorValue(args->at(1)); // Load the value.
3959 __ pop(a1); // v0 = value. a1 = object.
3961 Label done;
3962 // If the object is a smi, return the value.
3963 __ JumpIfSmi(a1, &done);
3965 // If the object is not a value type, return the value.
3966 __ GetObjectType(a1, a2, a2);
3967 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3969 // Store the value.
3970 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3971 // Update the write barrier. Save the value as it will be
3972 // overwritten by the write barrier code and is needed afterward.
3973 __ mov(a2, v0);
3974 __ RecordWriteField(
3975 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3977 __ bind(&done);
3978 context()->Plug(v0);
3982 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3983 ZoneList<Expression*>* args = expr->arguments();
3984 DCHECK_EQ(args->length(), 1);
3986 // Load the argument into a0 and call the stub.
3987 VisitForAccumulatorValue(args->at(0));
3988 __ mov(a0, result_register());
3990 NumberToStringStub stub(isolate());
3991 __ CallStub(&stub);
3992 context()->Plug(v0);
3996 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3997 ZoneList<Expression*>* args = expr->arguments();
3998 DCHECK(args->length() == 1);
4000 VisitForAccumulatorValue(args->at(0));
4002 Label done;
4003 StringCharFromCodeGenerator generator(v0, a1);
4004 generator.GenerateFast(masm_);
4005 __ jmp(&done);
4007 NopRuntimeCallHelper call_helper;
4008 generator.GenerateSlow(masm_, call_helper);
4010 __ bind(&done);
4011 context()->Plug(a1);
4015 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4016 ZoneList<Expression*>* args = expr->arguments();
4017 DCHECK(args->length() == 2);
4019 VisitForStackValue(args->at(0));
4020 VisitForAccumulatorValue(args->at(1));
4021 __ mov(a0, result_register());
4023 Register object = a1;
4024 Register index = a0;
4025 Register result = v0;
4027 __ pop(object);
4029 Label need_conversion;
4030 Label index_out_of_range;
4031 Label done;
4032 StringCharCodeAtGenerator generator(object,
4033 index,
4034 result,
4035 &need_conversion,
4036 &need_conversion,
4037 &index_out_of_range,
4038 STRING_INDEX_IS_NUMBER);
4039 generator.GenerateFast(masm_);
4040 __ jmp(&done);
4042 __ bind(&index_out_of_range);
4043 // When the index is out of range, the spec requires us to return
4044 // NaN.
4045 __ LoadRoot(result, Heap::kNanValueRootIndex);
4046 __ jmp(&done);
4048 __ bind(&need_conversion);
4049 // Load the undefined value into the result register, which will
4050 // trigger conversion.
4051 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4052 __ jmp(&done);
4054 NopRuntimeCallHelper call_helper;
4055 generator.GenerateSlow(masm_, call_helper);
4057 __ bind(&done);
4058 context()->Plug(result);
4062 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4063 ZoneList<Expression*>* args = expr->arguments();
4064 DCHECK(args->length() == 2);
4066 VisitForStackValue(args->at(0));
4067 VisitForAccumulatorValue(args->at(1));
4068 __ mov(a0, result_register());
4070 Register object = a1;
4071 Register index = a0;
4072 Register scratch = a3;
4073 Register result = v0;
4075 __ pop(object);
4077 Label need_conversion;
4078 Label index_out_of_range;
4079 Label done;
4080 StringCharAtGenerator generator(object,
4081 index,
4082 scratch,
4083 result,
4084 &need_conversion,
4085 &need_conversion,
4086 &index_out_of_range,
4087 STRING_INDEX_IS_NUMBER);
4088 generator.GenerateFast(masm_);
4089 __ jmp(&done);
4091 __ bind(&index_out_of_range);
4092 // When the index is out of range, the spec requires us to return
4093 // the empty string.
4094 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4095 __ jmp(&done);
4097 __ bind(&need_conversion);
4098 // Move smi zero into the result register, which will trigger
4099 // conversion.
4100 __ li(result, Operand(Smi::FromInt(0)));
4101 __ jmp(&done);
4103 NopRuntimeCallHelper call_helper;
4104 generator.GenerateSlow(masm_, call_helper);
4106 __ bind(&done);
4107 context()->Plug(result);
4111 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4112 ZoneList<Expression*>* args = expr->arguments();
4113 DCHECK_EQ(2, args->length());
4114 VisitForStackValue(args->at(0));
4115 VisitForAccumulatorValue(args->at(1));
4118 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4119 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4120 __ CallStub(&stub);
4121 context()->Plug(v0);
4125 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4126 ZoneList<Expression*>* args = expr->arguments();
4127 DCHECK_EQ(2, args->length());
4129 VisitForStackValue(args->at(0));
4130 VisitForStackValue(args->at(1));
4132 StringCompareStub stub(isolate());
4133 __ CallStub(&stub);
4134 context()->Plug(v0);
4138 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4139 ZoneList<Expression*>* args = expr->arguments();
4140 DCHECK(args->length() >= 2);
4142 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4143 for (int i = 0; i < arg_count + 1; i++) {
4144 VisitForStackValue(args->at(i));
4145 }
4146 VisitForAccumulatorValue(args->last()); // Function.
4148 Label runtime, done;
4149 // Check for non-function argument (including proxy).
4150 __ JumpIfSmi(v0, &runtime);
4151 __ GetObjectType(v0, a1, a1);
4152 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4154 // InvokeFunction requires the function in a1. Move it in there.
4155 __ mov(a1, result_register());
4156 ParameterCount count(arg_count);
4157 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4158 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4159 __ jmp(&done);
4161 __ bind(&runtime);
4162 __ push(v0);
4163 __ CallRuntime(Runtime::kCall, args->length());
4164 __ bind(&done);
4166 context()->Plug(v0);
4170 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4171 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4172 GetVar(result_register(), new_target_var);
4173 __ Push(result_register());
4175 EmitLoadSuperConstructor();
4176 __ Push(result_register());
4178 // Check if the calling frame is an arguments adaptor frame.
4179 Label adaptor_frame, args_set_up, runtime;
4180 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4181 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4182 __ Branch(&adaptor_frame, eq, a3,
4183 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4184 // default constructor has no arguments, so no adaptor frame means no args.
4185 __ mov(a0, zero_reg);
4186 __ Branch(&args_set_up);
4188 // Copy arguments from adaptor frame.
4190 __ bind(&adaptor_frame);
4191 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4192 __ SmiUntag(a1, a1);
4194 // Subtract 1 from arguments count, for new.target.
4195 __ Addu(a1, a1, Operand(-1));
4196 __ mov(a0, a1);
4198 // Get arguments pointer in a2.
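// a2 = caller_fp + kCallerSPOffset + argc * kPointerSize, i.e. one slot
// past the last argument; the copy loop below then walks it back down.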
4199 __ sll(at, a1, kPointerSizeLog2);
4200 __ addu(a2, a2, at);
4201 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4202 Label loop;
4203 __ bind(&loop);
4204 // Pre-decrement a2 with kPointerSize on each iteration.
4205 // Pre-decrement in order to skip receiver.
4206 __ Addu(a2, a2, Operand(-kPointerSize));
4207 __ lw(a3, MemOperand(a2));
4208 __ push(a3);
4209 __ Addu(a1, a1, Operand(-1));
4210 __ Branch(&loop, ne, a1, Operand(zero_reg));
4213 __ bind(&args_set_up);
4214 __ sll(at, a0, kPointerSizeLog2);
4215 __ Addu(at, at, Operand(sp));
4216 __ lw(a1, MemOperand(at, 0));
4217 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4219 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4220 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4222 __ Drop(1);
4224 context()->Plug(result_register());
4228 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4229 RegExpConstructResultStub stub(isolate());
4230 ZoneList<Expression*>* args = expr->arguments();
4231 DCHECK(args->length() == 3);
4232 VisitForStackValue(args->at(0));
4233 VisitForStackValue(args->at(1));
4234 VisitForAccumulatorValue(args->at(2));
4235 __ mov(a0, result_register());
4236 __ pop(a1);
4237 __ pop(a2);
4238 __ CallStub(&stub);
4239 context()->Plug(v0);
4243 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4244 ZoneList<Expression*>* args = expr->arguments();
4245 DCHECK_EQ(2, args->length());
4247 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4248 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4250 Handle<FixedArray> jsfunction_result_caches(
4251 isolate()->native_context()->jsfunction_result_caches());
4252 if (jsfunction_result_caches->length() <= cache_id) {
4253 __ Abort(kAttemptToUseUndefinedCache);
4254 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4255 context()->Plug(v0);
4256 return;
4257 }
4259 VisitForAccumulatorValue(args->at(1));
4261 Register key = v0;
4262 Register cache = a1;
4263 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4264 __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4265 __ lw(cache,
4266 ContextOperand(
4267 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4268 __ lw(cache,
4269 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4272 Label done, not_found;
4273 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
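// The cache is a fixed array of (key, value) pairs with a smi "finger"
// field remembering the last hit, so the fast path below only probes the
// entry under the finger: key at a3, value one pointer after it.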
4274 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4275 // a2 now holds finger offset as a smi.
4276 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4277 // a3 now points to the start of fixed array elements.
4278 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
4279 __ addu(a3, a3, at);
4280 // a3 now points to key of indexed element of cache.
4281 __ lw(a2, MemOperand(a3));
4282 __ Branch(&not_found, ne, key, Operand(a2));
4284 __ lw(v0, MemOperand(a3, kPointerSize));
4285 __ Branch(&done);
4287 __ bind(&not_found);
4288 // Call runtime to perform the lookup.
4289 __ Push(cache, key);
4290 __ CallRuntime(Runtime::kGetFromCache, 2);
4292 __ bind(&done);
4293 context()->Plug(v0);
4297 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4298 ZoneList<Expression*>* args = expr->arguments();
4299 VisitForAccumulatorValue(args->at(0));
4301 Label materialize_true, materialize_false;
4302 Label* if_true = NULL;
4303 Label* if_false = NULL;
4304 Label* fall_through = NULL;
4305 context()->PrepareTest(&materialize_true, &materialize_false,
4306 &if_true, &if_false, &fall_through);
4308 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4309 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4311 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4312 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4314 context()->Plug(if_true, if_false);
4318 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4319 ZoneList<Expression*>* args = expr->arguments();
4320 DCHECK(args->length() == 1);
4321 VisitForAccumulatorValue(args->at(0));
4323 __ AssertString(v0);
4325 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4326 __ IndexFromHash(v0, v0);
4328 context()->Plug(v0);
4332 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4333 Label bailout, done, one_char_separator, long_separator,
4334 non_trivial_array, not_size_one_array, loop,
4335 empty_separator_loop, one_char_separator_loop,
4336 one_char_separator_loop_entry, long_separator_loop;
4337 ZoneList<Expression*>* args = expr->arguments();
4338 DCHECK(args->length() == 2);
4339 VisitForStackValue(args->at(1));
4340 VisitForAccumulatorValue(args->at(0));
4342 // All aliases of the same register have disjoint lifetimes.
4343 Register array = v0;
4344 Register elements = no_reg; // Will be v0.
4345 Register result = no_reg; // Will be v0.
4346 Register separator = a1;
4347 Register array_length = a2;
4348 Register result_pos = no_reg; // Will be a2.
4349 Register string_length = a3;
4350 Register string = t0;
4351 Register element = t1;
4352 Register elements_end = t2;
4353 Register scratch1 = t3;
4354 Register scratch2 = t5;
4355 Register scratch3 = t4;
4357 // Separator operand is on the stack.
4360 // Check that the array is a JSArray.
4361 __ JumpIfSmi(array, &bailout);
4362 __ GetObjectType(array, scratch1, scratch2);
4363 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4365 // Check that the array has fast elements.
4366 __ CheckFastElements(scratch1, scratch2, &bailout);
4368 // If the array has length zero, return the empty string.
4369 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4370 __ SmiUntag(array_length);
4371 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4372 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4373 __ Branch(&done);
4375 __ bind(&non_trivial_array);
4377 // Get the FixedArray containing array's elements.
4378 elements = array;
4379 __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4380 array = no_reg; // End of array's live range.
4382 // Check that all array elements are sequential one-byte strings, and
4383 // accumulate the sum of their lengths, as a smi-encoded value.
4384 __ mov(string_length, zero_reg);
4385 __ Addu(element,
4386 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4387 __ sll(elements_end, array_length, kPointerSizeLog2);
4388 __ Addu(elements_end, element, elements_end);
4389 // Loop condition: while (element < elements_end).
4390 // Live values in registers:
4391 // elements: Fixed array of strings.
4392 // array_length: Length of the fixed array of strings (not smi)
4393 // separator: Separator string
4394 // string_length: Accumulated sum of string lengths (smi).
4395 // element: Current array element.
4396 // elements_end: Array end.
4397 if (generate_debug_code_) {
4398 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4399 Operand(zero_reg));
4400 }
4401 __ bind(&loop);
4402 __ lw(string, MemOperand(element));
4403 __ Addu(element, element, kPointerSize);
4404 __ JumpIfSmi(string, &bailout);
4405 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4406 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4407 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4408 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4409 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4410 __ BranchOnOverflow(&bailout, scratch3);
4411 __ Branch(&loop, lt, element, Operand(elements_end));
4413 // If array_length is 1, return elements[0], a string.
4414 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4415 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4416 __ Branch(&done);
4418 __ bind(&not_size_one_array);
4420 // Live values in registers:
4421 // separator: Separator string
4422 // array_length: Length of the array.
4423 // string_length: Sum of string lengths (smi).
4424 // elements: FixedArray of strings.
4426 // Check that the separator is a flat one-byte string.
4427 __ JumpIfSmi(separator, &bailout);
4428 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4429 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4430 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4432 // Add (separator length times array_length) - separator length to the
4433 // string_length to get the length of the result string. array_length is not
4434 // smi but the other values are, so the result is a smi.
4435 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4436 __ Subu(string_length, string_length, Operand(scratch1));
4437 __ Mul(scratch3, scratch2, array_length, scratch1);
4438 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4439 // zero.
4440 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4441 __ And(scratch3, scratch2, Operand(0x80000000));
4442 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
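// Together the two branches above ensure the upper 33 bits of the 64-bit
// product are zero, i.e. the accumulated length still fits in a positive
// smi after tagging.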
4443 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4444 __ BranchOnOverflow(&bailout, scratch3);
4445 __ SmiUntag(string_length);
4447 // Get first element in the array to free up the elements register to be used
4448 // for the result.
4449 __ Addu(element,
4450 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4451 result = elements; // End of live range for elements.
4453 // Live values in registers:
4454 // element: First array element
4455 // separator: Separator string
4456 // string_length: Length of result string (not smi)
4457 // array_length: Length of the array.
4458 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4459 elements_end, &bailout);
4460 // Prepare for looping. Set up elements_end to end of the array. Set
4461 // result_pos to the position of the result where to write the first
4462 // character.
4463 __ sll(elements_end, array_length, kPointerSizeLog2);
4464 __ Addu(elements_end, element, elements_end);
4465 result_pos = array_length; // End of live range for array_length.
4466 array_length = no_reg;
4467 __ Addu(result_pos,
4468 result,
4469 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4471 // Check the length of the separator.
4472 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4473 __ li(at, Operand(Smi::FromInt(1)));
4474 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4475 __ Branch(&long_separator, gt, scratch1, Operand(at));
4477 // Empty separator case.
4478 __ bind(&empty_separator_loop);
4479 // Live values in registers:
4480 // result_pos: the position to which we are currently copying characters.
4481 // element: Current array element.
4482 // elements_end: Array end.
4484 // Copy next array element to the result.
4485 __ lw(string, MemOperand(element));
4486 __ Addu(element, element, kPointerSize);
4487 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4488 __ SmiUntag(string_length);
4489 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4490 __ CopyBytes(string, result_pos, string_length, scratch1);
4491 // End while (element < elements_end).
4492 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4493 DCHECK(result.is(v0));
4494 __ Branch(&done);
4496 // One-character separator case.
4497 __ bind(&one_char_separator);
4498 // Replace separator with its one-byte character value.
4499 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4500 // Jump into the loop after the code that copies the separator, so the first
4501 // element is not preceded by a separator.
4502 __ jmp(&one_char_separator_loop_entry);
4504 __ bind(&one_char_separator_loop);
4505 // Live values in registers:
4506 // result_pos: the position to which we are currently copying characters.
4507 // element: Current array element.
4508 // elements_end: Array end.
4509 // separator: Single separator one-byte char (in lower byte).
4511 // Copy the separator character to the result.
4512 __ sb(separator, MemOperand(result_pos));
4513 __ Addu(result_pos, result_pos, 1);
4515 // Copy next array element to the result.
4516 __ bind(&one_char_separator_loop_entry);
4517 __ lw(string, MemOperand(element));
4518 __ Addu(element, element, kPointerSize);
4519 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4520 __ SmiUntag(string_length);
4521 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4522 __ CopyBytes(string, result_pos, string_length, scratch1);
4523 // End while (element < elements_end).
4524 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4525 DCHECK(result.is(v0));
4526 __ Branch(&done);
4528 // Long separator case (separator is more than one character). Entry is at the
4529 // label long_separator below.
4530 __ bind(&long_separator_loop);
4531 // Live values in registers:
4532 // result_pos: the position to which we are currently copying characters.
4533 // element: Current array element.
4534 // elements_end: Array end.
4535 // separator: Separator string.
4537 // Copy the separator to the result.
4538 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
4539 __ SmiUntag(string_length);
4540 __ Addu(string,
4541 separator,
4542 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4543 __ CopyBytes(string, result_pos, string_length, scratch1);
4545 __ bind(&long_separator);
4546 __ lw(string, MemOperand(element));
4547 __ Addu(element, element, kPointerSize);
4548 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4549 __ SmiUntag(string_length);
4550 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4551 __ CopyBytes(string, result_pos, string_length, scratch1);
4552 // End while (element < elements_end).
4553 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4554 DCHECK(result.is(v0));
4555 __ Branch(&done);
4557 __ bind(&bailout);
4558 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4559 __ bind(&done);
4560 context()->Plug(v0);
4564 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4565 DCHECK(expr->arguments()->length() == 0);
4566 ExternalReference debug_is_active =
4567 ExternalReference::debug_is_active_address(isolate());
4568 __ li(at, Operand(debug_is_active));
4569 __ lb(v0, MemOperand(at));
4570 __ SmiTag(v0);
4571 context()->Plug(v0);
4575 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4576 if (expr->function() != NULL &&
4577 expr->function()->intrinsic_type == Runtime::INLINE) {
4578 Comment cmnt(masm_, "[ InlineRuntimeCall");
4579 EmitInlineRuntimeCall(expr);
4580 return;
4581 }
4583 Comment cmnt(masm_, "[ CallRuntime");
4584 ZoneList<Expression*>* args = expr->arguments();
4585 int arg_count = args->length();
4587 if (expr->is_jsruntime()) {
4588 // Push the builtins object as the receiver.
4589 Register receiver = LoadDescriptor::ReceiverRegister();
4590 __ lw(receiver, GlobalObjectOperand());
4591 __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4592 __ push(receiver);
4594 // Load the function from the receiver.
4595 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4596 if (FLAG_vector_ics) {
4597 __ li(VectorLoadICDescriptor::SlotRegister(),
4598 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4599 CallLoadIC(NOT_CONTEXTUAL);
4600 } else {
4601 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4602 }
4604 // Push the target function under the receiver.
4605 __ lw(at, MemOperand(sp, 0));
4606 __ push(at);
4607 __ sw(v0, MemOperand(sp, kPointerSize));
4609 // Push the arguments ("left-to-right").
4610 int arg_count = args->length();
4611 for (int i = 0; i < arg_count; i++) {
4612 VisitForStackValue(args->at(i));
4613 }
4615 // Record source position of the IC call.
4616 SetSourcePosition(expr->position());
4617 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4618 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4619 __ CallStub(&stub);
4621 // Restore context register.
4622 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4624 context()->DropAndPlug(1, v0);
4625 } else {
4626 // Push the arguments ("left-to-right").
4627 for (int i = 0; i < arg_count; i++) {
4628 VisitForStackValue(args->at(i));
4629 }
4631 // Call the C runtime function.
4632 __ CallRuntime(expr->function(), arg_count);
4633 context()->Plug(v0);
4634 }
4635 }
4638 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4639 switch (expr->op()) {
4640 case Token::DELETE: {
4641 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4642 Property* property = expr->expression()->AsProperty();
4643 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4645 if (property != NULL) {
4646 VisitForStackValue(property->obj());
4647 VisitForStackValue(property->key());
4648 __ li(a1, Operand(Smi::FromInt(language_mode())));
4649 __ push(a1);
4650 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4651 context()->Plug(v0);
4652 } else if (proxy != NULL) {
4653 Variable* var = proxy->var();
4654 // Delete of an unqualified identifier is disallowed in strict mode
4655 // but "delete this" is allowed.
4656 DCHECK(is_sloppy(language_mode()) || var->is_this());
4657 if (var->IsUnallocated()) {
4658 __ lw(a2, GlobalObjectOperand());
4659 __ li(a1, Operand(var->name()));
4660 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4661 __ Push(a2, a1, a0);
4662 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4663 context()->Plug(v0);
4664 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4665 // Result of deleting non-global, non-dynamic variables is false.
4666 // The subexpression does not have side effects.
4667 context()->Plug(var->is_this());
4668 } else {
4669 // Non-global variable. Call the runtime to try to delete from the
4670 // context where the variable was introduced.
4671 DCHECK(!context_register().is(a2));
4672 __ li(a2, Operand(var->name()));
4673 __ Push(context_register(), a2);
4674 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4675 context()->Plug(v0);
4676 }
4677 } else {
4678 // Result of deleting non-property, non-variable reference is true.
4679 // The subexpression may have side effects.
4680 VisitForEffect(expr->expression());
4681 context()->Plug(true);
4682 }
4683 break;
4684 }
4686 case Token::VOID: {
4687 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4688 VisitForEffect(expr->expression());
4689 context()->Plug(Heap::kUndefinedValueRootIndex);
4690 break;
4691 }
4693 case Token::NOT: {
4694 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4695 if (context()->IsEffect()) {
4696 // Unary NOT has no side effects so it's only necessary to visit the
4697 // subexpression. Match the optimizing compiler by not branching.
4698 VisitForEffect(expr->expression());
4699 } else if (context()->IsTest()) {
4700 const TestContext* test = TestContext::cast(context());
4701 // The labels are swapped for the recursive call.
4702 VisitForControl(expr->expression(),
4703 test->false_label(),
4704 test->true_label(),
4705 test->fall_through());
4706 context()->Plug(test->true_label(), test->false_label());
4708 // We handle value contexts explicitly rather than simply visiting
4709 // for control and plugging the control flow into the context,
4710 // because we need to prepare a pair of extra administrative AST ids
4711 // for the optimizing compiler.
4712 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4713 Label materialize_true, materialize_false, done;
4714 VisitForControl(expr->expression(),
4715 &materialize_false,
4716 &materialize_true,
4717 &materialize_true);
4718 __ bind(&materialize_true);
4719 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4720 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4721 if (context()->IsStackValue()) __ push(v0);
4722 __ jmp(&done);
4723 __ bind(&materialize_false);
4724 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4725 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4726 if (context()->IsStackValue()) __ push(v0);
4727 __ bind(&done);
4728 }
4729 break;
4730 }
4732 case Token::TYPEOF: {
4733 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4734 { StackValueContext context(this);
4735 VisitForTypeofValue(expr->expression());
4736 }
4737 __ CallRuntime(Runtime::kTypeof, 1);
4738 context()->Plug(v0);
4739 break;
4740 }
4742 default:
4743 UNREACHABLE();
4744 }
4745 }
4748 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4749 DCHECK(expr->expression()->IsValidReferenceExpression());
4751 Comment cmnt(masm_, "[ CountOperation");
4752 SetSourcePosition(expr->position());
4754 Property* prop = expr->expression()->AsProperty();
4755 LhsKind assign_type = GetAssignType(prop);
4757 // Evaluate expression and get value.
4758 if (assign_type == VARIABLE) {
4759 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4760 AccumulatorValueContext context(this);
4761 EmitVariableLoad(expr->expression()->AsVariableProxy());
4762 } else {
4763 // Reserve space for result of postfix operation.
4764 if (expr->is_postfix() && !context()->IsEffect()) {
4765 __ li(at, Operand(Smi::FromInt(0)));
4766 __ push(at);
4767 }
4768 switch (assign_type) {
4769 case NAMED_PROPERTY: {
4770 // Put the object both on the stack and in the register.
4771 VisitForStackValue(prop->obj());
4772 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4773 EmitNamedPropertyLoad(prop);
4774 break;
4775 }
4777 case NAMED_SUPER_PROPERTY: {
4778 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4779 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4780 __ Push(result_register());
4781 const Register scratch = a1;
4782 __ lw(scratch, MemOperand(sp, kPointerSize));
4783 __ Push(scratch, result_register());
4784 EmitNamedSuperPropertyLoad(prop);
4785 break;
4786 }
4788 case KEYED_SUPER_PROPERTY: {
4789 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4790 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4791 const Register scratch = a1;
4792 const Register scratch1 = t0;
4793 __ Move(scratch, result_register());
4794 VisitForAccumulatorValue(prop->key());
4795 __ Push(scratch, result_register());
4796 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
4797 __ Push(scratch1, scratch, result_register());
4798 EmitKeyedSuperPropertyLoad(prop);
4799 break;
4800 }
4802 case KEYED_PROPERTY: {
4803 VisitForStackValue(prop->obj());
4804 VisitForStackValue(prop->key());
4805 __ lw(LoadDescriptor::ReceiverRegister(),
4806 MemOperand(sp, 1 * kPointerSize));
4807 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4808 EmitKeyedPropertyLoad(prop);
4809 break;
4810 }
4812 case VARIABLE:
4813 UNREACHABLE();
4814 }
4815 }
4817 // We need a second deoptimization point after loading the value
4818 // in case evaluating the property load may have a side effect.
4819 if (assign_type == VARIABLE) {
4820 PrepareForBailout(expr->expression(), TOS_REG);
4821 } else {
4822 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4823 }
4825 // Inline smi case if we are in a loop.
4826 Label stub_call, done;
4827 JumpPatchSite patch_site(masm_);
4829 int count_value = expr->op() == Token::INC ? 1 : -1;
4830 __ mov(a0, v0);
4831 if (ShouldInlineSmiCase(expr->op())) {
4832 Label slow;
4833 patch_site.EmitJumpIfNotSmi(v0, &slow);
4835 // Save result for postfix expressions.
4836 if (expr->is_postfix()) {
4837 if (!context()->IsEffect()) {
4838 // Save the result on the stack. If we have a named or keyed property
4839 // we store the result under the receiver that is currently on top
4840 // of the stack.
4841 switch (assign_type) {
4842 case VARIABLE:
4843 __ push(v0);
4844 break;
4845 case NAMED_PROPERTY:
4846 __ sw(v0, MemOperand(sp, kPointerSize));
4847 break;
4848 case NAMED_SUPER_PROPERTY:
4849 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4850 break;
4851 case KEYED_PROPERTY:
4852 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4853 break;
4854 case KEYED_SUPER_PROPERTY:
4855 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
4856 break;
4857 }
4858 }
4859 }
4861 Register scratch1 = a1;
4862 Register scratch2 = t0;
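// Fast path: add the smi-tagged count_value directly to the smi in v0;
// if the addition overflows the smi range, the code below restores the
// original operand and falls back to the BinaryOpIC stub.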
4863 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4864 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4865 __ BranchOnNoOverflow(&done, scratch2);
4866 // Call stub. Undo operation first.
4867 __ Move(v0, a0);
4868 __ jmp(&stub_call);
4869 __ bind(&slow);
4870 }
4871 ToNumberStub convert_stub(isolate());
4872 __ CallStub(&convert_stub);
4873 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4875 // Save result for postfix expressions.
4876 if (expr->is_postfix()) {
4877 if (!context()->IsEffect()) {
4878 // Save the result on the stack. If we have a named or keyed property
4879 // we store the result under the receiver that is currently on top
4881 switch (assign_type) {
4882 case VARIABLE:
4883 __ push(v0);
4884 break;
4885 case NAMED_PROPERTY:
4886 __ sw(v0, MemOperand(sp, kPointerSize));
4887 break;
4888 case NAMED_SUPER_PROPERTY:
4889 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4890 break;
4891 case KEYED_PROPERTY:
4892 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4893 break;
4894 case KEYED_SUPER_PROPERTY:
4895 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
4896 break;
4897 }
4898 }
4899 }
4901 __ bind(&stub_call);
4902 __ mov(a1, v0);
4903 __ li(a0, Operand(Smi::FromInt(count_value)));
4905 // Record position before stub call.
4906 SetSourcePosition(expr->position());
4908 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4909 CallIC(code, expr->CountBinOpFeedbackId());
4910 patch_site.EmitPatchInfo();
4911 __ bind(&done);
4913 // Store the value returned in v0.
4914 switch (assign_type) {
4915 case VARIABLE:
4916 if (expr->is_postfix()) {
4917 { EffectContext context(this);
4918 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4919 Token::ASSIGN);
4920 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4921 }
4923 // For all contexts except EffectConstant we have the result on
4924 // top of the stack.
4925 if (!context()->IsEffect()) {
4926 context()->PlugTOS();
4927 }
4928 } else {
4929 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4930 Token::ASSIGN);
4931 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4932 context()->Plug(v0);
4933 }
4934 break;
4935 case NAMED_PROPERTY: {
4936 __ mov(StoreDescriptor::ValueRegister(), result_register());
4937 __ li(StoreDescriptor::NameRegister(),
4938 Operand(prop->key()->AsLiteral()->value()));
4939 __ pop(StoreDescriptor::ReceiverRegister());
4940 CallStoreIC(expr->CountStoreFeedbackId());
4941 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4942 if (expr->is_postfix()) {
4943 if (!context()->IsEffect()) {
4944 context()->PlugTOS();
4945 }
4946 } else {
4947 context()->Plug(v0);
4948 }
4949 break;
4950 }
4951 case NAMED_SUPER_PROPERTY: {
4952 EmitNamedSuperPropertyStore(prop);
4953 if (expr->is_postfix()) {
4954 if (!context()->IsEffect()) {
4955 context()->PlugTOS();
4956 }
4957 } else {
4958 context()->Plug(v0);
4959 }
4960 break;
4961 }
4962 case KEYED_SUPER_PROPERTY: {
4963 EmitKeyedSuperPropertyStore(prop);
4964 if (expr->is_postfix()) {
4965 if (!context()->IsEffect()) {
4966 context()->PlugTOS();
4967 }
4968 } else {
4969 context()->Plug(v0);
4970 }
4971 break;
4972 }
4973 case KEYED_PROPERTY: {
4974 __ mov(StoreDescriptor::ValueRegister(), result_register());
4975 __ Pop(StoreDescriptor::ReceiverRegister(),
4976 StoreDescriptor::NameRegister());
4977 Handle<Code> ic =
4978 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4979 CallIC(ic, expr->CountStoreFeedbackId());
4980 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4981 if (expr->is_postfix()) {
4982 if (!context()->IsEffect()) {
4983 context()->PlugTOS();
4984 }
4985 } else {
4986 context()->Plug(v0);
4987 }
4988 break;
4989 }
4990 }
4991 }
4994 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4995 DCHECK(!context()->IsEffect());
4996 DCHECK(!context()->IsTest());
4997 VariableProxy* proxy = expr->AsVariableProxy();
4998 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4999 Comment cmnt(masm_, "[ Global variable");
5000 __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5001 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5002 if (FLAG_vector_ics) {
5003 __ li(VectorLoadICDescriptor::SlotRegister(),
5004 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5005 }
5006 // Use a regular load, not a contextual load, to avoid a reference
5007 // error.
5008 CallLoadIC(NOT_CONTEXTUAL);
5009 PrepareForBailout(expr, TOS_REG);
5010 context()->Plug(v0);
5011 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5012 Comment cmnt(masm_, "[ Lookup slot");
5013 Label done, slow;
5015 // Generate code for loading from variables potentially shadowed
5016 // by eval-introduced variables.
5017 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5019 __ bind(&slow);
5020 __ li(a0, Operand(proxy->name()));
5021 __ Push(cp, a0);
5022 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5023 PrepareForBailout(expr, TOS_REG);
5024 __ bind(&done);
5026 context()->Plug(v0);
5027 } else {
5028 // This expression cannot throw a reference error at the top level.
5029 VisitInDuplicateContext(expr);
5030 }
5031 }
5033 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5034 Expression* sub_expr,
5035 Handle<String> check) {
5036 Label materialize_true, materialize_false;
5037 Label* if_true = NULL;
5038 Label* if_false = NULL;
5039 Label* fall_through = NULL;
5040 context()->PrepareTest(&materialize_true, &materialize_false,
5041 &if_true, &if_false, &fall_through);
5043 { AccumulatorValueContext context(this);
5044 VisitForTypeofValue(sub_expr);
5045 }
5046 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5048 Factory* factory = isolate()->factory();
5049 if (String::Equals(check, factory->number_string())) {
5050 __ JumpIfSmi(v0, if_true);
5051 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5052 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5053 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5054 } else if (String::Equals(check, factory->string_string())) {
5055 __ JumpIfSmi(v0, if_false);
5056 // Check for undetectable objects => false.
5057 __ GetObjectType(v0, v0, a1);
5058 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5059 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5060 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5061 Split(eq, a1, Operand(zero_reg),
5062 if_true, if_false, fall_through);
5063 } else if (String::Equals(check, factory->symbol_string())) {
5064 __ JumpIfSmi(v0, if_false);
5065 __ GetObjectType(v0, v0, a1);
5066 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5067 } else if (String::Equals(check, factory->boolean_string())) {
5068 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5069 __ Branch(if_true, eq, v0, Operand(at));
5070 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5071 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5072 } else if (String::Equals(check, factory->undefined_string())) {
5073 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5074 __ Branch(if_true, eq, v0, Operand(at));
5075 __ JumpIfSmi(v0, if_false);
5076 // Check for undetectable objects => true.
5077 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5078 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5079 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5080 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5081 } else if (String::Equals(check, factory->function_string())) {
5082 __ JumpIfSmi(v0, if_false);
5083 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5084 __ GetObjectType(v0, v0, a1);
5085 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5086 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5087 if_true, if_false, fall_through);
5088 } else if (String::Equals(check, factory->object_string())) {
5089 __ JumpIfSmi(v0, if_false);
5090 __ LoadRoot(at, Heap::kNullValueRootIndex);
5091 __ Branch(if_true, eq, v0, Operand(at));
5092 // Check for JS objects => true.
5093 __ GetObjectType(v0, v0, a1);
5094 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5095 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5096 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5097 // Check for undetectable objects => false.
5098 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5099 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5100 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5101 } else {
5102 if (if_false != fall_through) __ jmp(if_false);
5103 }
5104 context()->Plug(if_true, if_false);
5108 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5109 Comment cmnt(masm_, "[ CompareOperation");
5110 SetSourcePosition(expr->position());
5112 // First we try a fast inlined version of the compare when one of
5113 // the operands is a literal.
5114 if (TryLiteralCompare(expr)) return;
5116 // Always perform the comparison for its control flow. Pack the result
5117 // into the expression's context after the comparison is performed.
5118 Label materialize_true, materialize_false;
5119 Label* if_true = NULL;
5120 Label* if_false = NULL;
5121 Label* fall_through = NULL;
5122 context()->PrepareTest(&materialize_true, &materialize_false,
5123 &if_true, &if_false, &fall_through);
5125 Token::Value op = expr->op();
5126 VisitForStackValue(expr->left());
5127 switch (op) {
5128 case Token::IN:
5129 VisitForStackValue(expr->right());
5130 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5131 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5132 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
5133 Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
5134 break;
5136 case Token::INSTANCEOF: {
5137 VisitForStackValue(expr->right());
5138 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5139 __ CallStub(&stub);
5140 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5141 // The stub returns 0 for true.
5142 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5143 break;
5144 }
5146 default: {
5147 VisitForAccumulatorValue(expr->right());
5148 Condition cc = CompareIC::ComputeCondition(op);
5149 __ mov(a0, result_register());
5150 __ pop(a1);
5152 bool inline_smi_code = ShouldInlineSmiCase(op);
5153 JumpPatchSite patch_site(masm_);
5154 if (inline_smi_code) {
5155 Label slow_case;
5156 __ Or(a2, a0, Operand(a1));
5157 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5158 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5159 __ bind(&slow_case);
5160 }
5161 // Record position and call the compare IC.
5162 SetSourcePosition(expr->position());
5163 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5164 CallIC(ic, expr->CompareOperationFeedbackId());
5165 patch_site.EmitPatchInfo();
5166 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5167 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5168 }
5169 }
5171 // Convert the result of the comparison into one expected for this
5172 // expression's context.
5173 context()->Plug(if_true, if_false);
5177 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5178 Expression* sub_expr,
5179 NilValue nil) {
5180 Label materialize_true, materialize_false;
5181 Label* if_true = NULL;
5182 Label* if_false = NULL;
5183 Label* fall_through = NULL;
5184 context()->PrepareTest(&materialize_true, &materialize_false,
5185 &if_true, &if_false, &fall_through);
5187 VisitForAccumulatorValue(sub_expr);
5188 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5189 __ mov(a0, result_register());
5190 if (expr->op() == Token::EQ_STRICT) {
5191 Heap::RootListIndex nil_value = nil == kNullValue ?
5192 Heap::kNullValueRootIndex :
5193 Heap::kUndefinedValueRootIndex;
5194 __ LoadRoot(a1, nil_value);
5195 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5196 } else {
5197 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5198 CallIC(ic, expr->CompareOperationFeedbackId());
5199 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5200 }
5201 context()->Plug(if_true, if_false);
5205 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5206 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5207 context()->Plug(v0);
5211 Register FullCodeGenerator::result_register() {
5212 return v0;
5213 }
5216 Register FullCodeGenerator::context_register() {
5217 return cp;
5218 }
5221 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5222 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5223 __ sw(value, MemOperand(fp, frame_offset));
5227 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5228 __ lw(dst, ContextOperand(cp, context_index));
5232 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5233 Scope* declaration_scope = scope()->DeclarationScope();
5234 if (declaration_scope->is_script_scope() ||
5235 declaration_scope->is_module_scope()) {
5236 // Contexts nested in the native context have a canonical empty function
5237 // as their closure, not the anonymous closure containing the global
5238 // code. Pass a smi sentinel and let the runtime look up the empty
5239 // function.
5240 __ li(at, Operand(Smi::FromInt(0)));
5241 } else if (declaration_scope->is_eval_scope()) {
5242 // Contexts created by a call to eval have the same closure as the
5243 // context calling eval, not the anonymous closure containing the eval
5244 // code. Fetch it from the context.
5245 __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5246 } else {
5247 DCHECK(declaration_scope->is_function_scope());
5248 __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5249 }
5250 __ push(at);
5254 // ----------------------------------------------------------------------------
5255 // Non-local control flow support.
5257 void FullCodeGenerator::EnterFinallyBlock() {
5258 DCHECK(!result_register().is(a1));
5259 // Store result register while executing finally block.
5260 __ push(result_register());
5261 // Cook return address in link register to stack (smi encoded Code* delta).
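// Storing a code-relative smi delta instead of the raw return address
// keeps the stack slot valid for the GC, which may move the code object
// while the finally block runs.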
5262 __ Subu(a1, ra, Operand(masm_->CodeObject()));
5263 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5264 STATIC_ASSERT(0 == kSmiTag);
5265 __ Addu(a1, a1, Operand(a1)); // Convert to smi.
5267 // Store the cooked return address while executing the finally block.
5268 __ push(a1);
5270 // Store pending message while executing finally block.
5271 ExternalReference pending_message_obj =
5272 ExternalReference::address_of_pending_message_obj(isolate());
5273 __ li(at, Operand(pending_message_obj));
5274 __ lw(a1, MemOperand(at));
5275 __ push(a1);
5277 ExternalReference has_pending_message =
5278 ExternalReference::address_of_has_pending_message(isolate());
5279 __ li(at, Operand(has_pending_message));
5280 __ lw(a1, MemOperand(at));
5281 __ SmiTag(a1);
5282 __ push(a1);
5284 ExternalReference pending_message_script =
5285 ExternalReference::address_of_pending_message_script(isolate());
5286 __ li(at, Operand(pending_message_script));
5287 __ lw(a1, MemOperand(at));
5288 __ push(a1);
5292 void FullCodeGenerator::ExitFinallyBlock() {
5293 DCHECK(!result_register().is(a1));
5294 // Restore pending message from stack.
5295 __ pop(a1);
5296 ExternalReference pending_message_script =
5297 ExternalReference::address_of_pending_message_script(isolate());
5298 __ li(at, Operand(pending_message_script));
5299 __ sw(a1, MemOperand(at));
5301 __ pop(a1);
5302 __ SmiUntag(a1);
5303 ExternalReference has_pending_message =
5304 ExternalReference::address_of_has_pending_message(isolate());
5305 __ li(at, Operand(has_pending_message));
5306 __ sw(a1, MemOperand(at));
5308 __ pop(a1);
5309 ExternalReference pending_message_obj =
5310 ExternalReference::address_of_pending_message_obj(isolate());
5311 __ li(at, Operand(pending_message_obj));
5312 __ sw(a1, MemOperand(at));
5314 // Restore the cooked return address from the stack.
5315 __ pop(a1);
5317 // Restore the result register, then uncook the return address and return.
5318 __ pop(result_register());
5319 DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
5320 __ sra(a1, a1, 1); // Un-smi-tag value.
5321 __ Addu(at, a1, Operand(masm_->CodeObject()));
5322 __ Jump(at);
5323 }
5326 #undef __
5328 #define __ ACCESS_MASM(masm())
5330 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5331 int* stack_depth,
5332 int* context_length) {
5333 // The macros used here must preserve the result register.
5335 // Because the handler block contains the context of the finally
5336 // code, we can restore it directly from there for the finally code
5337 // rather than iteratively unwinding contexts via their previous
5338 // links.
5339 __ Drop(*stack_depth); // Down to the handler block.
5340 if (*context_length > 0) {
5341 // Restore the context to its dedicated register and the stack.
5342 __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
5343 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5344 }
5345 __ PopTryHandler();
5346 __ Call(finally_entry_);
5348 *stack_depth = 0;
5349 *context_length = 0;
5350 return previous_;
5351 }
5357 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5358 Address pc,
5359 BackEdgeState target_state,
5360 Code* replacement_code) {
5361 static const int kInstrSize = Assembler::kInstrSize;
5362 Address branch_address = pc - 6 * kInstrSize;
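// pc points just past the back-edge check sequence, so the patchable
// branch sits six instructions back and the lui/ori address pair four
// back (see the instruction layouts in the switch below).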
5363 CodePatcher patcher(branch_address, 1);
5365 switch (target_state) {
5366 case INTERRUPT:
5367 // slt at, a3, zero_reg (in case of count based interrupts)
5368 // beq at, zero_reg, ok
5369 // lui t9, <interrupt stub address> upper
5370 // ori t9, <interrupt stub address> lower
5371 // jalr t9
5372 // nop
5373 // ok-label ----- pc_after points here
5374 patcher.masm()->slt(at, a3, zero_reg);
5375 break;
5376 case ON_STACK_REPLACEMENT:
5377 case OSR_AFTER_STACK_CHECK:
5378 // addiu at, zero_reg, 1
5379 // beq at, zero_reg, ok ;; Not changed
5380 // lui t9, <on-stack replacement address> upper
5381 // ori t9, <on-stack replacement address> lower
5382 // jalr t9 ;; Not changed
5383 // nop ;; Not changed
5384 // ok-label ----- pc_after points here
5385 patcher.masm()->addiu(at, zero_reg, 1);
5386 break;
5387 }
5388 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5389 // Replace the stack check address in the load-immediate (lui/ori pair)
5390 // with the entry address of the replacement code.
5391 Assembler::set_target_address_at(pc_immediate_load_address,
5392 replacement_code->entry());
5394 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5395 unoptimized_code, pc_immediate_load_address, replacement_code);
5399 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5400 Isolate* isolate,
5401 Code* unoptimized_code,
5402 Address pc) {
5403 static const int kInstrSize = Assembler::kInstrSize;
5404 Address branch_address = pc - 6 * kInstrSize;
5405 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5407 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
5408 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5409 DCHECK(reinterpret_cast<uint32_t>(
5410 Assembler::target_address_at(pc_immediate_load_address)) ==
5411 reinterpret_cast<uint32_t>(
5412 isolate->builtins()->InterruptCheck()->entry()));
5413 return INTERRUPT;
5414 }
5416 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5418 if (reinterpret_cast<uint32_t>(
5419 Assembler::target_address_at(pc_immediate_load_address)) ==
5420 reinterpret_cast<uint32_t>(
5421 isolate->builtins()->OnStackReplacement()->entry())) {
5422 return ON_STACK_REPLACEMENT;
5423 }
5425 DCHECK(reinterpret_cast<uint32_t>(
5426 Assembler::target_address_at(pc_immediate_load_address)) ==
5427 reinterpret_cast<uint32_t>(
5428 isolate->builtins()->OsrAfterStackCheck()->entry()));
5429 return OSR_AFTER_STACK_CHECK;
5430 }
5433 } } // namespace v8::internal
5435 #endif // V8_TARGET_ARCH_MIPS