1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
19 #include "src/ppc/code-stubs-ppc.h"
20 #include "src/ppc/macro-assembler-ppc.h"
25 #define __ ACCESS_MASM(masm_)
27 // A patch site is a location in the code that can be patched. This class
28 // has a number of methods to emit the patchable code and the method
29 // EmitPatchInfo to record a marker back to the patchable code. This marker
30 // is a cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (the raw 16-bit
31 // immediate value) is the delta from the pc to the first instruction of
32 // the patchable code.
33 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
34 class JumpPatchSite BASE_EMBEDDED {
36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
38 info_emitted_ = false;
42 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
44 // When initially emitting this code, ensure that a jump is always generated
45 // to skip the inlined smi code.
46 void EmitJumpIfNotSmi(Register reg, Label* target) {
47 DCHECK(!patch_site_.is_bound() && !info_emitted_);
48 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
49 __ bind(&patch_site_);
50 __ cmp(reg, reg, cr0);
51 __ beq(target, cr0); // Always taken before patched.
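    // Note: comparing reg with itself always sets the EQ bit in cr0, so the
    // beq above is unconditionally taken until PatchInlinedSmiCode rewrites
    // the cmp (presumably into an andi. against the smi tag mask), turning
    // the branch into a real smi check.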
54 // When initially emitting this code, ensure that a jump is never generated
55 // to skip the inlined smi code.
56 void EmitJumpIfSmi(Register reg, Label* target) {
57 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
58 DCHECK(!patch_site_.is_bound() && !info_emitted_);
59 __ bind(&patch_site_);
60 __ cmp(reg, reg, cr0);
61 __ bne(target, cr0); // Never taken before patched.
64 void EmitPatchInfo() {
65 if (patch_site_.is_bound()) {
66 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
68 // reg's register code carries the high bits of the offset; the cmpi immediate carries the low 16 bits.
69 reg.set_code(delta_to_patch_site / kOff16Mask);
70 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
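      // A minimal worked example (assuming kOff16Mask == 0xffff): a delta of
      // 70000 instructions is encoded as register code 70000 / 0xffff = 1 and
      // immediate 70000 % 0xffff = 4465; PatchInlinedSmiCode can recover the
      // delta as 1 * 0xffff + 4465 = 70000.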
75 __ nop(); // Signals no inlined code.
80 MacroAssembler* masm_;
88 // Generate code for a JS function. On entry to the function the receiver
89 // and arguments have been pushed on the stack left to right. The actual
90 // argument count matches the formal parameter count expected by the function.
93 // The live registers are:
94 // o r4: the JS function object being called (i.e., ourselves)
96 // o fp: our caller's frame pointer (aka r31)
97 // o sp: stack pointer
98 // o lr: return address
99 // o ip: our own function entry (required by the prologue)
101 // The function builds a JS frame. Please see JavaScriptFrameConstants in
102 // frames-ppc.h for its layout.
103 void FullCodeGenerator::Generate() {
104 CompilationInfo* info = info_;
106 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
107 HandlerTable::LengthForRange(function()->handler_count()), TENURED));
109 profiling_counter_ = isolate()->factory()->NewCell(
110 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
111 SetFunctionPosition(function());
112 Comment cmnt(masm_, "[ function compiled by full code generator");
114 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
117 if (strlen(FLAG_stop_at) > 0 &&
118 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
123 // Sloppy mode functions and builtins need to replace the receiver with the
124 // global proxy when called as functions (without an explicit receiver object).
126 if (is_sloppy(info->language_mode()) && !info->is_native() &&
127 info->MayUseThis() && info->scope()->has_this_declaration()) {
129 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
130 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
131 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
134 __ LoadP(r5, GlobalObjectOperand());
135 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
137 __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
142 // Open a frame scope to indicate that there is a frame on the stack. The
143 // MANUAL indicates that the scope shouldn't actually generate code to set up
144 // the frame (that is done below).
145 FrameScope frame_scope(masm_, StackFrame::MANUAL);
146 int prologue_offset = masm_->pc_offset();
148 if (prologue_offset) {
149 // Prologue logic requires its starting address in ip and the
150 // corresponding offset from the function entry.
151 prologue_offset += Instruction::kInstrSize;
152 __ addi(ip, ip, Operand(prologue_offset));
154 info->set_prologue_offset(prologue_offset);
155 __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
156 info->AddNoFrameRange(0, masm_->pc_offset());
159 Comment cmnt(masm_, "[ Allocate locals");
160 int locals_count = info->scope()->num_stack_slots();
161 // Generators allocate locals, if any, in context slots.
162 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
163 if (locals_count > 0) {
164 if (locals_count >= 128) {
166 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
167 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
169 __ bc_short(ge, &ok);
170 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
173 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
174 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
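  // The undefined fillers are pushed in batches: each iteration of the loop
  // below emits up to kMaxPushes pushes, trading a little code size (less when
  // optimizing for size) for fewer branches when a function has many locals.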
175 if (locals_count >= kMaxPushes) {
176 int loop_iterations = locals_count / kMaxPushes;
177 __ mov(r5, Operand(loop_iterations));
180 __ bind(&loop_header);
182 for (int i = 0; i < kMaxPushes; i++) {
185 // Continue loop if not done.
186 __ bdnz(&loop_header);
188 int remaining = locals_count % kMaxPushes;
189 // Emit the remaining pushes.
190 for (int i = 0; i < remaining; i++) {
196 bool function_in_register = true;
198 // Possibly allocate a local context.
199 if (info->scope()->num_heap_slots() > 0) {
200 // Argument to NewContext is the function, which is still in r4.
201 Comment cmnt(masm_, "[ Allocate context");
202 bool need_write_barrier = true;
203 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
204 if (info->scope()->is_script_scope()) {
206 __ Push(info->scope()->GetScopeInfo(info->isolate()));
207 __ CallRuntime(Runtime::kNewScriptContext, 2);
208 } else if (slots <= FastNewContextStub::kMaximumSlots) {
209 FastNewContextStub stub(isolate(), slots);
211 // Result of FastNewContextStub is always in new space.
212 need_write_barrier = false;
215 __ CallRuntime(Runtime::kNewFunctionContext, 1);
217 function_in_register = false;
218 // Context is returned in r3. It replaces the context passed to us.
219 // It's saved in the stack and kept live in cp.
221 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
222 // Copy any necessary parameters into the context.
223 int num_parameters = info->scope()->num_parameters();
224 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
225 for (int i = first_parameter; i < num_parameters; i++) {
226 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
227 if (var->IsContextSlot()) {
228 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
229 (num_parameters - 1 - i) * kPointerSize;
230 // Load parameter from stack.
231 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
232 // Store it in the context.
233 MemOperand target = ContextOperand(cp, var->index());
234 __ StoreP(r3, target, r0);
236 // Update the write barrier.
237 if (need_write_barrier) {
238 __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
239 kLRHasBeenSaved, kDontSaveFPRegs);
240 } else if (FLAG_debug_code) {
242 __ JumpIfInNewSpace(cp, r3, &done);
243 __ Abort(kExpectedNewSpaceObject);
250 // Possibly set up a local binding to the 'this function' variable, which is
251 // used in derived constructors with super calls.
252 Variable* this_function_var = scope()->this_function_var();
253 if (this_function_var != nullptr) {
254 Comment cmnt(masm_, "[ This function");
255 SetVar(this_function_var, r4, r3, r5);
258 Variable* new_target_var = scope()->new_target_var();
259 if (new_target_var != nullptr) {
260 Comment cmnt(masm_, "[ new.target");
261 // new.target is parameter -2.
262 int offset = 2 * kPointerSize +
263 (info_->scope()->num_parameters() + 1) * kPointerSize;
264 __ LoadP(r3, MemOperand(fp, offset));
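    // The 2 * kPointerSize term presumably covers the frame header between fp
    // and the caller's stack pointer; the (num_parameters + 1) term then skips
    // the declared parameters and the receiver, landing on the extra
    // new.target slot (parameter -2, per the comment above).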
265 SetVar(new_target_var, r3, r5, r6);
268 ArgumentsAccessStub::HasNewTarget has_new_target =
269 IsSubclassConstructor(info->function()->kind())
270 ? ArgumentsAccessStub::HAS_NEW_TARGET
271 : ArgumentsAccessStub::NO_NEW_TARGET;
273 // Possibly allocate a rest parameter array.
275 Variable* rest_param = scope()->rest_parameter(&rest_index);
277 Comment cmnt(masm_, "[ Allocate rest parameter array");
279 int num_parameters = info->scope()->num_parameters();
280 int offset = num_parameters * kPointerSize;
281 if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
286 __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
287 __ LoadSmiLiteral(r5, Smi::FromInt(num_parameters));
288 __ LoadSmiLiteral(r4, Smi::FromInt(rest_index));
289 __ LoadSmiLiteral(r3, Smi::FromInt(language_mode()));
290 __ Push(r6, r5, r4, r3);
292 RestParamAccessStub stub(isolate());
295 SetVar(rest_param, r3, r4, r5);
298 Variable* arguments = scope()->arguments();
299 if (arguments != NULL) {
300 // Function uses arguments object.
301 Comment cmnt(masm_, "[ Allocate arguments object");
302 if (!function_in_register) {
303 // Load this again, if it's used by the local context below.
304 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
308 // Receiver is just before the parameters on the caller's stack.
309 int num_parameters = info->scope()->num_parameters();
310 int offset = num_parameters * kPointerSize;
311 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
312 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
315 // Arguments to ArgumentsAccessStub:
316 // function, receiver address, parameter count.
317 // The stub will rewrite receiver and parameter count if the previous
318 // stack frame was an arguments adapter frame.
319 ArgumentsAccessStub::Type type;
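  // Rough intent of the three variants (not spelled out here): NEW_STRICT
  // builds an unmapped arguments object, NEW_SLOPPY_FAST builds a mapped
  // (parameter-aliasing) one, and NEW_SLOPPY_SLOW handles sloppy functions
  // with duplicate parameters, where aliasing is not possible.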
320 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
321 type = ArgumentsAccessStub::NEW_STRICT;
322 } else if (function()->has_duplicate_parameters()) {
323 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
325 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
327 ArgumentsAccessStub stub(isolate(), type, has_new_target);
330 SetVar(arguments, r3, r4, r5);
334 __ CallRuntime(Runtime::kTraceEnter, 0);
337 // Visit the declarations and body unless there is an illegal redeclaration.
339 if (scope()->HasIllegalRedeclaration()) {
340 Comment cmnt(masm_, "[ Declarations");
341 scope()->VisitIllegalRedeclaration(this);
344 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
346 Comment cmnt(masm_, "[ Declarations");
347 // For named function expressions, declare the function name as a constant.
349 if (scope()->is_function_scope() && scope()->function() != NULL) {
350 VariableDeclaration* function = scope()->function();
351 DCHECK(function->proxy()->var()->mode() == CONST ||
352 function->proxy()->var()->mode() == CONST_LEGACY);
353 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
354 VisitVariableDeclaration(function);
356 VisitDeclarations(scope()->declarations());
360 Comment cmnt(masm_, "[ Stack check");
361 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
363 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
365 __ bc_short(ge, &ok);
366 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
371 Comment cmnt(masm_, "[ Body");
372 DCHECK(loop_depth() == 0);
373 VisitStatements(function()->body());
374 DCHECK(loop_depth() == 0);
378 // Always emit a 'return undefined' in case control fell off the end of the body.
381 Comment cmnt(masm_, "[ return <undefined>;");
382 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
384 EmitReturnSequence();
386 if (HasStackOverflow()) {
387 masm_->AbortConstantPoolBuilding();
392 void FullCodeGenerator::ClearAccumulator() {
393 __ LoadSmiLiteral(r3, Smi::FromInt(0));
397 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
398 __ mov(r5, Operand(profiling_counter_));
399 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
400 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
401 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
405 void FullCodeGenerator::EmitProfilingCounterReset() {
406 int reset_value = FLAG_interrupt_budget;
407 if (info_->is_debug()) {
408 // Detect debug break requests as soon as possible.
409 reset_value = FLAG_interrupt_budget >> 4;
411 __ mov(r5, Operand(profiling_counter_));
412 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
413 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
417 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
418 Label* back_edge_target) {
419 Comment cmnt(masm_, "[ Back edge bookkeeping");
422 DCHECK(back_edge_target->is_bound());
423 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
424 kCodeSizeMultiplier / 2;
425 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
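  // For example, a loop body that generated roughly 3 * kCodeSizeMultiplier
  // bytes of code yields a weight of 3; tiny loops still decrement by at
  // least 1, and very large bodies are capped at kMaxBackEdgeWeight.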
426 EmitProfilingCounterDecrement(weight);
428 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
429 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
430 // BackEdgeTable::PatchAt manipulates this sequence.
431 __ cmpi(r6, Operand::Zero());
432 __ bc_short(ge, &ok);
433 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
435 // Record a mapping of this PC offset to the OSR id. This is used to find
436 // the AST id from the unoptimized code in order to use it as a key into
437 // the deoptimization input data found in the optimized code.
438 RecordBackEdge(stmt->OsrEntryId());
440 EmitProfilingCounterReset();
443 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
444 // Record a mapping of the OSR id to this PC. This is used if the OSR
445 // entry becomes the target of a bailout. We don't expect it to be, but
446 // we want it to work if it is.
447 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
451 void FullCodeGenerator::EmitReturnSequence() {
452 Comment cmnt(masm_, "[ Return sequence");
453 if (return_label_.is_bound()) {
454 __ b(&return_label_);
456 __ bind(&return_label_);
458 // Push the return value on the stack as the parameter.
459 // Runtime::TraceExit returns its parameter in r3
461 __ CallRuntime(Runtime::kTraceExit, 1);
463 // Pretend that the exit is a backwards jump to the entry.
465 if (info_->ShouldSelfOptimize()) {
466 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
468 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
469 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
471 EmitProfilingCounterDecrement(weight);
473 __ cmpi(r6, Operand::Zero());
476 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
478 EmitProfilingCounterReset();
482 // Add a label for checking the size of the code used for returning.
483 Label check_exit_codesize;
484 __ bind(&check_exit_codesize);
486 // Make sure that the constant pool is not emitted inside of the return sequence.
489 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
490 int32_t arg_count = info_->scope()->num_parameters() + 1;
491 if (IsSubclassConstructor(info_->function()->kind())) {
494 int32_t sp_delta = arg_count * kPointerSize;
495 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
497 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
498 #if V8_TARGET_ARCH_PPC64
499 // With 64-bit we may need nop() instructions to ensure we have
500 // enough space for SetDebugBreakAtReturn().
501 if (is_int16(sp_delta)) {
502 if (!FLAG_enable_embedded_constant_pool) masm_->nop();
507 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
511 // Check that the size of the code used for returning is large enough
512 // for the debugger's requirements.
513 DCHECK(Assembler::kJSReturnSequenceInstructions <=
514 masm_->InstructionsGeneratedSince(&check_exit_codesize));
520 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
521 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
525 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
526 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
527 codegen()->GetVar(result_register(), var);
531 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
532 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
533 codegen()->GetVar(result_register(), var);
534 __ push(result_register());
538 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
539 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
540 // For simplicity we always test the accumulator register.
541 codegen()->GetVar(result_register(), var);
542 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
543 codegen()->DoTest(this);
547 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
550 void FullCodeGenerator::AccumulatorValueContext::Plug(
551 Heap::RootListIndex index) const {
552 __ LoadRoot(result_register(), index);
556 void FullCodeGenerator::StackValueContext::Plug(
557 Heap::RootListIndex index) const {
558 __ LoadRoot(result_register(), index);
559 __ push(result_register());
563 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
564 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
566 if (index == Heap::kUndefinedValueRootIndex ||
567 index == Heap::kNullValueRootIndex ||
568 index == Heap::kFalseValueRootIndex) {
569 if (false_label_ != fall_through_) __ b(false_label_);
570 } else if (index == Heap::kTrueValueRootIndex) {
571 if (true_label_ != fall_through_) __ b(true_label_);
573 __ LoadRoot(result_register(), index);
574 codegen()->DoTest(this);
579 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
582 void FullCodeGenerator::AccumulatorValueContext::Plug(
583 Handle<Object> lit) const {
584 __ mov(result_register(), Operand(lit));
588 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
589 // Immediates cannot be pushed directly.
590 __ mov(result_register(), Operand(lit));
591 __ push(result_register());
595 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
596 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
598 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
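  // The branching below mirrors ToBoolean for literal types that can be
  // decided statically: undefined, null, false, the empty string and the
  // zero smi go to the false branch; true, JS objects, non-empty strings and
  // non-zero smis go to the true branch; anything else falls through to a
  // dynamic DoTest on the accumulator.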
599 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
600 if (false_label_ != fall_through_) __ b(false_label_);
601 } else if (lit->IsTrue() || lit->IsJSObject()) {
602 if (true_label_ != fall_through_) __ b(true_label_);
603 } else if (lit->IsString()) {
604 if (String::cast(*lit)->length() == 0) {
605 if (false_label_ != fall_through_) __ b(false_label_);
607 if (true_label_ != fall_through_) __ b(true_label_);
609 } else if (lit->IsSmi()) {
610 if (Smi::cast(*lit)->value() == 0) {
611 if (false_label_ != fall_through_) __ b(false_label_);
613 if (true_label_ != fall_through_) __ b(true_label_);
616 // For simplicity we always test the accumulator register.
617 __ mov(result_register(), Operand(lit));
618 codegen()->DoTest(this);
623 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
624 Register reg) const {
630 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
631 int count, Register reg) const {
634 __ Move(result_register(), reg);
638 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
639 Register reg) const {
641 if (count > 1) __ Drop(count - 1);
642 __ StoreP(reg, MemOperand(sp, 0));
646 void FullCodeGenerator::TestContext::DropAndPlug(int count,
647 Register reg) const {
649 // For simplicity we always test the accumulator register.
651 __ Move(result_register(), reg);
652 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
653 codegen()->DoTest(this);
657 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
658 Label* materialize_false) const {
659 DCHECK(materialize_true == materialize_false);
660 __ bind(materialize_true);
664 void FullCodeGenerator::AccumulatorValueContext::Plug(
665 Label* materialize_true, Label* materialize_false) const {
667 __ bind(materialize_true);
668 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
670 __ bind(materialize_false);
671 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
676 void FullCodeGenerator::StackValueContext::Plug(
677 Label* materialize_true, Label* materialize_false) const {
679 __ bind(materialize_true);
680 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
682 __ bind(materialize_false);
683 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
689 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
690 Label* materialize_false) const {
691 DCHECK(materialize_true == true_label_);
692 DCHECK(materialize_false == false_label_);
696 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
699 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
700 Heap::RootListIndex value_root_index =
701 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
702 __ LoadRoot(result_register(), value_root_index);
706 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
707 Heap::RootListIndex value_root_index =
708 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
709 __ LoadRoot(ip, value_root_index);
714 void FullCodeGenerator::TestContext::Plug(bool flag) const {
715 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
718 if (true_label_ != fall_through_) __ b(true_label_);
720 if (false_label_ != fall_through_) __ b(false_label_);
725 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
726 Label* if_false, Label* fall_through) {
727 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
728 CallIC(ic, condition->test_id());
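  // The ToBoolean IC is expected to leave zero in the result register for
  // falsy inputs and a non-zero value for truthy ones, so a compare against
  // zero followed by a split on ne routes control to the right label.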
729 __ cmpi(result_register(), Operand::Zero());
730 Split(ne, if_true, if_false, fall_through);
734 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
735 Label* fall_through, CRegister cr) {
736 if (if_false == fall_through) {
737 __ b(cond, if_true, cr);
738 } else if (if_true == fall_through) {
739 __ b(NegateCondition(cond), if_false, cr);
741 __ b(cond, if_true, cr);
747 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
748 DCHECK(var->IsStackAllocated());
749 // Offset is negative because higher indexes are at lower addresses.
750 int offset = -var->index() * kPointerSize;
751 // Adjust by a (parameter or local) base offset.
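  // Parameters end up above fp, in the caller's portion of the stack, while
  // locals end up below fp starting at kLocal0Offset, hence the two different
  // base adjustments below.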
752 if (var->IsParameter()) {
753 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
755 offset += JavaScriptFrameConstants::kLocal0Offset;
757 return MemOperand(fp, offset);
761 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
762 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
763 if (var->IsContextSlot()) {
764 int context_chain_length = scope()->ContextChainLength(var->scope());
765 __ LoadContext(scratch, context_chain_length);
766 return ContextOperand(scratch, var->index());
768 return StackOperand(var);
773 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
774 // Use destination as scratch.
775 MemOperand location = VarOperand(var, dest);
776 __ LoadP(dest, location, r0);
780 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
782 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
783 DCHECK(!scratch0.is(src));
784 DCHECK(!scratch0.is(scratch1));
785 DCHECK(!scratch1.is(src));
786 MemOperand location = VarOperand(var, scratch0);
787 __ StoreP(src, location, r0);
789 // Emit the write barrier code if the location is in the heap.
790 if (var->IsContextSlot()) {
791 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
792 kLRHasBeenSaved, kDontSaveFPRegs);
797 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
798 bool should_normalize,
801 // Only prepare for bailouts before splits if we're in a test
802 // context. Otherwise, we let the Visit function deal with the
803 // preparation to avoid preparing with the same AST id twice.
804 if (!context()->IsTest() || !info_->IsOptimizable()) return;
807 if (should_normalize) __ b(&skip);
808 PrepareForBailout(expr, TOS_REG);
809 if (should_normalize) {
810 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
812 Split(eq, if_true, if_false, NULL);
818 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
819 // The variable in the declaration always resides in the current function context.
821 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
822 if (generate_debug_code_) {
823 // Check that we're not inside a with or catch context.
824 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
825 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
826 __ Check(ne, kDeclarationInWithContext);
827 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
828 __ Check(ne, kDeclarationInCatchContext);
833 void FullCodeGenerator::VisitVariableDeclaration(
834 VariableDeclaration* declaration) {
835 // If it was not possible to allocate the variable at compile time, we
836 // need to "declare" it at runtime to make sure it actually exists in the global object.
838 VariableProxy* proxy = declaration->proxy();
839 VariableMode mode = declaration->mode();
840 Variable* variable = proxy->var();
841 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
842 switch (variable->location()) {
843 case Variable::UNALLOCATED:
844 globals_->Add(variable->name(), zone());
845 globals_->Add(variable->binding_needs_init()
846 ? isolate()->factory()->the_hole_value()
847 : isolate()->factory()->undefined_value(),
851 case Variable::PARAMETER:
852 case Variable::LOCAL:
854 Comment cmnt(masm_, "[ VariableDeclaration");
855 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
856 __ StoreP(ip, StackOperand(variable));
860 case Variable::CONTEXT:
862 Comment cmnt(masm_, "[ VariableDeclaration");
863 EmitDebugCheckDeclarationContext(variable);
864 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
865 __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
866 // No write barrier since the_hole_value is in old space.
867 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
871 case Variable::LOOKUP: {
872 Comment cmnt(masm_, "[ VariableDeclaration");
873 __ mov(r5, Operand(variable->name()));
874 // Declaration nodes are always introduced in one of four modes.
875 DCHECK(IsDeclaredVariableMode(mode));
876 PropertyAttributes attr =
877 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
878 __ LoadSmiLiteral(r4, Smi::FromInt(attr));
879 // Push initial value, if any.
880 // Note: For variables we must not push an initial value (such as
881 // 'undefined') because we may have a (legal) redeclaration and we
882 // must not destroy the current value.
884 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
885 __ Push(cp, r5, r4, r3);
887 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
888 __ Push(cp, r5, r4, r3);
890 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
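      // In both branches above, the four runtime arguments are the context,
      // the variable name, the property attributes, and the initial value
      // (the hole for hole-initialized modes, a zero smi otherwise).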
897 void FullCodeGenerator::VisitFunctionDeclaration(
898 FunctionDeclaration* declaration) {
899 VariableProxy* proxy = declaration->proxy();
900 Variable* variable = proxy->var();
901 switch (variable->location()) {
902 case Variable::UNALLOCATED: {
903 globals_->Add(variable->name(), zone());
904 Handle<SharedFunctionInfo> function =
905 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
906 // Check for stack-overflow exception.
907 if (function.is_null()) return SetStackOverflow();
908 globals_->Add(function, zone());
912 case Variable::PARAMETER:
913 case Variable::LOCAL: {
914 Comment cmnt(masm_, "[ FunctionDeclaration");
915 VisitForAccumulatorValue(declaration->fun());
916 __ StoreP(result_register(), StackOperand(variable));
920 case Variable::CONTEXT: {
921 Comment cmnt(masm_, "[ FunctionDeclaration");
922 EmitDebugCheckDeclarationContext(variable);
923 VisitForAccumulatorValue(declaration->fun());
924 __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
925 int offset = Context::SlotOffset(variable->index());
926 // We know that we have written a function, which is not a smi.
927 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
928 kLRHasBeenSaved, kDontSaveFPRegs,
929 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
930 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
934 case Variable::LOOKUP: {
935 Comment cmnt(masm_, "[ FunctionDeclaration");
936 __ mov(r5, Operand(variable->name()));
937 __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
939 // Push initial value for function declaration.
940 VisitForStackValue(declaration->fun());
941 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
948 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
949 VariableProxy* proxy = declaration->proxy();
950 Variable* variable = proxy->var();
951 switch (variable->location()) {
952 case Variable::UNALLOCATED:
956 case Variable::CONTEXT: {
957 Comment cmnt(masm_, "[ ImportDeclaration");
958 EmitDebugCheckDeclarationContext(variable);
963 case Variable::PARAMETER:
964 case Variable::LOCAL:
965 case Variable::LOOKUP:
971 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
976 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
977 // Call the runtime to declare the globals.
978 // The context is the first argument.
979 __ mov(r4, Operand(pairs));
980 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
982 __ CallRuntime(Runtime::kDeclareGlobals, 3);
983 // Return value is ignored.
987 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
988 // Call the runtime to declare the modules.
989 __ Push(descriptions);
990 __ CallRuntime(Runtime::kDeclareModules, 1);
991 // Return value is ignored.
995 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
996 Comment cmnt(masm_, "[ SwitchStatement");
997 Breakable nested_statement(this, stmt);
998 SetStatementPosition(stmt);
1000 // Keep the switch value on the stack until a case matches.
1001 VisitForStackValue(stmt->tag());
1002 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1004 ZoneList<CaseClause*>* clauses = stmt->cases();
1005 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1007 Label next_test; // Recycled for each test.
1008 // Compile all the tests with branches to their bodies.
1009 for (int i = 0; i < clauses->length(); i++) {
1010 CaseClause* clause = clauses->at(i);
1011 clause->body_target()->Unuse();
1013 // The default clause is not a test, but remember it as the final fall-through.
1014 if (clause->is_default()) {
1015 default_clause = clause;
1019 Comment cmnt(masm_, "[ Case comparison");
1020 __ bind(&next_test);
1023 // Compile the label expression.
1024 VisitForAccumulatorValue(clause->label());
1026 // Perform the comparison as if via '==='.
1027 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
1028 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1029 JumpPatchSite patch_site(masm_);
1030 if (inline_smi_code) {
1033 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
1037 __ Drop(1); // Switch value is no longer needed.
1038 __ b(clause->body_target());
1039 __ bind(&slow_case);
1042 // Record position before stub call for type feedback.
1043 SetSourcePosition(clause->position());
1044 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1045 language_mode()).code();
1046 CallIC(ic, clause->CompareId());
1047 patch_site.EmitPatchInfo();
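      // EmitPatchInfo ties the CompareIC call above to the inlined smi
      // comparison emitted by the JumpPatchSite, so the IC can later patch
      // that code once it has gathered type feedback (see the class comment
      // near the top of this file).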
1051 PrepareForBailout(clause, TOS_REG);
1052 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1056 __ b(clause->body_target());
1059 __ cmpi(r3, Operand::Zero());
1061 __ Drop(1); // Switch value is no longer needed.
1062 __ b(clause->body_target());
1065 // Discard the test value and jump to the default if present, otherwise to
1066 // the end of the statement.
1067 __ bind(&next_test);
1068 __ Drop(1); // Switch value is no longer needed.
1069 if (default_clause == NULL) {
1070 __ b(nested_statement.break_label());
1072 __ b(default_clause->body_target());
1075 // Compile all the case bodies.
1076 for (int i = 0; i < clauses->length(); i++) {
1077 Comment cmnt(masm_, "[ Case body");
1078 CaseClause* clause = clauses->at(i);
1079 __ bind(clause->body_target());
1080 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1081 VisitStatements(clause->statements());
1084 __ bind(nested_statement.break_label());
1085 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1089 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1090 Comment cmnt(masm_, "[ ForInStatement");
1091 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1092 SetStatementPosition(stmt);
1095 ForIn loop_statement(this, stmt);
1096 increment_loop_depth();
1098 // Get the object to enumerate over. If the object is null or undefined, skip
1099 // over the loop. See ECMA-262 version 5, section 12.6.4.
1100 SetExpressionPosition(stmt->enumerable());
1101 VisitForAccumulatorValue(stmt->enumerable());
1102 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1105 Register null_value = r7;
1106 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1107 __ cmp(r3, null_value);
1110 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1112 // Convert the object to a JS object.
1113 Label convert, done_convert;
1114 __ JumpIfSmi(r3, &convert);
1115 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1116 __ bge(&done_convert);
1119 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1120 __ bind(&done_convert);
1121 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1124 // Check for proxies.
1126 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1127 __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
1128 __ ble(&call_runtime);
1130 // Check cache validity in generated code. This is a fast case for
1131 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1132 // guarantee cache validity, call the runtime system to check cache
1133 // validity or get the property names in a fixed array.
1134 __ CheckEnumCache(null_value, &call_runtime);
1136 // The enum cache is valid. Load the map of the object being
1137 // iterated over and use the cache for the iteration.
1139 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1142 // Get the set of properties to enumerate.
1143 __ bind(&call_runtime);
1144 __ push(r3); // Duplicate the enumerable object on the stack.
1145 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1146 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1148 // If we got a map from the runtime call, we can do a fast
1149 // modification check. Otherwise, we got a fixed array, and we have
1150 // to do a slow check.
1152 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1153 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1155 __ bne(&fixed_array);
1157 // We got a map in register r3. Get the enumeration cache from it.
1158 Label no_descriptors;
1159 __ bind(&use_cache);
1161 __ EnumLength(r4, r3);
1162 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1163 __ beq(&no_descriptors);
1165 __ LoadInstanceDescriptors(r3, r5);
1166 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1168 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1170 // Set up the four remaining stack slots.
1171 __ push(r3); // Map.
1172 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1173 // Push enumeration cache, enumeration cache length (as smi) and zero.
1174 __ Push(r5, r4, r3);
1177 __ bind(&no_descriptors);
1181 // We got a fixed array in register r3. Iterate through that.
1183 __ bind(&fixed_array);
1185 __ Move(r4, FeedbackVector());
1186 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1187 int vector_index = FeedbackVector()->GetIndex(slot);
1189 r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);
1191 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
1192 __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1193 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1194 __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
1196 __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
1197 __ bind(&non_proxy);
1198 __ Push(r4, r3); // Smi and array
1199 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1200 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1201 __ Push(r4, r3); // Fixed array length (as smi) and initial index.
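  // From here on the loop operates on five stack slots, matching the loads
  // below: [sp + 0] current index, [sp + 1] array length, [sp + 2] the array
  // (enum cache or fixed array), [sp + 3] the expected map or a smi marker,
  // and [sp + 4] the enumerable object itself.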
1203 // Generate code for doing the condition check.
1204 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1206 SetExpressionPosition(stmt->each());
1208 // Load the current count to r3, load the length to r4.
1209 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1210 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1211 __ cmpl(r3, r4); // Compare to the array length.
1212 __ bge(loop_statement.break_label());
1214 // Get the current entry of the array into register r6.
1215 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1216 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1217 __ SmiToPtrArrayOffset(r6, r3);
1218 __ LoadPX(r6, MemOperand(r6, r5));
1220 // Get the expected map from the stack, or a smi in the
1221 // permanent slow case, into register r5.
1222 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1224 // Check if the expected map still matches that of the enumerable.
1225 // If not, we may have to filter the key.
1227 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1228 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1230 __ beq(&update_each);
1232 // For proxies, no filtering is done.
1233 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1234 __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
1235 __ beq(&update_each);
1237 // Convert the entry to a string or (smi) 0 if it isn't a property
1238 // any more. If the property has been removed while iterating, we just skip it.
1240 __ Push(r4, r6); // Enumerable and current entry.
1241 __ CallRuntime(Runtime::kForInFilter, 2);
1242 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1244 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1246 __ beq(loop_statement.continue_label());
1248 // Update the 'each' property or variable from the possibly filtered
1249 // entry in register r6.
1250 __ bind(&update_each);
1251 __ mr(result_register(), r6);
1252 // Perform the assignment as if via '='.
1254 EffectContext context(this);
1255 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1256 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1259 // Generate code for the body of the loop.
1260 Visit(stmt->body());
1262 // Generate code for going to the next element by incrementing
1263 // the index (smi) stored on top of the stack.
1264 __ bind(loop_statement.continue_label());
1266 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1269 EmitBackEdgeBookkeeping(stmt, &loop);
1272 // Remove the pointers stored on the stack.
1273 __ bind(loop_statement.break_label());
1276 // Exit and decrement the loop depth.
1277 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1279 decrement_loop_depth();
1283 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1285 // Use the fast case closure allocation code that allocates in new
1286 // space for nested functions that don't need literals cloning. If
1287 // we're running with the --always-opt or the --prepare-always-opt
1288 // flag, we need to use the runtime function so that the new function
1289 // we are creating here gets a chance to have its code optimized and
1290 // doesn't just get a copy of the existing unoptimized code.
1291 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1292 scope()->is_function_scope() && info->num_literals() == 0) {
1293 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1294 __ mov(r5, Operand(info));
1297 __ mov(r3, Operand(info));
1299 r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1300 __ Push(cp, r3, r4);
1301 __ CallRuntime(Runtime::kNewClosure, 3);
1303 context()->Plug(r3);
1307 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1308 Comment cmnt(masm_, "[ VariableProxy");
1309 EmitVariableLoad(expr);
1313 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1315 FeedbackVectorICSlot slot) {
1316 if (NeedsHomeObject(initializer)) {
1317 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1318 __ mov(StoreDescriptor::NameRegister(),
1319 Operand(isolate()->factory()->home_object_symbol()));
1320 __ LoadP(StoreDescriptor::ValueRegister(),
1321 MemOperand(sp, offset * kPointerSize));
1322 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1328 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1329 TypeofState typeof_state,
1331 Register current = cp;
1337 if (s->num_heap_slots() > 0) {
1338 if (s->calls_sloppy_eval()) {
1339 // Check that extension is NULL.
1340 __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1341 __ cmpi(temp, Operand::Zero());
1344 // Load next context in chain.
1345 __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1346 // Walk the rest of the chain without clobbering cp.
1349 // If no outer scope calls eval, we do not need to check more
1350 // context extensions.
1351 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1352 s = s->outer_scope();
1355 if (s->is_eval_scope()) {
1357 if (!current.is(next)) {
1358 __ Move(next, current);
1361 // Terminate at native context.
1362 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1363 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1366 // Check that extension is NULL.
1367 __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1368 __ cmpi(temp, Operand::Zero());
1370 // Load next context in chain.
1371 __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1376 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1377 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1378 __ mov(LoadDescriptor::SlotRegister(),
1379 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1381 ContextualMode mode =
1382 (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL : CONTEXTUAL;
1387 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1389 DCHECK(var->IsContextSlot());
1390 Register context = cp;
1394 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1395 if (s->num_heap_slots() > 0) {
1396 if (s->calls_sloppy_eval()) {
1397 // Check that extension is NULL.
1398 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1399 __ cmpi(temp, Operand::Zero());
1402 __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1403 // Walk the rest of the chain without clobbering cp.
1407 // Check that last extension is NULL.
1408 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1409 __ cmpi(temp, Operand::Zero());
1412 // This function is used only for loads, not stores, so it's safe to
1413 // return a cp-based operand (the write barrier cannot be allowed to
1414 // destroy the cp register).
1415 return ContextOperand(context, var->index());
1419 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1420 TypeofState typeof_state,
1421 Label* slow, Label* done) {
1422 // Generate fast-case code for variables that might be shadowed by
1423 // eval-introduced variables. Eval is used a lot without
1424 // introducing variables. In those cases, we do not want to
1425 // perform a runtime call for all variables in the scope
1426 // containing the eval.
1427 Variable* var = proxy->var();
1428 if (var->mode() == DYNAMIC_GLOBAL) {
1429 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1431 } else if (var->mode() == DYNAMIC_LOCAL) {
1432 Variable* local = var->local_if_not_shadowed();
1433 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1434 if (local->mode() == LET || local->mode() == CONST ||
1435 local->mode() == CONST_LEGACY) {
1436 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1438 if (local->mode() == CONST_LEGACY) {
1439 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1440 } else { // LET || CONST
1441 __ mov(r3, Operand(var->name()));
1443 __ CallRuntime(Runtime::kThrowReferenceError, 1);
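      // In other words: reading the hole from a LET or CONST binding means it
      // is still in its temporal dead zone, so a ReferenceError is thrown,
      // while a legacy const binding simply yields undefined.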
1451 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1452 // Record position before possible IC call.
1453 SetSourcePosition(proxy->position());
1454 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1455 Variable* var = proxy->var();
1457 // Three cases: global variables, lookup variables, and all other types of variables.
1459 switch (var->location()) {
1460 case Variable::UNALLOCATED: {
1461 Comment cmnt(masm_, "[ Global variable");
1462 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1463 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1464 __ mov(LoadDescriptor::SlotRegister(),
1465 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1466 CallGlobalLoadIC(var->name());
1467 context()->Plug(r3);
1471 case Variable::PARAMETER:
1472 case Variable::LOCAL:
1473 case Variable::CONTEXT: {
1474 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1475 : "[ Stack variable");
1476 if (var->binding_needs_init()) {
1477 // var->scope() may be NULL when the proxy is located in eval code and
1478 // refers to a potential outside binding. Currently those bindings are
1479 // always looked up dynamically, i.e. in that case
1480 // var->location() == LOOKUP.
1482 DCHECK(var->scope() != NULL);
1484 // Check if the binding really needs an initialization check. The check
1485 // can be skipped in the following situation: we have a LET or CONST
1486 // binding in harmony mode, both the Variable and the VariableProxy have
1487 // the same declaration scope (i.e. they are both in global code, in the
1488 // same function or in the same eval code) and the VariableProxy is
1489 // physically located in the source after the initializer of the variable.
1491 // We cannot skip any initialization checks for CONST in non-harmony
1492 // mode because const variables may be declared but never initialized:
1493 // if (false) { const x; }; var y = x;
1495 // The condition on the declaration scopes is a conservative check for
1496 // nested functions that access a binding and are called before the
1497 // binding is initialized:
1498 // function() { f(); let x = 1; function f() { x = 2; } }
1500 bool skip_init_check;
1501 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1502 skip_init_check = false;
1503 } else if (var->is_this()) {
1504 CHECK(info_->function() != nullptr &&
1505 (info_->function()->kind() & kSubclassConstructor) != 0);
1506 // TODO(dslomov): implement 'this' hole check elimination.
1507 skip_init_check = false;
1509 // Check that we always have valid source position.
1510 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1511 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1512 skip_init_check = var->mode() != CONST_LEGACY &&
1513 var->initializer_position() < proxy->position();
1516 if (!skip_init_check) {
1518 // Let and const need a read barrier.
1520 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1522 if (var->mode() == LET || var->mode() == CONST) {
1523 // Throw a reference error when using an uninitialized let/const
1524 // binding in harmony mode.
1525 __ mov(r3, Operand(var->name()));
1527 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1529 // Uninitialized const bindings outside of harmony mode are unholed.
1530 DCHECK(var->mode() == CONST_LEGACY);
1531 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1534 context()->Plug(r3);
1538 context()->Plug(var);
1542 case Variable::LOOKUP: {
1543 Comment cmnt(masm_, "[ Lookup variable");
1545 // Generate code for loading from variables potentially shadowed
1546 // by eval-introduced variables.
1547 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1549 __ mov(r4, Operand(var->name()));
1550 __ Push(cp, r4); // Context and name.
1551 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1553 context()->Plug(r3);
1559 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1560 Comment cmnt(masm_, "[ RegExpLiteral");
1562 // Registers will be used as follows:
1563 // r8 = materialized value (RegExp literal)
1564 // r7 = JS function, literals array
1565 // r6 = literal index
1566 // r5 = RegExp pattern
1567 // r4 = RegExp flags
1568 // r3 = RegExp literal clone
1569 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1570 __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1571 int literal_offset =
1572 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1573 __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
1574 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1576 __ bne(&materialized);
1578 // Create regexp literal using runtime function.
1579 // Result will be in r3.
1580 __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
1581 __ mov(r5, Operand(expr->pattern()));
1582 __ mov(r4, Operand(expr->flags()));
1583 __ Push(r7, r6, r5, r4);
1584 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1587 __ bind(&materialized);
1588 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1589 Label allocated, runtime_allocate;
1590 __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
1593 __ bind(&runtime_allocate);
1594 __ LoadSmiLiteral(r3, Smi::FromInt(size));
1596 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1599 __ bind(&allocated);
1600 // After this, registers are used as follows:
1601 // r3: Newly allocated regexp.
1602 // r8: Materialized regexp.
1604 __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
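  // CopyFields copies size / kPointerSize pointer-sized fields from the
  // materialized regexp in r8 into the freshly allocated object in r3, using
  // r5 as a scratch register.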
1605 context()->Plug(r3);
1609 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1610 if (expression == NULL) {
1611 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1614 VisitForStackValue(expression);
1619 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1620 Comment cmnt(masm_, "[ ObjectLiteral");
1622 Handle<FixedArray> constant_properties = expr->constant_properties();
1623 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1624 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1625 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1626 __ mov(r4, Operand(constant_properties));
1627 int flags = expr->ComputeFlags();
1628 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1629 if (MustCreateObjectLiteralWithRuntime(expr)) {
1630 __ Push(r6, r5, r4, r3);
1631 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1633 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1636 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1638 // If result_saved is true the result is on top of the stack. If
1639 // result_saved is false the result is in r3.
1640 bool result_saved = false;
1642 AccessorTable accessor_table(zone());
1643 int property_index = 0;
1644 // store_slot_index points to the vector ic slot for the next store ic used.
1645 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1646 // and must be updated if the number of store ics emitted here changes.
1647 int store_slot_index = 0;
1648 for (; property_index < expr->properties()->length(); property_index++) {
1649 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1650 if (property->is_computed_name()) break;
1651 if (property->IsCompileTimeValue()) continue;
1653 Literal* key = property->key()->AsLiteral();
1654 Expression* value = property->value();
1655 if (!result_saved) {
1656 __ push(r3); // Save result on stack
1657 result_saved = true;
1659 switch (property->kind()) {
1660 case ObjectLiteral::Property::CONSTANT:
1662 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1663 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1665 case ObjectLiteral::Property::COMPUTED:
1666 // It is safe to use [[Put]] here because the boilerplate already
1667 // contains computed properties with an uninitialized value.
1668 if (key->value()->IsInternalizedString()) {
1669 if (property->emit_store()) {
1670 VisitForAccumulatorValue(value);
1671 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1672 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1673 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1674 if (FLAG_vector_stores) {
1675 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1678 CallStoreIC(key->LiteralFeedbackId());
1680 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1682 if (NeedsHomeObject(value)) {
1683 __ Move(StoreDescriptor::ReceiverRegister(), r3);
1684 __ mov(StoreDescriptor::NameRegister(),
1685 Operand(isolate()->factory()->home_object_symbol()));
1686 __ LoadP(StoreDescriptor::ValueRegister(), MemOperand(sp));
1687 if (FLAG_vector_stores) {
1688 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1693 VisitForEffect(value);
1697 // Duplicate receiver on stack.
1698 __ LoadP(r3, MemOperand(sp));
1700 VisitForStackValue(key);
1701 VisitForStackValue(value);
1702 if (property->emit_store()) {
1703 EmitSetHomeObjectIfNeeded(
1704 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1705 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1707 __ CallRuntime(Runtime::kSetProperty, 4);
1712 case ObjectLiteral::Property::PROTOTYPE:
1713 // Duplicate receiver on stack.
1714 __ LoadP(r3, MemOperand(sp));
1716 VisitForStackValue(value);
1717 DCHECK(property->emit_store());
1718 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1720 case ObjectLiteral::Property::GETTER:
1721 if (property->emit_store()) {
1722 accessor_table.lookup(key)->second->getter = value;
1725 case ObjectLiteral::Property::SETTER:
1726 if (property->emit_store()) {
1727 accessor_table.lookup(key)->second->setter = value;
1733 // Emit code to define accessors, using only a single call to the runtime for
1734 // each pair of corresponding getters and setters.
1735 for (AccessorTable::Iterator it = accessor_table.begin();
1736 it != accessor_table.end(); ++it) {
1737 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1739 VisitForStackValue(it->first);
1740 EmitAccessor(it->second->getter);
1741 EmitSetHomeObjectIfNeeded(
1742 it->second->getter, 2,
1743 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1744 EmitAccessor(it->second->setter);
1745 EmitSetHomeObjectIfNeeded(
1746 it->second->setter, 3,
1747 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1748 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1750 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
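    // The five arguments assembled above are the receiver, the property name,
    // the getter, the setter, and the attributes (NONE).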
1753 // Object literals have two parts. The "static" part on the left contains no
1754 // computed property names, and so we can compute its map ahead of time; see
1755 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1756 // starts with the first computed property name, and continues with all
1757 // properties to its right. All the code from above initializes the static
1758 // component of the object literal, and arranges for the map of the result to
1759 // reflect the static order in which the keys appear. For the dynamic
1760 // properties, we compile them into a series of "SetOwnProperty" runtime
1761 // calls. This will preserve insertion order.
1762 for (; property_index < expr->properties()->length(); property_index++) {
1763 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1765 Expression* value = property->value();
1766 if (!result_saved) {
1767 __ push(r3); // Save result on the stack
1768 result_saved = true;
1771 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1774 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1775 DCHECK(!property->is_computed_name());
1776 VisitForStackValue(value);
1777 DCHECK(property->emit_store());
1778 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1780 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1781 VisitForStackValue(value);
1782 EmitSetHomeObjectIfNeeded(
1783 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1785 switch (property->kind()) {
1786 case ObjectLiteral::Property::CONSTANT:
1787 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1788 case ObjectLiteral::Property::COMPUTED:
1789 if (property->emit_store()) {
1790 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1792 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1798 case ObjectLiteral::Property::PROTOTYPE:
1802 case ObjectLiteral::Property::GETTER:
1803 __ mov(r3, Operand(Smi::FromInt(NONE)));
1805 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1808 case ObjectLiteral::Property::SETTER:
1809 __ mov(r3, Operand(Smi::FromInt(NONE)));
1811 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1817 if (expr->has_function()) {
1818 DCHECK(result_saved);
1819 __ LoadP(r3, MemOperand(sp));
1821 __ CallRuntime(Runtime::kToFastProperties, 1);
1825 context()->PlugTOS();
1827 context()->Plug(r3);
1830 // Verify that compilation exactly consumed the number of store ic slots that
1831 // the ObjectLiteral node had to offer.
1832 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1836 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1837 Comment cmnt(masm_, "[ ArrayLiteral");
1839 expr->BuildConstantElements(isolate());
1840 Handle<FixedArray> constant_elements = expr->constant_elements();
1841 bool has_fast_elements =
1842 IsFastObjectElementsKind(expr->constant_elements_kind());
1843 Handle<FixedArrayBase> constant_elements_values(
1844 FixedArrayBase::cast(constant_elements->get(1)));
1846 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1847 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1848 // If the only customer of allocation sites is transitioning, then
1849 // we can turn it off if we don't have anywhere else to transition to.
1850 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1853 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1854 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1855 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1856 __ mov(r4, Operand(constant_elements));
1857 if (MustCreateArrayLiteralWithRuntime(expr)) {
1858 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1859 __ Push(r6, r5, r4, r3);
1860 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1862 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1865 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1867 bool result_saved = false; // Is the result saved to the stack?
1868 ZoneList<Expression*>* subexprs = expr->values();
1869 int length = subexprs->length();
1871 // Emit code to evaluate all the non-constant subexpressions and to store
1872 // them into the newly cloned array.
1873 int array_index = 0;
1874 for (; array_index < length; array_index++) {
1875 Expression* subexpr = subexprs->at(array_index);
1876 if (subexpr->IsSpread()) break;
1877 // If the subexpression is a literal or a simple materialized literal it
1878 // is already set in the cloned array.
1879 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1881 if (!result_saved) {
1883 __ Push(Smi::FromInt(expr->literal_index()));
1884 result_saved = true;
1886 VisitForAccumulatorValue(subexpr);
1888 if (has_fast_elements) {
1889 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1890 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1891 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1892 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1893 // Update the write barrier for the array store.
1894 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1895 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1898 __ LoadSmiLiteral(r6, Smi::FromInt(array_index));
1899 StoreArrayLiteralElementStub stub(isolate());
1903 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1906 // In case the array literal contains spread expressions it has two parts. The
1907 // first part is the "static" array, which has a literal index and is handled
1908 // above. The second part starts at the first spread expression (inclusive);
1909 // these elements get appended to the array. Note that the number of elements
1910 // an iterable produces is unknown ahead of time.
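// e.g. for [a, b, ...it, c] the elements a and b are stored through the fast
// path above, while everything from ...it onward is appended through the
// runtime calls below.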
1911 if (array_index < length && result_saved) {
1912 __ Drop(1); // literal index
1914 result_saved = false;
1916 for (; array_index < length; array_index++) {
1917 Expression* subexpr = subexprs->at(array_index);
1920 if (subexpr->IsSpread()) {
1921 VisitForStackValue(subexpr->AsSpread()->expression());
1922 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1924 VisitForStackValue(subexpr);
1925 __ CallRuntime(Runtime::kAppendElement, 2);
1928 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1932 __ Drop(1); // literal index
1933 context()->PlugTOS();
1935 context()->Plug(r3);
1940 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1941 DCHECK(expr->target()->IsValidReferenceExpression());
1943 Comment cmnt(masm_, "[ Assignment");
1945 Property* property = expr->target()->AsProperty();
1946 LhsKind assign_type = Property::GetAssignType(property);
1948 // Evaluate LHS expression.
1949 switch (assign_type) {
1951 // Nothing to do here.
1953 case NAMED_PROPERTY:
1954 if (expr->is_compound()) {
1955 // We need the receiver both on the stack and in the register.
1956 VisitForStackValue(property->obj());
1957 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1959 VisitForStackValue(property->obj());
1962 case NAMED_SUPER_PROPERTY:
1964 property->obj()->AsSuperPropertyReference()->this_var());
1965 VisitForAccumulatorValue(
1966 property->obj()->AsSuperPropertyReference()->home_object());
1967 __ Push(result_register());
1968 if (expr->is_compound()) {
1969 const Register scratch = r4;
1970 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1971 __ Push(scratch, result_register());
1974 case KEYED_SUPER_PROPERTY: {
1975 const Register scratch = r4;
1977 property->obj()->AsSuperPropertyReference()->this_var());
1978 VisitForAccumulatorValue(
1979 property->obj()->AsSuperPropertyReference()->home_object());
1980 __ mr(scratch, result_register());
1981 VisitForAccumulatorValue(property->key());
1982 __ Push(scratch, result_register());
1983 if (expr->is_compound()) {
1984 const Register scratch1 = r5;
1985 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1986 __ Push(scratch1, scratch, result_register());
1990 case KEYED_PROPERTY:
1991 if (expr->is_compound()) {
1992 VisitForStackValue(property->obj());
1993 VisitForStackValue(property->key());
1994 __ LoadP(LoadDescriptor::ReceiverRegister(),
1995 MemOperand(sp, 1 * kPointerSize));
1996 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1998 VisitForStackValue(property->obj());
1999 VisitForStackValue(property->key());
2004 // For compound assignments we need another deoptimization point after the
2005 // variable/property load.
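// e.g. for o.x += 1 the receiver prepared above is used to load o.x; that
// load is the extra deopt point, after which the addition and the store are
// emitted below.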
2006 if (expr->is_compound()) {
2008 AccumulatorValueContext context(this);
2009 switch (assign_type) {
2011 EmitVariableLoad(expr->target()->AsVariableProxy());
2012 PrepareForBailout(expr->target(), TOS_REG);
2014 case NAMED_PROPERTY:
2015 EmitNamedPropertyLoad(property);
2016 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2018 case NAMED_SUPER_PROPERTY:
2019 EmitNamedSuperPropertyLoad(property);
2020 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2022 case KEYED_SUPER_PROPERTY:
2023 EmitKeyedSuperPropertyLoad(property);
2024 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2026 case KEYED_PROPERTY:
2027 EmitKeyedPropertyLoad(property);
2028 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2033 Token::Value op = expr->binary_op();
2034 __ push(r3); // Left operand goes on the stack.
2035 VisitForAccumulatorValue(expr->value());
2037 SetSourcePosition(expr->position() + 1);
2038 AccumulatorValueContext context(this);
2039 if (ShouldInlineSmiCase(op)) {
2040 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
2043 EmitBinaryOp(expr->binary_operation(), op);
2046 // Deoptimization point in case the binary operation may have side effects.
2047 PrepareForBailout(expr->binary_operation(), TOS_REG);
2049 VisitForAccumulatorValue(expr->value());
2052 // Record source position before possible IC call.
2053 SetSourcePosition(expr->position());
2056 switch (assign_type) {
2058 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2059 expr->op(), expr->AssignmentSlot());
2060 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2061 context()->Plug(r3);
2063 case NAMED_PROPERTY:
2064 EmitNamedPropertyAssignment(expr);
2066 case NAMED_SUPER_PROPERTY:
2067 EmitNamedSuperPropertyStore(property);
2068 context()->Plug(r3);
2070 case KEYED_SUPER_PROPERTY:
2071 EmitKeyedSuperPropertyStore(property);
2072 context()->Plug(r3);
2074 case KEYED_PROPERTY:
2075 EmitKeyedPropertyAssignment(expr);
2081 void FullCodeGenerator::VisitYield(Yield* expr) {
2082 Comment cmnt(masm_, "[ Yield");
2083 // Evaluate yielded value first; the initial iterator definition depends on
2084 // this. It stays on the stack while we update the iterator.
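// e.g. for 'yield e' the value of e is boxed into an iterator result
// {value: e, done: false} for the caller of next(); when the generator is
// resumed, the value passed to next() becomes the result of the yield.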
2085 VisitForStackValue(expr->expression());
2087 switch (expr->yield_kind()) {
2088 case Yield::kSuspend:
2089 // Pop value from top-of-stack slot; box result into result register.
2090 EmitCreateIteratorResult(false);
2091 __ push(result_register());
2093 case Yield::kInitial: {
2094 Label suspend, continuation, post_runtime, resume;
2098 __ bind(&continuation);
2102 VisitForAccumulatorValue(expr->generator_object());
2103 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2104 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2105 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2107 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2109 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2110 kLRHasBeenSaved, kDontSaveFPRegs);
2111 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2113 __ beq(&post_runtime);
2114 __ push(r3); // generator object
2115 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2116 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2117 __ bind(&post_runtime);
2118 __ pop(result_register());
2119 EmitReturnSequence();
2122 context()->Plug(result_register());
2126 case Yield::kFinal: {
2127 VisitForAccumulatorValue(expr->generator_object());
2128 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2129 __ StoreP(r4, FieldMemOperand(result_register(),
2130 JSGeneratorObject::kContinuationOffset),
2132 // Pop value from top-of-stack slot, box result into result register.
2133 EmitCreateIteratorResult(true);
2134 EmitUnwindBeforeReturn();
2135 EmitReturnSequence();
2139 case Yield::kDelegating: {
2140 VisitForStackValue(expr->generator_object());
2142 // Initial stack layout is as follows:
2143 // [sp + 1 * kPointerSize] iter
2144 // [sp + 0 * kPointerSize] g
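// Roughly, the delegation is compiled as the loop
//   received = undefined;
//   for (;;) { result = iter.next(received);
//              if (result.done) break;
//              received = yield result.value; }
// with a catch handler that forwards an exception thrown into the generator
// as a single iter.throw(e) call (see l_catch below).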
2146 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2147 Label l_next, l_call;
2148 Register load_receiver = LoadDescriptor::ReceiverRegister();
2149 Register load_name = LoadDescriptor::NameRegister();
2151 // Initial send value is undefined.
2152 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2155 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2157 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2158 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2159 __ Push(load_name, r6, r3); // "throw", iter, except
2162 // try { received = %yield result }
2163 // Shuffle the received result above a try handler and yield it without re-boxing.
2166 __ pop(r3); // result
2167 EnterTryBlock(expr->index(), &l_catch);
2168 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2169 __ push(r3); // result
2171 __ bind(&l_continuation);
2173 __ bind(&l_suspend);
2174 const int generator_object_depth = kPointerSize + try_block_size;
2175 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2177 __ Push(Smi::FromInt(expr->index())); // handler-index
2178 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2179 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2180 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2182 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2184 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2185 kLRHasBeenSaved, kDontSaveFPRegs);
2186 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2187 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2188 __ pop(r3); // result
2189 EmitReturnSequence();
2190 __ bind(&l_resume); // received in r3
2191 ExitTryBlock(expr->index());
2193 // receiver = iter; f = 'next'; arg = received;
2196 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2197 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2198 __ Push(load_name, r6, r3); // "next", iter, received
2200 // result = receiver[f](arg);
2202 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2203 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2204 __ mov(LoadDescriptor::SlotRegister(),
2205 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2206 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2207 CallIC(ic, TypeFeedbackId::None());
2209 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2210 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2213 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2214 __ Drop(1); // The function is still on the stack; drop it.
2216 // if (!result.done) goto l_try;
2217 __ Move(load_receiver, r3);
2219 __ push(load_receiver); // save result
2220 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2221 __ mov(LoadDescriptor::SlotRegister(),
2222 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2223 CallLoadIC(NOT_CONTEXTUAL); // r3=result.done
2224 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2226 __ cmpi(r3, Operand::Zero());
2230 __ pop(load_receiver); // result
2231 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2232 __ mov(LoadDescriptor::SlotRegister(),
2233 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2234 CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
2235 context()->DropAndPlug(2, r3); // drop iter and g
2242 void FullCodeGenerator::EmitGeneratorResume(
2243 Expression* generator, Expression* value,
2244 JSGeneratorObject::ResumeMode resume_mode) {
2245 // The value stays in r3, and is ultimately read by the resumed generator, as
2246 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2247 // is read to throw the value when the resumed generator is already closed.
2248 // r4 will hold the generator object until the activation has been resumed.
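// e.g. g.next(v) eventually reaches this code with g as the generator
// expression and v as the value; once the frame below has been rebuilt, v is
// what the suspended yield expression evaluates to.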
2249 VisitForStackValue(generator);
2250 VisitForAccumulatorValue(value);
2253 // Load suspended function and context.
2254 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2255 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2257 // Load receiver and store as the first argument.
2258 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2261 // Push holes for the rest of the arguments to the generator function.
2262 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2264 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2265 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2266 Label argument_loop, push_frame;
2267 #if V8_TARGET_ARCH_PPC64
2268 __ cmpi(r6, Operand::Zero());
2269 __ beq(&push_frame);
2271 __ SmiUntag(r6, SetRC);
2272 __ beq(&push_frame, cr0);
2275 __ bind(&argument_loop);
2277 __ bdnz(&argument_loop);
2279 // Enter a new JavaScript frame, and initialize its slots as they were when
2280 // the generator was suspended.
2281 Label resume_frame, done;
2282 __ bind(&push_frame);
2283 __ b(&resume_frame, SetLK);
2285 __ bind(&resume_frame);
2286 // lr = return address.
2287 // fp = caller's frame pointer.
2288 // cp = callee's context,
2289 // r7 = callee's JS function.
2290 __ PushFixedFrame(r7);
2291 // Adjust FP to point to saved FP.
2292 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2294 // Load the operand stack size.
2295 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2296 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2297 __ SmiUntag(r6, SetRC);
2299 // If we are sending a value and there is no operand stack, we can jump back in directly.
2302 if (resume_mode == JSGeneratorObject::NEXT) {
2304 __ bne(&slow_resume, cr0);
2305 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2307 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2308 if (FLAG_enable_embedded_constant_pool) {
2309 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
2311 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2314 __ LoadSmiLiteral(r5,
2315 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2316 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2319 __ bind(&slow_resume);
2322 __ beq(&call_resume, cr0);
2325 // Otherwise, we push holes for the operand stack and call the runtime to fix
2326 // up the stack and the handlers.
2329 __ bind(&operand_loop);
2331 __ bdnz(&operand_loop);
2333 __ bind(&call_resume);
2334 DCHECK(!result_register().is(r4));
2335 __ Push(r4, result_register());
2336 __ Push(Smi::FromInt(resume_mode));
2337 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2338 // Not reached: the runtime call returns elsewhere.
2339 __ stop("not-reached");
2342 context()->Plug(result_register());
2346 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2350 const int instance_size = 5 * kPointerSize;
2351 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2354 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2357 __ bind(&gc_required);
2358 __ Push(Smi::FromInt(instance_size));
2359 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2360 __ LoadP(context_register(),
2361 MemOperand(fp, StandardFrameConstants::kContextOffset));
2363 __ bind(&allocated);
2364 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2365 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2366 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2368 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2369 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2370 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2371 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2372 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2374 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2377 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2380 // Only the value field needs a write barrier, as the other values are in the root set.
2382 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2383 kLRHasBeenSaved, kDontSaveFPRegs);
2387 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2388 SetSourcePosition(prop->position());
2389 Literal* key = prop->key()->AsLiteral();
2390 DCHECK(!prop->IsSuperAccess());
2392 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2393 __ mov(LoadDescriptor::SlotRegister(),
2394 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2395 CallLoadIC(NOT_CONTEXTUAL);
2399 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2400 // Stack: receiver, home_object.
2401 SetSourcePosition(prop->position());
2402 Literal* key = prop->key()->AsLiteral();
2403 DCHECK(!key->value()->IsSmi());
2404 DCHECK(prop->IsSuperAccess());
2406 __ Push(key->value());
2407 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2411 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2412 SetSourcePosition(prop->position());
2413 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2414 __ mov(LoadDescriptor::SlotRegister(),
2415 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2420 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2421 // Stack: receiver, home_object, key.
2422 SetSourcePosition(prop->position());
2424 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2428 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2430 Expression* left_expr,
2431 Expression* right_expr) {
2432 Label done, smi_case, stub_call;
2434 Register scratch1 = r5;
2435 Register scratch2 = r6;
2437 // Get the arguments.
2439 Register right = r3;
2442 // Perform combined smi check on both operands.
2443 __ orx(scratch1, left, right);
2444 STATIC_ASSERT(kSmiTag == 0);
2445 JumpPatchSite patch_site(masm_);
2446 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2448 __ bind(&stub_call);
2449 Handle<Code> code = CodeFactory::BinaryOpIC(
2450 isolate(), op, language_mode()).code();
2451 CallIC(code, expr->BinaryOperationFeedbackId());
2452 patch_site.EmitPatchInfo();
2456 // Smi case. This code works the same way as the smi-smi case in the type
2457 // recording binary operation stub.
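// e.g. for a - b with two smi operands the tags cancel, so the tagged values
// can be subtracted directly; overflow or a non-smi result branches back to
// stub_call above.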
2460 __ GetLeastBitsFromSmi(scratch1, right, 5);
2461 __ ShiftRightArith(right, left, scratch1);
2462 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2465 __ GetLeastBitsFromSmi(scratch2, right, 5);
2466 #if V8_TARGET_ARCH_PPC64
2467 __ ShiftLeft_(right, left, scratch2);
2469 __ SmiUntag(scratch1, left);
2470 __ ShiftLeft_(scratch1, scratch1, scratch2);
2471 // Check that the *signed* result fits in a smi
2472 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2473 __ SmiTag(right, scratch1);
2478 __ SmiUntag(scratch1, left);
2479 __ GetLeastBitsFromSmi(scratch2, right, 5);
2480 __ srw(scratch1, scratch1, scratch2);
2481 // Unsigned shift is not allowed to produce a negative number.
2482 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2483 __ SmiTag(right, scratch1);
2487 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2488 __ BranchOnOverflow(&stub_call);
2489 __ mr(right, scratch1);
2493 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2494 __ BranchOnOverflow(&stub_call);
2495 __ mr(right, scratch1);
2500 #if V8_TARGET_ARCH_PPC64
2501 // Remove tag from both operands.
2502 __ SmiUntag(ip, right);
2503 __ SmiUntag(r0, left);
2504 __ Mul(scratch1, r0, ip);
2505 // Check for overflowing the smi range - no overflow if higher 33 bits of
2506 // the result are identical.
2507 __ TestIfInt32(scratch1, r0);
2510 __ SmiUntag(ip, right);
2511 __ mullw(scratch1, left, ip);
2512 __ mulhw(scratch2, left, ip);
2513 // Check for overflowing the smi range - no overflow if higher 33 bits of
2514 // the result are identical.
2515 __ TestIfInt32(scratch2, scratch1, ip);
2518 // Go slow on zero result to handle -0.
2519 __ cmpi(scratch1, Operand::Zero());
2521 #if V8_TARGET_ARCH_PPC64
2522 __ SmiTag(right, scratch1);
2524 __ mr(right, scratch1);
2527 // We need -0 if we were multiplying a negative number by 0 to get 0.
2528 // We know one of them was zero.
2530 __ add(scratch2, right, left);
2531 __ cmpi(scratch2, Operand::Zero());
2533 __ LoadSmiLiteral(right, Smi::FromInt(0));
2537 __ orx(right, left, right);
2539 case Token::BIT_AND:
2540 __ and_(right, left, right);
2542 case Token::BIT_XOR:
2543 __ xor_(right, left, right);
2550 context()->Plug(r3);
2554 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2555 // Constructor is in r3.
2556 DCHECK(lit != NULL);
2559 // No access check is needed here since the constructor is created by the class literal.
2561 Register scratch = r4;
2563 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2566 for (int i = 0; i < lit->properties()->length(); i++) {
2567 ObjectLiteral::Property* property = lit->properties()->at(i);
2568 Expression* value = property->value();
2570 if (property->is_static()) {
2571 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2573 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2576 EmitPropertyKey(property, lit->GetIdForProperty(i));
2578 // The static prototype property is read-only. We handle the non-computed
2579 // property name case in the parser. Since this is the only case where we
2580 // need to check for an own read-only property, we special-case it here so
2581 // the check is not performed for every property.
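// e.g. class C { static [k]() {} } — if k evaluates to "prototype" the
// runtime call below throws, since a class may not define a static
// 'prototype' property.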
2582 if (property->is_static() && property->is_computed_name()) {
2583 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2587 VisitForStackValue(value);
2588 EmitSetHomeObjectIfNeeded(value, 2);
2590 switch (property->kind()) {
2591 case ObjectLiteral::Property::CONSTANT:
2592 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2593 case ObjectLiteral::Property::PROTOTYPE:
2595 case ObjectLiteral::Property::COMPUTED:
2596 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2599 case ObjectLiteral::Property::GETTER:
2600 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2602 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2605 case ObjectLiteral::Property::SETTER:
2606 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2608 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2617 __ CallRuntime(Runtime::kToFastProperties, 1);
2620 __ CallRuntime(Runtime::kToFastProperties, 1);
2624 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2626 Handle<Code> code = CodeFactory::BinaryOpIC(
2627 isolate(), op, language_mode()).code();
2628 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2629 CallIC(code, expr->BinaryOperationFeedbackId());
2630 patch_site.EmitPatchInfo();
2631 context()->Plug(r3);
2635 void FullCodeGenerator::EmitAssignment(Expression* expr,
2636 FeedbackVectorICSlot slot) {
2637 DCHECK(expr->IsValidReferenceExpression());
2639 Property* prop = expr->AsProperty();
2640 LhsKind assign_type = Property::GetAssignType(prop);
2642 switch (assign_type) {
2644 Variable* var = expr->AsVariableProxy()->var();
2645 EffectContext context(this);
2646 EmitVariableAssignment(var, Token::ASSIGN, slot);
2649 case NAMED_PROPERTY: {
2650 __ push(r3); // Preserve value.
2651 VisitForAccumulatorValue(prop->obj());
2652 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2653 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2654 __ mov(StoreDescriptor::NameRegister(),
2655 Operand(prop->key()->AsLiteral()->value()));
2656 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2660 case NAMED_SUPER_PROPERTY: {
2662 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2663 VisitForAccumulatorValue(
2664 prop->obj()->AsSuperPropertyReference()->home_object());
2665 // stack: value, this; r3: home_object
2666 Register scratch = r5;
2667 Register scratch2 = r6;
2668 __ mr(scratch, result_register()); // home_object
2669 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2670 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2671 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2672 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2673 // stack: this, home_object; r3: value
2674 EmitNamedSuperPropertyStore(prop);
2677 case KEYED_SUPER_PROPERTY: {
2679 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2681 prop->obj()->AsSuperPropertyReference()->home_object());
2682 VisitForAccumulatorValue(prop->key());
2683 Register scratch = r5;
2684 Register scratch2 = r6;
2685 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2686 // stack: value, this, home_object; r3: key, r6: value
2687 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2688 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2689 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2690 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2691 __ StoreP(r3, MemOperand(sp, 0));
2692 __ Move(r3, scratch2);
2693 // stack: this, home_object, key; r3: value.
2694 EmitKeyedSuperPropertyStore(prop);
2697 case KEYED_PROPERTY: {
2698 __ push(r3); // Preserve value.
2699 VisitForStackValue(prop->obj());
2700 VisitForAccumulatorValue(prop->key());
2701 __ Move(StoreDescriptor::NameRegister(), r3);
2702 __ Pop(StoreDescriptor::ValueRegister(),
2703 StoreDescriptor::ReceiverRegister());
2704 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2706 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2711 context()->Plug(r3);
2715 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2716 Variable* var, MemOperand location) {
2717 __ StoreP(result_register(), location, r0);
2718 if (var->IsContextSlot()) {
2719 // RecordWrite may destroy all its register arguments.
2720 __ mr(r6, result_register());
2721 int offset = Context::SlotOffset(var->index());
2722 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2728 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2729 FeedbackVectorICSlot slot) {
2730 if (var->IsUnallocated()) {
2731 // Global var, const, or let.
2732 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2733 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2734 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2737 } else if (var->mode() == LET && op != Token::INIT_LET) {
2738 // Non-initializing assignment to let variable needs a write barrier.
2739 DCHECK(!var->IsLookupSlot());
2740 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
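// e.g. { x = 1; let x; } — the slot still holds the hole, so the check below
// throws a ReferenceError for an assignment in the temporal dead zone.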
2742 MemOperand location = VarOperand(var, r4);
2743 __ LoadP(r6, location);
2744 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2746 __ mov(r6, Operand(var->name()));
2748 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2749 // Perform the assignment.
2751 EmitStoreToStackLocalOrContextSlot(var, location);
2753 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2754 // Assignment to const variable needs a write barrier.
2755 DCHECK(!var->IsLookupSlot());
2756 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2758 MemOperand location = VarOperand(var, r4);
2759 __ LoadP(r6, location);
2760 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2761 __ bne(&const_error);
2762 __ mov(r6, Operand(var->name()));
2764 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2765 __ bind(&const_error);
2766 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2768 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2769 if (var->IsLookupSlot()) {
2770 // Assignment to var.
2771 __ push(r3); // Value.
2772 __ mov(r4, Operand(var->name()));
2773 __ mov(r3, Operand(Smi::FromInt(language_mode())));
2774 __ Push(cp, r4, r3); // Context, name, language mode.
2775 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2777 // Assignment to var or initializing assignment to let/const in harmony mode.
2779 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2780 MemOperand location = VarOperand(var, r4);
2781 if (generate_debug_code_ && op == Token::INIT_LET) {
2782 // Check for an uninitialized let binding.
2783 __ LoadP(r5, location);
2784 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2785 __ Check(eq, kLetBindingReInitialization);
2787 EmitStoreToStackLocalOrContextSlot(var, location);
2789 } else if (op == Token::INIT_CONST_LEGACY) {
2790 // Const initializers need a write barrier.
2791 DCHECK(var->mode() == CONST_LEGACY);
2792 DCHECK(!var->IsParameter()); // No const parameters.
2793 if (var->IsLookupSlot()) {
2795 __ mov(r3, Operand(var->name()));
2796 __ Push(cp, r3); // Context and name.
2797 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2799 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2801 MemOperand location = VarOperand(var, r4);
2802 __ LoadP(r5, location);
2803 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2805 EmitStoreToStackLocalOrContextSlot(var, location);
2810 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2811 if (is_strict(language_mode())) {
2812 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2814 // Silently ignore store in sloppy mode.
2819 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2820 // Assignment to a property, using a named store IC.
2821 Property* prop = expr->target()->AsProperty();
2822 DCHECK(prop != NULL);
2823 DCHECK(prop->key()->IsLiteral());
2825 // Record source code position before IC call.
2826 SetSourcePosition(expr->position());
2827 __ mov(StoreDescriptor::NameRegister(),
2828 Operand(prop->key()->AsLiteral()->value()));
2829 __ pop(StoreDescriptor::ReceiverRegister());
2830 if (FLAG_vector_stores) {
2831 EmitLoadStoreICSlot(expr->AssignmentSlot());
2834 CallStoreIC(expr->AssignmentFeedbackId());
2837 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2838 context()->Plug(r3);
2842 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2843 // Assignment to named property of super.
2845 // stack : receiver ('this'), home_object
2846 DCHECK(prop != NULL);
2847 Literal* key = prop->key()->AsLiteral();
2848 DCHECK(key != NULL);
2850 __ Push(key->value());
2852 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2853 : Runtime::kStoreToSuper_Sloppy),
2858 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2859 // Assignment to a keyed property of super.
2861 // stack : receiver ('this'), home_object, key
2862 DCHECK(prop != NULL);
2866 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2867 : Runtime::kStoreKeyedToSuper_Sloppy),
2872 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2873 // Assignment to a property, using a keyed store IC.
2875 // Record source code position before IC call.
2876 SetSourcePosition(expr->position());
2877 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2878 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2881 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2882 if (FLAG_vector_stores) {
2883 EmitLoadStoreICSlot(expr->AssignmentSlot());
2886 CallIC(ic, expr->AssignmentFeedbackId());
2889 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2890 context()->Plug(r3);
2894 void FullCodeGenerator::VisitProperty(Property* expr) {
2895 Comment cmnt(masm_, "[ Property");
2896 Expression* key = expr->key();
2898 if (key->IsPropertyName()) {
2899 if (!expr->IsSuperAccess()) {
2900 VisitForAccumulatorValue(expr->obj());
2901 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2902 EmitNamedPropertyLoad(expr);
2904 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2906 expr->obj()->AsSuperPropertyReference()->home_object());
2907 EmitNamedSuperPropertyLoad(expr);
2910 if (!expr->IsSuperAccess()) {
2911 VisitForStackValue(expr->obj());
2912 VisitForAccumulatorValue(expr->key());
2913 __ Move(LoadDescriptor::NameRegister(), r3);
2914 __ pop(LoadDescriptor::ReceiverRegister());
2915 EmitKeyedPropertyLoad(expr);
2917 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2919 expr->obj()->AsSuperPropertyReference()->home_object());
2920 VisitForStackValue(expr->key());
2921 EmitKeyedSuperPropertyLoad(expr);
2924 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2925 context()->Plug(r3);
2929 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2931 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2935 // Code common for calls using the IC.
2936 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2937 Expression* callee = expr->expression();
2939 CallICState::CallType call_type =
2940 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2942 // Get the target function.
2943 if (call_type == CallICState::FUNCTION) {
2945 StackValueContext context(this);
2946 EmitVariableLoad(callee->AsVariableProxy());
2947 PrepareForBailout(callee, NO_REGISTERS);
2949 // Push undefined as receiver. This is patched in the method prologue if it
2950 // is a sloppy mode method.
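// e.g. for a plain call f(x) the receiver pushed here is undefined; a
// sloppy-mode callee replaces it with the global proxy in its own prologue.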
2951 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2954 // Load the function from the receiver.
2955 DCHECK(callee->IsProperty());
2956 DCHECK(!callee->AsProperty()->IsSuperAccess());
2957 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2958 EmitNamedPropertyLoad(callee->AsProperty());
2959 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2960 // Push the target function under the receiver.
2961 __ LoadP(r0, MemOperand(sp, 0));
2963 __ StoreP(r3, MemOperand(sp, kPointerSize));
2966 EmitCall(expr, call_type);
2970 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2971 Expression* callee = expr->expression();
2972 DCHECK(callee->IsProperty());
2973 Property* prop = callee->AsProperty();
2974 DCHECK(prop->IsSuperAccess());
2976 SetSourcePosition(prop->position());
2977 Literal* key = prop->key()->AsLiteral();
2978 DCHECK(!key->value()->IsSmi());
2979 // Load the function from the receiver.
2980 const Register scratch = r4;
2981 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2982 VisitForAccumulatorValue(super_ref->home_object());
2984 VisitForAccumulatorValue(super_ref->this_var());
2985 __ Push(scratch, r3, r3, scratch);
2986 __ Push(key->value());
2990 // - this (receiver)
2991 // - this (receiver) <-- LoadFromSuper will pop here and below.
2994 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2996 // Replace home_object with target function.
2997 __ StoreP(r3, MemOperand(sp, kPointerSize));
3000 // - target function
3001 // - this (receiver)
3002 EmitCall(expr, CallICState::METHOD);
3006 // Code common for calls using the IC.
3007 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
3009 VisitForAccumulatorValue(key);
3011 Expression* callee = expr->expression();
3013 // Load the function from the receiver.
3014 DCHECK(callee->IsProperty());
3015 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3016 __ Move(LoadDescriptor::NameRegister(), r3);
3017 EmitKeyedPropertyLoad(callee->AsProperty());
3018 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3020 // Push the target function under the receiver.
3021 __ LoadP(ip, MemOperand(sp, 0));
3023 __ StoreP(r3, MemOperand(sp, kPointerSize));
3025 EmitCall(expr, CallICState::METHOD);
3029 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3030 Expression* callee = expr->expression();
3031 DCHECK(callee->IsProperty());
3032 Property* prop = callee->AsProperty();
3033 DCHECK(prop->IsSuperAccess());
3035 SetSourcePosition(prop->position());
3036 // Load the function from the receiver.
3037 const Register scratch = r4;
3038 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3039 VisitForAccumulatorValue(super_ref->home_object());
3041 VisitForAccumulatorValue(super_ref->this_var());
3042 __ Push(scratch, r3, r3, scratch);
3043 VisitForStackValue(prop->key());
3047 // - this (receiver)
3048 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3051 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3053 // Replace home_object with target function.
3054 __ StoreP(r3, MemOperand(sp, kPointerSize));
3057 // - target function
3058 // - this (receiver)
3059 EmitCall(expr, CallICState::METHOD);
3063 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3064 // Load the arguments.
3065 ZoneList<Expression*>* args = expr->arguments();
3066 int arg_count = args->length();
3068 PreservePositionScope scope(masm()->positions_recorder());
3069 for (int i = 0; i < arg_count; i++) {
3070 VisitForStackValue(args->at(i));
3074 // Record source position of the IC call.
3075 SetSourcePosition(expr->position());
3076 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3077 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
3078 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3079 // Don't assign a type feedback id to the IC, since type feedback is provided
3080 // by the vector above.
3083 RecordJSReturnSite(expr);
3084 // Restore context register.
3085 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3086 context()->DropAndPlug(1, r3);
3090 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3091 // r8: copy of the first argument or undefined if it doesn't exist.
3092 if (arg_count > 0) {
3093 __ LoadP(r8, MemOperand(sp, arg_count * kPointerSize), r0);
3095 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
3098 // r7: the enclosing function.
3099 __ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3101 // r6: the receiver of the enclosing function.
3102 Variable* this_var = scope()->LookupThis();
3103 DCHECK_NOT_NULL(this_var);
3104 GetVar(r6, this_var);
3106 // r5: language mode.
3107 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
3109 // r4: the start position of the scope the call resides in.
3110 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3112 // Do the runtime call.
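// Together with the copy of the function already pushed by the caller, the
// five registers below form the six arguments of
// Runtime::kResolvePossiblyDirectEval.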
3113 __ Push(r8, r7, r6, r5, r4);
3114 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3118 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3119 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3120 Variable* this_var = super_ref->this_var()->var();
3121 GetVar(r4, this_var);
3122 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
3123 Label uninitialized_this;
3124 __ beq(&uninitialized_this);
3125 __ mov(r4, Operand(this_var->name()));
3127 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3128 __ bind(&uninitialized_this);
3130 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3134 void FullCodeGenerator::VisitCall(Call* expr) {
3136 // We want to verify that RecordJSReturnSite gets called on all paths
3137 // through this function. Avoid early returns.
3138 expr->return_is_recorded_ = false;
3141 Comment cmnt(masm_, "[ Call");
3142 Expression* callee = expr->expression();
3143 Call::CallType call_type = expr->GetCallType(isolate());
3145 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3146 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3147 // to resolve the function we need to call and the receiver of the
3148 // call. Then we call the resolved function using the given arguments.
3150 ZoneList<Expression*>* args = expr->arguments();
3151 int arg_count = args->length();
3154 PreservePositionScope pos_scope(masm()->positions_recorder());
3155 VisitForStackValue(callee);
3156 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3157 __ push(r5); // Reserved receiver slot.
3159 // Push the arguments.
3160 for (int i = 0; i < arg_count; i++) {
3161 VisitForStackValue(args->at(i));
3164 // Push a copy of the function (found below the arguments) and
3166 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3168 EmitResolvePossiblyDirectEval(arg_count);
3170 // The runtime call returns a pair of values in r3 (function) and
3171 // r4 (receiver). Touch up the stack with the right values.
3172 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3173 __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3175 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3178 // Record source position for debugger.
3179 SetSourcePosition(expr->position());
3180 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3181 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3183 RecordJSReturnSite(expr);
3184 // Restore context register.
3185 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3186 context()->DropAndPlug(1, r3);
3187 } else if (call_type == Call::GLOBAL_CALL) {
3188 EmitCallWithLoadIC(expr);
3190 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3191 // Call to a lookup slot (dynamically introduced variable).
3192 VariableProxy* proxy = callee->AsVariableProxy();
3196 PreservePositionScope scope(masm()->positions_recorder());
3197 // Generate code for loading from variables potentially shadowed
3198 // by eval-introduced variables.
3199 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3203 // Call the runtime to find the function to call (returned in r3)
3204 // and the object holding it (returned in r4).
3205 DCHECK(!context_register().is(r5));
3206 __ mov(r5, Operand(proxy->name()));
3207 __ Push(context_register(), r5);
3208 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3209 __ Push(r3, r4); // Function, receiver.
3210 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3212 // If fast case code has been generated, emit code to push the
3213 // function and receiver and have the slow path jump around this
3215 if (done.is_linked()) {
3221 // The receiver is implicitly the global receiver. Indicate this
3222 // by passing undefined to the call function stub.
3223 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3228 // The receiver is either the global receiver or an object found
3229 // by LoadContextSlot.
3231 } else if (call_type == Call::PROPERTY_CALL) {
3232 Property* property = callee->AsProperty();
3233 bool is_named_call = property->key()->IsPropertyName();
3234 if (property->IsSuperAccess()) {
3235 if (is_named_call) {
3236 EmitSuperCallWithLoadIC(expr);
3238 EmitKeyedSuperCallWithLoadIC(expr);
3242 PreservePositionScope scope(masm()->positions_recorder());
3243 VisitForStackValue(property->obj());
3245 if (is_named_call) {
3246 EmitCallWithLoadIC(expr);
3248 EmitKeyedCallWithLoadIC(expr, property->key());
3251 } else if (call_type == Call::SUPER_CALL) {
3252 EmitSuperConstructorCall(expr);
3254 DCHECK(call_type == Call::OTHER_CALL);
3255 // Call to an arbitrary expression not handled specially above.
3257 PreservePositionScope scope(masm()->positions_recorder());
3258 VisitForStackValue(callee);
3260 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3262 // Emit function call.
3267 // RecordJSReturnSite should have been called.
3268 DCHECK(expr->return_is_recorded_);
3273 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3274 Comment cmnt(masm_, "[ CallNew");
3275 // According to ECMA-262, section 11.2.2, page 44, the function
3276 // expression in new calls must be evaluated before the arguments.
3279 // Push constructor on the stack. If it's not a function it's used as
3280 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is discarded.
3282 DCHECK(!expr->expression()->IsSuperPropertyReference());
3283 VisitForStackValue(expr->expression());
3285 // Push the arguments ("left-to-right") on the stack.
3286 ZoneList<Expression*>* args = expr->arguments();
3287 int arg_count = args->length();
3288 for (int i = 0; i < arg_count; i++) {
3289 VisitForStackValue(args->at(i));
3292 // Call the construct call builtin that handles allocation and
3293 // constructor invocation.
3294 SetSourcePosition(expr->position());
3296 // Load function and argument count into r4 and r3.
3297 __ mov(r3, Operand(arg_count));
3298 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3300 // Record call targets in unoptimized code.
3301 if (FLAG_pretenuring_call_new) {
3302 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3303 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3304 expr->CallNewFeedbackSlot().ToInt() + 1);
3307 __ Move(r5, FeedbackVector());
3308 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3310 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3311 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3312 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3313 context()->Plug(r3);
3317 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3318 SuperCallReference* super_call_ref =
3319 expr->expression()->AsSuperCallReference();
3320 DCHECK_NOT_NULL(super_call_ref);
3322 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3323 VisitForStackValue(new_target_proxy);
3325 EmitLoadSuperConstructor(super_call_ref);
3326 __ push(result_register());
3328 // Push the arguments ("left-to-right") on the stack.
3329 ZoneList<Expression*>* args = expr->arguments();
3330 int arg_count = args->length();
3331 for (int i = 0; i < arg_count; i++) {
3332 VisitForStackValue(args->at(i));
3335 // Call the construct call builtin that handles allocation and
3336 // constructor invocation.
3337 SetSourcePosition(expr->position());
3339 // Load function and argument count into r4 and r3.
3340 __ mov(r3, Operand(arg_count));
3341 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3343 // Record call targets in unoptimized code.
3344 if (FLAG_pretenuring_call_new) {
3346 /* TODO(dslomov): support pretenuring.
3347 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3348 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3349 expr->CallNewFeedbackSlot().ToInt() + 1);
3353 __ Move(r5, FeedbackVector());
3354 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
3356 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3357 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3361 RecordJSReturnSite(expr);
3363 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3364 context()->Plug(r3);
3368 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3369 ZoneList<Expression*>* args = expr->arguments();
3370 DCHECK(args->length() == 1);
3372 VisitForAccumulatorValue(args->at(0));
3374 Label materialize_true, materialize_false;
3375 Label* if_true = NULL;
3376 Label* if_false = NULL;
3377 Label* fall_through = NULL;
3378 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3379 &if_false, &fall_through);
3381 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3382 __ TestIfSmi(r3, r0);
3383 Split(eq, if_true, if_false, fall_through, cr0);
3385 context()->Plug(if_true, if_false);
3389 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3390 ZoneList<Expression*>* args = expr->arguments();
3391 DCHECK(args->length() == 1);
3393 VisitForAccumulatorValue(args->at(0));
3395 Label materialize_true, materialize_false;
3396 Label* if_true = NULL;
3397 Label* if_false = NULL;
3398 Label* fall_through = NULL;
3399 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3400 &if_false, &fall_through);
3402 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3403 __ TestIfPositiveSmi(r3, r0);
3404 Split(eq, if_true, if_false, fall_through, cr0);
3406 context()->Plug(if_true, if_false);
3410 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3411 ZoneList<Expression*>* args = expr->arguments();
3412 DCHECK(args->length() == 1);
3414 VisitForAccumulatorValue(args->at(0));
3416 Label materialize_true, materialize_false;
3417 Label* if_true = NULL;
3418 Label* if_false = NULL;
3419 Label* fall_through = NULL;
3420 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3421 &if_false, &fall_through);
3423 __ JumpIfSmi(r3, if_false);
3424 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3427 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3428 // Undetectable objects behave like undefined when tested with typeof.
3429 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3430 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3431 __ bne(if_false, cr0);
3432 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3433 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3435 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3436 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3437 Split(le, if_true, if_false, fall_through);
3439 context()->Plug(if_true, if_false);
3443 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3444 ZoneList<Expression*>* args = expr->arguments();
3445 DCHECK(args->length() == 1);
3447 VisitForAccumulatorValue(args->at(0));
3449 Label materialize_true, materialize_false;
3450 Label* if_true = NULL;
3451 Label* if_false = NULL;
3452 Label* fall_through = NULL;
3453 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3454 &if_false, &fall_through);
3456 __ JumpIfSmi(r3, if_false);
3457 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3458 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3459 Split(ge, if_true, if_false, fall_through);
3461 context()->Plug(if_true, if_false);
3465 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3466 ZoneList<Expression*>* args = expr->arguments();
3467 DCHECK(args->length() == 1);
3469 VisitForAccumulatorValue(args->at(0));
3471 Label materialize_true, materialize_false;
3472 Label* if_true = NULL;
3473 Label* if_false = NULL;
3474 Label* fall_through = NULL;
3475 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3476 &if_false, &fall_through);
3478 __ JumpIfSmi(r3, if_false);
3479 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3480 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3481 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3482 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3483 Split(ne, if_true, if_false, fall_through, cr0);
3485 context()->Plug(if_true, if_false);
3489 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3490 CallRuntime* expr) {
3491 ZoneList<Expression*>* args = expr->arguments();
3492 DCHECK(args->length() == 1);
3494 VisitForAccumulatorValue(args->at(0));
3496 Label materialize_true, materialize_false, skip_lookup;
3497 Label* if_true = NULL;
3498 Label* if_false = NULL;
3499 Label* fall_through = NULL;
3500 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3501 &if_false, &fall_through);
3503 __ AssertNotSmi(r3);
3505 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3506 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3507 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3508 __ bne(&skip_lookup, cr0);
3510 // Check for fast case object. Generate false result for slow case object.
3511 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3512 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3513 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3517 // Look for valueOf name in the descriptor array, and indicate false if
3518 // found. Since we omit an enumeration index check, if it is added via a
3519 // transition that shares its descriptor array, this is a false positive.
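// e.g. an object with its own 'valueOf' property, or whose prototype is no
// longer the original String prototype (checked after skip_lookup), must
// make this test answer false.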
3520 Label entry, loop, done;
3522 // Skip loop if no descriptors are valid.
3523 __ NumberOfOwnDescriptors(r6, r4);
3524 __ cmpi(r6, Operand::Zero());
3527 __ LoadInstanceDescriptors(r4, r7);
3528 // r7: descriptor array.
3529 // r6: valid entries in the descriptor array.
3530 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3532 // Calculate location of the first key name.
3533 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3534 // Calculate the end of the descriptor array.
3536 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3539 // Loop through all the keys in the descriptor array. If one of these is the
3540 // string "valueOf" the result is false.
3541 // The use of ip to store the valueOf string assumes that it is not otherwise
3542 // used in the loop below.
3543 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3546 __ LoadP(r6, MemOperand(r7, 0));
3549 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3556 // Set the bit in the map to indicate that there is no local valueOf field.
3557 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3558 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3559 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3561 __ bind(&skip_lookup);
3563 // If a valueOf property is not found on the object, check that its
3564 // prototype is the unmodified String prototype. If it is not, the result is false.
3565 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3566 __ JumpIfSmi(r5, if_false);
3567 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3568 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3569 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3571 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3573 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3574 Split(eq, if_true, if_false, fall_through);
3576 context()->Plug(if_true, if_false);
3580 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3581 ZoneList<Expression*>* args = expr->arguments();
3582 DCHECK(args->length() == 1);
3584 VisitForAccumulatorValue(args->at(0));
3586 Label materialize_true, materialize_false;
3587 Label* if_true = NULL;
3588 Label* if_false = NULL;
3589 Label* fall_through = NULL;
3590 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3591 &if_false, &fall_through);
3593 __ JumpIfSmi(r3, if_false);
3594 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3595 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3596 Split(eq, if_true, if_false, fall_through);
3598 context()->Plug(if_true, if_false);
3602 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3603 ZoneList<Expression*>* args = expr->arguments();
3604 DCHECK(args->length() == 1);
3606 VisitForAccumulatorValue(args->at(0));
3608 Label materialize_true, materialize_false;
3609 Label* if_true = NULL;
3610 Label* if_false = NULL;
3611 Label* fall_through = NULL;
3612 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3613 &if_false, &fall_through);
3615 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3616 #if V8_TARGET_ARCH_PPC64
3617 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3618 __ li(r5, Operand(1));
3619 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
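// The heap number's raw 64-bit payload is compared against this constant:
// in IEEE-754, -0.0 is exactly the sign bit with every other bit clear.
// A host-side sketch of the equivalent test (assumes IEEE-754 doubles and
// <cstring>/<cstdint>; this is not generated code):
//
//   uint64_t bits;
//   std::memcpy(&bits, &double_value, sizeof(bits));
//   bool is_minus_zero = (bits == 0x8000000000000000ULL);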
3622 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3623 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3625 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3628 __ cmpi(r4, Operand::Zero());
3632 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3633 Split(eq, if_true, if_false, fall_through);
3635 context()->Plug(if_true, if_false);
3639 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3640 ZoneList<Expression*>* args = expr->arguments();
3641 DCHECK(args->length() == 1);
3643 VisitForAccumulatorValue(args->at(0));
3645 Label materialize_true, materialize_false;
3646 Label* if_true = NULL;
3647 Label* if_false = NULL;
3648 Label* fall_through = NULL;
3649 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3650 &if_false, &fall_through);
3652 __ JumpIfSmi(r3, if_false);
3653 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3654 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3655 Split(eq, if_true, if_false, fall_through);
3657 context()->Plug(if_true, if_false);
3661 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3662 ZoneList<Expression*>* args = expr->arguments();
3663 DCHECK(args->length() == 1);
3665 VisitForAccumulatorValue(args->at(0));
3667 Label materialize_true, materialize_false;
3668 Label* if_true = NULL;
3669 Label* if_false = NULL;
3670 Label* fall_through = NULL;
3671 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3672 &if_false, &fall_through);
3674 __ JumpIfSmi(r3, if_false);
3675 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3676 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3677 Split(eq, if_true, if_false, fall_through);
3679 context()->Plug(if_true, if_false);
3683 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3684 ZoneList<Expression*>* args = expr->arguments();
3685 DCHECK(args->length() == 1);
3687 VisitForAccumulatorValue(args->at(0));
3689 Label materialize_true, materialize_false;
3690 Label* if_true = NULL;
3691 Label* if_false = NULL;
3692 Label* fall_through = NULL;
3693 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3694 &if_false, &fall_through);
3696 __ JumpIfSmi(r3, if_false);
3698 Register type_reg = r5;
3699 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3700 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3701 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3702 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3703 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3704 Split(le, if_true, if_false, fall_through);
3706 context()->Plug(if_true, if_false);
3710 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3711 DCHECK(expr->arguments()->length() == 0);
3713 Label materialize_true, materialize_false;
3714 Label* if_true = NULL;
3715 Label* if_false = NULL;
3716 Label* fall_through = NULL;
3717 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3718 &if_false, &fall_through);
3720 // Get the frame pointer for the calling frame.
3721 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3723 // Skip the arguments adaptor frame if it exists.
3724 Label check_frame_marker;
3725 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3726 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3727 __ bne(&check_frame_marker);
3728 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3730 // Check the marker in the calling frame.
3731 __ bind(&check_frame_marker);
3732 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3733 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3734 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3735 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3736 Split(eq, if_true, if_false, fall_through);
3738 context()->Plug(if_true, if_false);
3742 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3743 ZoneList<Expression*>* args = expr->arguments();
3744 DCHECK(args->length() == 2);
3746 // Load the two objects into registers and perform the comparison.
3747 VisitForStackValue(args->at(0));
3748 VisitForAccumulatorValue(args->at(1));
3750 Label materialize_true, materialize_false;
3751 Label* if_true = NULL;
3752 Label* if_false = NULL;
3753 Label* fall_through = NULL;
3754 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3755 &if_false, &fall_through);
3759 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3760 Split(eq, if_true, if_false, fall_through);
3762 context()->Plug(if_true, if_false);
3766 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3767 ZoneList<Expression*>* args = expr->arguments();
3768 DCHECK(args->length() == 1);
3770 // ArgumentsAccessStub expects the key in r4 and the formal
3771 // parameter count in r3.
3772 VisitForAccumulatorValue(args->at(0));
3774 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3775 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3777 context()->Plug(r3);
3781 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3782 DCHECK(expr->arguments()->length() == 0);
3784 // Get the number of formal parameters.
3785 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3787 // Check if the calling frame is an arguments adaptor frame.
3788 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3789 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3790 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3793 // Arguments adaptor case: Read the arguments length from the adaptor frame.
3795 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3798 context()->Plug(r3);
3802 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3803 ZoneList<Expression*>* args = expr->arguments();
3804 DCHECK(args->length() == 1);
3805 Label done, null, function, non_function_constructor;
3807 VisitForAccumulatorValue(args->at(0));
3809 // If the object is a smi, we return null.
3810 __ JumpIfSmi(r3, &null);
3812 // Check that the object is a JS object but take special care of JS
3813 // functions to make sure they have 'Function' as their class.
3814 // Assume that there are only two callable types, and one of them is at
3815 // either end of the type range for JS object types. Saves extra comparisons.
3816 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3817 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3818 // Map is now in r3.
3820 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3821 FIRST_SPEC_OBJECT_TYPE + 1);
3824 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3825 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3827 // Assume that there is no larger type.
3828 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3830 // Check if the constructor in the map is a JS function.
3831 Register instance_type = r5;
3832 __ GetMapConstructor(r3, r3, r4, instance_type);
3833 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3834 __ bne(&non_function_constructor);
3836 // r3 now contains the constructor function. Grab the
3837 // instance class name from there.
3838 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3840 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3843 // Functions have class 'Function'.
3845 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3848 // Objects with a non-function constructor have class 'Object'.
3849 __ bind(&non_function_constructor);
3850 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3853 // Non-JS objects have class null.
3855 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3860 context()->Plug(r3);
3864 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3865 // Load the arguments on the stack and call the stub.
3866 SubStringStub stub(isolate());
3867 ZoneList<Expression*>* args = expr->arguments();
3868 DCHECK(args->length() == 3);
3869 VisitForStackValue(args->at(0));
3870 VisitForStackValue(args->at(1));
3871 VisitForStackValue(args->at(2));
3873 context()->Plug(r3);
3877 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3878 // Load the arguments on the stack and call the stub.
3879 RegExpExecStub stub(isolate());
3880 ZoneList<Expression*>* args = expr->arguments();
3881 DCHECK(args->length() == 4);
3882 VisitForStackValue(args->at(0));
3883 VisitForStackValue(args->at(1));
3884 VisitForStackValue(args->at(2));
3885 VisitForStackValue(args->at(3));
3887 context()->Plug(r3);
3891 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3892 ZoneList<Expression*>* args = expr->arguments();
3893 DCHECK(args->length() == 1);
3894 VisitForAccumulatorValue(args->at(0)); // Load the object.
3897 // If the object is a smi, return the object.
3898 __ JumpIfSmi(r3, &done);
3899 // If the object is not a value type, return the object.
3900 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3902 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3905 context()->Plug(r3);
3909 void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
3910 ZoneList<Expression*>* args = expr->arguments();
3911 DCHECK_EQ(1, args->length());
3913 VisitForAccumulatorValue(args->at(0)); // Load the object.
3915 Label done, not_date_object;
3916 Register object = r3;
3917 Register result = r3;
3918 Register scratch0 = r4;
3920 __ JumpIfSmi(object, &not_date_object);
3921 __ CompareObjectType(object, scratch0, scratch0, JS_DATE_TYPE);
3923 __ bind(&not_date_object);
3924 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3927 context()->Plug(result);
3931 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3932 ZoneList<Expression*>* args = expr->arguments();
3933 DCHECK(args->length() == 2);
3934 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3935 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3937 VisitForAccumulatorValue(args->at(0)); // Load the object.
3939 Register object = r3;
3940 Register result = r3;
3941 Register scratch0 = r11;
3942 Register scratch1 = r4;
3944 if (index->value() == 0) {
3945 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3947 Label runtime, done;
3948 if (index->value() < JSDate::kFirstUncachedField) {
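// Date fields below kFirstUncachedField are cached on the JSDate object and
// can be used as long as the per-object cache stamp loaded below matches the
// isolate-wide date cache stamp; on a mismatch control falls through to the
// slow path that calls get_date_field_function.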
3949 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3950 __ mov(scratch1, Operand(stamp));
3951 __ LoadP(scratch1, MemOperand(scratch1));
3952 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3953 __ cmp(scratch1, scratch0);
3956 FieldMemOperand(object, JSDate::kValueOffset +
3957 kPointerSize * index->value()),
3962 __ PrepareCallCFunction(2, scratch1);
3963 __ LoadSmiLiteral(r4, index);
3964 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3968 context()->Plug(result);
3972 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3973 ZoneList<Expression*>* args = expr->arguments();
3974 DCHECK_EQ(3, args->length());
3976 Register string = r3;
3977 Register index = r4;
3978 Register value = r5;
3980 VisitForStackValue(args->at(0)); // index
3981 VisitForStackValue(args->at(1)); // value
3982 VisitForAccumulatorValue(args->at(2)); // string
3983 __ Pop(index, value);
3985 if (FLAG_debug_code) {
3986 __ TestIfSmi(value, r0);
3987 __ Check(eq, kNonSmiValue, cr0);
3988 __ TestIfSmi(index, r0);
3989 __ Check(eq, kNonSmiIndex, cr0);
3990 __ SmiUntag(index, index);
3991 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3992 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3993 __ SmiTag(index, index);
3997 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3998 __ SmiToByteArrayOffset(r0, index);
3999 __ stbx(value, MemOperand(ip, r0));
4000 context()->Plug(string);
4004 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
4005 ZoneList<Expression*>* args = expr->arguments();
4006 DCHECK_EQ(3, args->length());
4008 Register string = r3;
4009 Register index = r4;
4010 Register value = r5;
4012 VisitForStackValue(args->at(0)); // index
4013 VisitForStackValue(args->at(1)); // value
4014 VisitForAccumulatorValue(args->at(2)); // string
4015 __ Pop(index, value);
4017 if (FLAG_debug_code) {
4018 __ TestIfSmi(value, r0);
4019 __ Check(eq, kNonSmiValue, cr0);
4020 __ TestIfSmi(index, r0);
4021 __ Check(eq, kNonSmiIndex, cr0);
4022 __ SmiUntag(index, index);
4023 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4024 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
4025 __ SmiTag(index, index);
4029 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4030 __ SmiToShortArrayOffset(r0, index);
4031 __ sthx(value, MemOperand(ip, r0));
4032 context()->Plug(string);
4036 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
4037 // Load the arguments on the stack and call the runtime function.
4038 ZoneList<Expression*>* args = expr->arguments();
4039 DCHECK(args->length() == 2);
4040 VisitForStackValue(args->at(0));
4041 VisitForStackValue(args->at(1));
4042 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4044 context()->Plug(r3);
4048 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4049 ZoneList<Expression*>* args = expr->arguments();
4050 DCHECK(args->length() == 2);
4051 VisitForStackValue(args->at(0)); // Load the object.
4052 VisitForAccumulatorValue(args->at(1)); // Load the value.
4053 __ pop(r4); // r3 = value. r4 = object.
4056 // If the object is a smi, return the value.
4057 __ JumpIfSmi(r4, &done);
4059 // If the object is not a value type, return the value.
4060 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
4064 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
4065 // Update the write barrier. Save the value as it will be
4066 // overwritten by the write barrier code and is needed afterward.
4068 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
4072 context()->Plug(r3);
4076 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4077 ZoneList<Expression*>* args = expr->arguments();
4078 DCHECK_EQ(args->length(), 1);
4079 // Load the argument into r3 and call the stub.
4080 VisitForAccumulatorValue(args->at(0));
4082 NumberToStringStub stub(isolate());
4084 context()->Plug(r3);
4088 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4089 ZoneList<Expression*>* args = expr->arguments();
4090 DCHECK(args->length() == 1);
4091 VisitForAccumulatorValue(args->at(0));
4094 StringCharFromCodeGenerator generator(r3, r4);
4095 generator.GenerateFast(masm_);
4098 NopRuntimeCallHelper call_helper;
4099 generator.GenerateSlow(masm_, call_helper);
4102 context()->Plug(r4);
4106 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4107 ZoneList<Expression*>* args = expr->arguments();
4108 DCHECK(args->length() == 2);
4109 VisitForStackValue(args->at(0));
4110 VisitForAccumulatorValue(args->at(1));
4112 Register object = r4;
4113 Register index = r3;
4114 Register result = r6;
4118 Label need_conversion;
4119 Label index_out_of_range;
4121 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
4122 &need_conversion, &index_out_of_range,
4123 STRING_INDEX_IS_NUMBER);
4124 generator.GenerateFast(masm_);
4127 __ bind(&index_out_of_range);
4128 // When the index is out of range, the spec requires us to return NaN.
4130 __ LoadRoot(result, Heap::kNanValueRootIndex);
4133 __ bind(&need_conversion);
4134 // Load the undefined value into the result register, which will
4135 // trigger conversion.
4136 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4139 NopRuntimeCallHelper call_helper;
4140 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4143 context()->Plug(result);
4147 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4148 ZoneList<Expression*>* args = expr->arguments();
4149 DCHECK(args->length() == 2);
4150 VisitForStackValue(args->at(0));
4151 VisitForAccumulatorValue(args->at(1));
4153 Register object = r4;
4154 Register index = r3;
4155 Register scratch = r6;
4156 Register result = r3;
4160 Label need_conversion;
4161 Label index_out_of_range;
4163 StringCharAtGenerator generator(object, index, scratch, result,
4164 &need_conversion, &need_conversion,
4165 &index_out_of_range, STRING_INDEX_IS_NUMBER);
4166 generator.GenerateFast(masm_);
4169 __ bind(&index_out_of_range);
4170 // When the index is out of range, the spec requires us to return
4171 // the empty string.
4172 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4175 __ bind(&need_conversion);
4176 // Move smi zero into the result register, which will trigger conversion.
4178 __ LoadSmiLiteral(result, Smi::FromInt(0));
4181 NopRuntimeCallHelper call_helper;
4182 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4185 context()->Plug(result);
4189 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4190 ZoneList<Expression*>* args = expr->arguments();
4191 DCHECK_EQ(2, args->length());
4192 VisitForStackValue(args->at(0));
4193 VisitForAccumulatorValue(args->at(1));
4196 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4198 context()->Plug(r3);
4202 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4203 ZoneList<Expression*>* args = expr->arguments();
4204 DCHECK_EQ(2, args->length());
4205 VisitForStackValue(args->at(0));
4206 VisitForStackValue(args->at(1));
4208 StringCompareStub stub(isolate());
4210 context()->Plug(r3);
4214 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4215 ZoneList<Expression*>* args = expr->arguments();
4216 DCHECK(args->length() >= 2);
4218 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4219 for (int i = 0; i < arg_count + 1; i++) {
4220 VisitForStackValue(args->at(i));
4222 VisitForAccumulatorValue(args->last()); // Function.
4224 Label runtime, done;
4225 // Check for non-function argument (including proxy).
4226 __ JumpIfSmi(r3, &runtime);
4227 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
4230 // InvokeFunction requires the function in r4. Move it in there.
4231 __ mr(r4, result_register());
4232 ParameterCount count(arg_count);
4233 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
4234 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4239 __ CallRuntime(Runtime::kCall, args->length());
4242 context()->Plug(r3);
4246 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4247 ZoneList<Expression*>* args = expr->arguments();
4248 DCHECK(args->length() == 2);
4251 VisitForStackValue(args->at(0));
4254 VisitForStackValue(args->at(1));
4255 __ CallRuntime(Runtime::kGetPrototype, 1);
4256 __ mr(r4, result_register());
4259 // Check if the calling frame is an arguments adaptor frame.
4260 Label adaptor_frame, args_set_up, runtime;
4261 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4262 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
4263 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
4264 __ beq(&adaptor_frame);
4266 // default constructor has no arguments, so no adaptor frame means no args.
4267 __ li(r3, Operand::Zero());
4270 // Copy arguments from adaptor frame.
4272 __ bind(&adaptor_frame);
4273 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
4276 // Subtract 1 from arguments count, for new.target.
4277 __ subi(r3, r3, Operand(1));
4279 // Get arguments pointer in r5.
4280 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
4282 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
4287 // Pre-decrement in order to skip receiver.
4288 __ LoadPU(r6, MemOperand(r5, -kPointerSize));
4293 __ bind(&args_set_up);
4294 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
4296 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4297 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4301 context()->Plug(result_register());
4305 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4306 RegExpConstructResultStub stub(isolate());
4307 ZoneList<Expression*>* args = expr->arguments();
4308 DCHECK(args->length() == 3);
4309 VisitForStackValue(args->at(0));
4310 VisitForStackValue(args->at(1));
4311 VisitForAccumulatorValue(args->at(2));
4314 context()->Plug(r3);
4318 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4319 ZoneList<Expression*>* args = expr->arguments();
4320 DCHECK_EQ(2, args->length());
4321 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4322 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4324 Handle<FixedArray> jsfunction_result_caches(
4325 isolate()->native_context()->jsfunction_result_caches());
4326 if (jsfunction_result_caches->length() <= cache_id) {
4327 __ Abort(kAttemptToUseUndefinedCache);
4328 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4329 context()->Plug(r3);
4333 VisitForAccumulatorValue(args->at(1));
4336 Register cache = r4;
4337 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4338 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4340 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4342 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
4344 Label done, not_found;
4345 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4346 // r5 now holds finger offset as a smi.
4347 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4348 // r6 now points to the start of fixed array elements.
4349 __ SmiToPtrArrayOffset(r5, r5);
4350 __ LoadPUX(r5, MemOperand(r6, r5));
4351 // r6 now points to the key of the pair.
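// As the loads above suggest, the cache entries are (key, value) pairs stored
// back to back in the fixed array, and the finger is a smi index (scaled to a
// byte offset above) that points at the key of the most recently used pair;
// on a hit the value sits one pointer-sized slot after the key.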
4355 __ LoadP(r3, MemOperand(r6, kPointerSize));
4358 __ bind(&not_found);
4359 // Call runtime to perform the lookup.
4360 __ Push(cache, key);
4361 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4364 context()->Plug(r3);
4368 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4369 ZoneList<Expression*>* args = expr->arguments();
4370 VisitForAccumulatorValue(args->at(0));
4372 Label materialize_true, materialize_false;
4373 Label* if_true = NULL;
4374 Label* if_false = NULL;
4375 Label* fall_through = NULL;
4376 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4377 &if_false, &fall_through);
4379 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4380 // PPC - assume ip is free
4381 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4382 __ and_(r0, r3, ip);
4383 __ cmpi(r0, Operand::Zero());
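// Per the hash field encoding, the string carries a cached array index
// exactly when all bits selected by kContainsCachedArrayIndexMask are clear,
// hence the eq split below.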
4384 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4385 Split(eq, if_true, if_false, fall_through);
4387 context()->Plug(if_true, if_false);
4391 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4392 ZoneList<Expression*>* args = expr->arguments();
4393 DCHECK(args->length() == 1);
4394 VisitForAccumulatorValue(args->at(0));
4396 __ AssertString(r3);
4398 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4399 __ IndexFromHash(r3, r3);
4401 context()->Plug(r3);
4405 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4406 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4407 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4408 one_char_separator_loop_entry, long_separator_loop;
4409 ZoneList<Expression*>* args = expr->arguments();
4410 DCHECK(args->length() == 2);
4411 VisitForStackValue(args->at(1));
4412 VisitForAccumulatorValue(args->at(0));
4414 // All aliases of the same register have disjoint lifetimes.
4415 Register array = r3;
4416 Register elements = no_reg; // Will be r3.
4417 Register result = no_reg; // Will be r3.
4418 Register separator = r4;
4419 Register array_length = r5;
4420 Register result_pos = no_reg; // Will be r5
4421 Register string_length = r6;
4422 Register string = r7;
4423 Register element = r8;
4424 Register elements_end = r9;
4425 Register scratch1 = r10;
4426 Register scratch2 = r11;
4428 // Separator operand is on the stack.
4431 // Check that the array is a JSArray.
4432 __ JumpIfSmi(array, &bailout);
4433 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
4436 // Check that the array has fast elements.
4437 __ CheckFastElements(scratch1, scratch2, &bailout);
4439 // If the array has length zero, return the empty string.
4440 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4441 __ SmiUntag(array_length);
4442 __ cmpi(array_length, Operand::Zero());
4443 __ bne(&non_trivial_array);
4444 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
4447 __ bind(&non_trivial_array);
4449 // Get the FixedArray containing array's elements.
4451 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4452 array = no_reg; // End of array's live range.
4454 // Check that all array elements are sequential one-byte strings, and
4455 // accumulate the sum of their lengths, as a smi-encoded value.
4456 __ li(string_length, Operand::Zero());
4457 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4458 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4459 __ add(elements_end, element, elements_end);
4460 // Loop condition: while (element < elements_end).
4461 // Live values in registers:
4462 // elements: Fixed array of strings.
4463 // array_length: Length of the fixed array of strings (not smi)
4464 // separator: Separator string
4465 // string_length: Accumulated sum of string lengths (smi).
4466 // element: Current array element.
4467 // elements_end: Array end.
4468 if (generate_debug_code_) {
4469 __ cmpi(array_length, Operand::Zero());
4470 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4473 __ LoadP(string, MemOperand(element));
4474 __ addi(element, element, Operand(kPointerSize));
4475 __ JumpIfSmi(string, &bailout);
4476 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4477 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4478 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4479 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4481 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4483 __ BranchOnOverflow(&bailout);
4485 __ cmp(element, elements_end);
4488 // If array_length is 1, return elements[0], a string.
4489 __ cmpi(array_length, Operand(1));
4490 __ bne(&not_size_one_array);
4491 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
4494 __ bind(&not_size_one_array);
4496 // Live values in registers:
4497 // separator: Separator string
4498 // array_length: Length of the array.
4499 // string_length: Sum of string lengths (smi).
4500 // elements: FixedArray of strings.
4502 // Check that the separator is a flat one-byte string.
4503 __ JumpIfSmi(separator, &bailout);
4504 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4505 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4506 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4508 // Add (separator length times array_length) - separator length to the
4509 // string_length to get the length of the result string.
4511 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4512 __ sub(string_length, string_length, scratch1);
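// At this point string_length holds sum(len_i) (still smi-tagged) and
// scratch1 holds the separator length, so the code computes
//   result_length = sum(len_i) - sep_len + sep_len * array_length
//                 = sum(len_i) + sep_len * (array_length - 1),
// i.e. one separator between each pair of adjacent elements.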
4513 #if V8_TARGET_ARCH_PPC64
4514 __ SmiUntag(scratch1, scratch1);
4515 __ Mul(scratch2, array_length, scratch1);
4516 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are zero.
4518 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4519 __ bne(&bailout, cr0);
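// The product can only be re-tagged as a smi if it fits in the low 31 bits,
// so any set bit among bits 31..63 of the 64-bit result (the 33 high bits)
// means the joined string would be too long and we bail out.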
4520 __ SmiTag(scratch2, scratch2);
4522 // array_length is not smi but the other values are, so the result is a smi
4523 __ mullw(scratch2, array_length, scratch1);
4524 __ mulhw(ip, array_length, scratch1);
4525 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are zero.
4527 __ cmpi(ip, Operand::Zero());
4529 __ cmpwi(scratch2, Operand::Zero());
4533 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4535 __ BranchOnOverflow(&bailout);
4536 __ SmiUntag(string_length);
4538 // Get first element in the array to free up the elements register to be used
4540 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4541 result = elements; // End of live range for elements.
4543 // Live values in registers:
4544 // element: First array element
4545 // separator: Separator string
4546 // string_length: Length of result string (not smi)
4547 // array_length: Length of the array.
4548 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4549 elements_end, &bailout);
4550 // Prepare for looping. Set up elements_end to point at the end of the array.
4551 // Set result_pos to the position in the result where the first character will be written.
4553 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4554 __ add(elements_end, element, elements_end);
4555 result_pos = array_length; // End of live range for array_length.
4556 array_length = no_reg;
4557 __ addi(result_pos, result,
4558 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4560 // Check the length of the separator.
4562 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4563 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4564 __ beq(&one_char_separator);
4565 __ bgt(&long_separator);
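// Three copy loops follow, selected by the separator length just compared:
// length 0 falls through to the empty-separator loop, length 1 takes the
// single-byte-store fast path, and anything longer copies the full separator
// string before every element except the first.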
4567 // Empty separator case
4568 __ bind(&empty_separator_loop);
4569 // Live values in registers:
4570 // result_pos: the position to which we are currently copying characters.
4571 // element: Current array element.
4572 // elements_end: Array end.
4574 // Copy next array element to the result.
4575 __ LoadP(string, MemOperand(element));
4576 __ addi(element, element, Operand(kPointerSize));
4577 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4578 __ SmiUntag(string_length);
4579 __ addi(string, string,
4580 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4581 __ CopyBytes(string, result_pos, string_length, scratch1);
4582 __ cmp(element, elements_end);
4583 __ blt(&empty_separator_loop); // End while (element < elements_end).
4584 DCHECK(result.is(r3));
4587 // One-character separator case
4588 __ bind(&one_char_separator);
4589 // Replace separator with its one-byte character value.
4590 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4591 // Jump into the loop after the code that copies the separator, so the first
4592 // element is not preceded by a separator
4593 __ b(&one_char_separator_loop_entry);
4595 __ bind(&one_char_separator_loop);
4596 // Live values in registers:
4597 // result_pos: the position to which we are currently copying characters.
4598 // element: Current array element.
4599 // elements_end: Array end.
4600 // separator: Single separator one-byte char (in lower byte).
4602 // Copy the separator character to the result.
4603 __ stb(separator, MemOperand(result_pos));
4604 __ addi(result_pos, result_pos, Operand(1));
4606 // Copy next array element to the result.
4607 __ bind(&one_char_separator_loop_entry);
4608 __ LoadP(string, MemOperand(element));
4609 __ addi(element, element, Operand(kPointerSize));
4610 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4611 __ SmiUntag(string_length);
4612 __ addi(string, string,
4613 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4614 __ CopyBytes(string, result_pos, string_length, scratch1);
4615 __ cmpl(element, elements_end);
4616 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4617 DCHECK(result.is(r3));
4620 // Long separator case (separator is more than one character). Entry is at the
4621 // label long_separator below.
4622 __ bind(&long_separator_loop);
4623 // Live values in registers:
4624 // result_pos: the position to which we are currently copying characters.
4625 // element: Current array element.
4626 // elements_end: Array end.
4627 // separator: Separator string.
4629 // Copy the separator to the result.
4630 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4631 __ SmiUntag(string_length);
4632 __ addi(string, separator,
4633 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4634 __ CopyBytes(string, result_pos, string_length, scratch1);
4636 __ bind(&long_separator);
4637 __ LoadP(string, MemOperand(element));
4638 __ addi(element, element, Operand(kPointerSize));
4639 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4640 __ SmiUntag(string_length);
4641 __ addi(string, string,
4642 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4643 __ CopyBytes(string, result_pos, string_length, scratch1);
4644 __ cmpl(element, elements_end);
4645 __ blt(&long_separator_loop); // End while (element < elements_end).
4646 DCHECK(result.is(r3));
4650 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4652 context()->Plug(r3);
4656 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4657 DCHECK(expr->arguments()->length() == 0);
4658 ExternalReference debug_is_active =
4659 ExternalReference::debug_is_active_address(isolate());
4660 __ mov(ip, Operand(debug_is_active));
4661 __ lbz(r3, MemOperand(ip));
4663 context()->Plug(r3);
4667 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4668 // Assert: expr === CallRuntime("ReflectConstruct")
4669 DCHECK_EQ(1, expr->arguments()->length());
4670 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4672 ZoneList<Expression*>* args = call->arguments();
4673 DCHECK_EQ(3, args->length());
4675 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4676 DCHECK_NOT_NULL(super_call_ref);
4678 // Load ReflectConstruct function
4679 EmitLoadJSRuntimeFunction(call);
4681 // Push the target function under the receiver.
4682 __ LoadP(r0, MemOperand(sp, 0));
4684 __ StoreP(r3, MemOperand(sp, kPointerSize));
4686 // Push super constructor
4687 EmitLoadSuperConstructor(super_call_ref);
4688 __ Push(result_register());
4690 // Push arguments array
4691 VisitForStackValue(args->at(1));
4694 DCHECK(args->at(2)->IsVariableProxy());
4695 VisitForStackValue(args->at(2));
4697 EmitCallJSRuntimeFunction(call);
4699 // Restore context register.
4700 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4701 context()->DropAndPlug(1, r3);
4703 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4704 EmitInitializeThisAfterSuper(super_call_ref);
4708 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4709 // Push the builtins object as the receiver.
4710 Register receiver = LoadDescriptor::ReceiverRegister();
4711 __ LoadP(receiver, GlobalObjectOperand());
4712 __ LoadP(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4715 // Load the function from the receiver.
4716 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4717 __ mov(LoadDescriptor::SlotRegister(),
4718 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4719 CallLoadIC(NOT_CONTEXTUAL);
4723 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4724 ZoneList<Expression*>* args = expr->arguments();
4725 int arg_count = args->length();
4727 // Record source position of the IC call.
4728 SetSourcePosition(expr->position());
4729 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4730 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4735 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4736 ZoneList<Expression*>* args = expr->arguments();
4737 int arg_count = args->length();
4739 if (expr->is_jsruntime()) {
4740 Comment cmnt(masm_, "[ CallRuntime");
4741 EmitLoadJSRuntimeFunction(expr);
4743 // Push the target function under the receiver.
4744 __ LoadP(ip, MemOperand(sp, 0));
4746 __ StoreP(r3, MemOperand(sp, kPointerSize));
4748 // Push the arguments ("left-to-right").
4749 for (int i = 0; i < arg_count; i++) {
4750 VisitForStackValue(args->at(i));
4753 EmitCallJSRuntimeFunction(expr);
4755 // Restore context register.
4756 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4758 context()->DropAndPlug(1, r3);
4761 const Runtime::Function* function = expr->function();
4762 switch (function->function_id) {
4763 #define CALL_INTRINSIC_GENERATOR(Name) \
4764 case Runtime::kInline##Name: { \
4765 Comment cmnt(masm_, "[ Inline" #Name); \
4766 return Emit##Name(expr); \
4768 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4769 #undef CALL_INTRINSIC_GENERATOR
4771 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4772 // Push the arguments ("left-to-right").
4773 for (int i = 0; i < arg_count; i++) {
4774 VisitForStackValue(args->at(i));
4777 // Call the C runtime function.
4778 __ CallRuntime(expr->function(), arg_count);
4779 context()->Plug(r3);
4786 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4787 switch (expr->op()) {
4788 case Token::DELETE: {
4789 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4790 Property* property = expr->expression()->AsProperty();
4791 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4793 if (property != NULL) {
4794 VisitForStackValue(property->obj());
4795 VisitForStackValue(property->key());
4796 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
4798 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4799 context()->Plug(r3);
4800 } else if (proxy != NULL) {
4801 Variable* var = proxy->var();
4802 // Delete of an unqualified identifier is disallowed in strict mode
4803 // but "delete this" is allowed.
4804 DCHECK(is_sloppy(language_mode()) || var->is_this());
4805 if (var->IsUnallocated()) {
4806 __ LoadP(r5, GlobalObjectOperand());
4807 __ mov(r4, Operand(var->name()));
4808 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4809 __ Push(r5, r4, r3);
4810 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4811 context()->Plug(r3);
4812 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4813 // Result of deleting non-global, non-dynamic variables is false.
4814 // The subexpression does not have side effects.
4815 context()->Plug(var->is_this());
4817 // Non-global variable. Call the runtime to try to delete from the
4818 // context where the variable was introduced.
4819 DCHECK(!context_register().is(r5));
4820 __ mov(r5, Operand(var->name()));
4821 __ Push(context_register(), r5);
4822 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4823 context()->Plug(r3);
4826 // Result of deleting non-property, non-variable reference is true.
4827 // The subexpression may have side effects.
4828 VisitForEffect(expr->expression());
4829 context()->Plug(true);
4835 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4836 VisitForEffect(expr->expression());
4837 context()->Plug(Heap::kUndefinedValueRootIndex);
4842 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4843 if (context()->IsEffect()) {
4844 // Unary NOT has no side effects so it's only necessary to visit the
4845 // subexpression. Match the optimizing compiler by not branching.
4846 VisitForEffect(expr->expression());
4847 } else if (context()->IsTest()) {
4848 const TestContext* test = TestContext::cast(context());
4849 // The labels are swapped for the recursive call.
4850 VisitForControl(expr->expression(), test->false_label(),
4851 test->true_label(), test->fall_through());
4852 context()->Plug(test->true_label(), test->false_label());
4854 // We handle value contexts explicitly rather than simply visiting
4855 // for control and plugging the control flow into the context,
4856 // because we need to prepare a pair of extra administrative AST ids
4857 // for the optimizing compiler.
4858 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4859 Label materialize_true, materialize_false, done;
4860 VisitForControl(expr->expression(), &materialize_false,
4861 &materialize_true, &materialize_true);
4862 __ bind(&materialize_true);
4863 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4864 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4865 if (context()->IsStackValue()) __ push(r3);
4867 __ bind(&materialize_false);
4868 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4869 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4870 if (context()->IsStackValue()) __ push(r3);
4876 case Token::TYPEOF: {
4877 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4879 AccumulatorValueContext context(this);
4880 VisitForTypeofValue(expr->expression());
4883 TypeofStub typeof_stub(isolate());
4884 __ CallStub(&typeof_stub);
4885 context()->Plug(r3);
4895 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4896 DCHECK(expr->expression()->IsValidReferenceExpression());
4898 Comment cmnt(masm_, "[ CountOperation");
4899 SetSourcePosition(expr->position());
4901 Property* prop = expr->expression()->AsProperty();
4902 LhsKind assign_type = Property::GetAssignType(prop);
4904 // Evaluate expression and get value.
4905 if (assign_type == VARIABLE) {
4906 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4907 AccumulatorValueContext context(this);
4908 EmitVariableLoad(expr->expression()->AsVariableProxy());
4910 // Reserve space for result of postfix operation.
4911 if (expr->is_postfix() && !context()->IsEffect()) {
4912 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4915 switch (assign_type) {
4916 case NAMED_PROPERTY: {
4917 // Put the object both on the stack and in the register.
4918 VisitForStackValue(prop->obj());
4919 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4920 EmitNamedPropertyLoad(prop);
4924 case NAMED_SUPER_PROPERTY: {
4925 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4926 VisitForAccumulatorValue(
4927 prop->obj()->AsSuperPropertyReference()->home_object());
4928 __ Push(result_register());
4929 const Register scratch = r4;
4930 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4931 __ Push(scratch, result_register());
4932 EmitNamedSuperPropertyLoad(prop);
4936 case KEYED_SUPER_PROPERTY: {
4937 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4938 VisitForAccumulatorValue(
4939 prop->obj()->AsSuperPropertyReference()->home_object());
4940 const Register scratch = r4;
4941 const Register scratch1 = r5;
4942 __ mr(scratch, result_register());
4943 VisitForAccumulatorValue(prop->key());
4944 __ Push(scratch, result_register());
4945 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4946 __ Push(scratch1, scratch, result_register());
4947 EmitKeyedSuperPropertyLoad(prop);
4951 case KEYED_PROPERTY: {
4952 VisitForStackValue(prop->obj());
4953 VisitForStackValue(prop->key());
4954 __ LoadP(LoadDescriptor::ReceiverRegister(),
4955 MemOperand(sp, 1 * kPointerSize));
4956 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4957 EmitKeyedPropertyLoad(prop);
4966 // We need a second deoptimization point after loading the value
4967 // in case evaluating the property load has a side effect.
4968 if (assign_type == VARIABLE) {
4969 PrepareForBailout(expr->expression(), TOS_REG);
4971 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4974 // Inline smi case if we are in a loop.
4975 Label stub_call, done;
4976 JumpPatchSite patch_site(masm_);
4978 int count_value = expr->op() == Token::INC ? 1 : -1;
4979 if (ShouldInlineSmiCase(expr->op())) {
4981 patch_site.EmitJumpIfNotSmi(r3, &slow);
4983 // Save result for postfix expressions.
4984 if (expr->is_postfix()) {
4985 if (!context()->IsEffect()) {
4986 // Save the result on the stack. If we have a named or keyed property
4987 // we store the result under the receiver that is currently on top of the stack.
4989 switch (assign_type) {
4993 case NAMED_PROPERTY:
4994 __ StoreP(r3, MemOperand(sp, kPointerSize));
4996 case NAMED_SUPER_PROPERTY:
4997 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4999 case KEYED_PROPERTY:
5000 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5002 case KEYED_SUPER_PROPERTY:
5003 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
5009 Register scratch1 = r4;
5010 Register scratch2 = r5;
5011 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
5012 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
5013 __ BranchOnNoOverflow(&done);
5014 // Call stub. Undo operation first.
5015 __ sub(r3, r3, scratch1);
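// The inline smi add above clobbered r3, so restore the original operand
// before control reaches the BinaryOpIC call at stub_call below, which
// redoes the addition generically and records type feedback.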
5019 ToNumberStub convert_stub(isolate());
5020 __ CallStub(&convert_stub);
5021 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5023 // Save result for postfix expressions.
5024 if (expr->is_postfix()) {
5025 if (!context()->IsEffect()) {
5026 // Save the result on the stack. If we have a named or keyed property
5027 // we store the result under the receiver that is currently on top of the stack.
5029 switch (assign_type) {
5033 case NAMED_PROPERTY:
5034 __ StoreP(r3, MemOperand(sp, kPointerSize));
5036 case NAMED_SUPER_PROPERTY:
5037 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5039 case KEYED_PROPERTY:
5040 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5042 case KEYED_SUPER_PROPERTY:
5043 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
5049 __ bind(&stub_call);
5051 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
5053 // Record position before stub call.
5054 SetSourcePosition(expr->position());
5056 Handle<Code> code = CodeFactory::BinaryOpIC(
5057 isolate(), Token::ADD, language_mode()).code();
5058 CallIC(code, expr->CountBinOpFeedbackId());
5059 patch_site.EmitPatchInfo();
5062 // Store the value returned in r3.
5063 switch (assign_type) {
5065 if (expr->is_postfix()) {
5067 EffectContext context(this);
5068 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5069 Token::ASSIGN, expr->CountSlot());
5070 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5073 // For all contexts except EffectContext we have the result on
5074 // top of the stack.
5075 if (!context()->IsEffect()) {
5076 context()->PlugTOS();
5079 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5080 Token::ASSIGN, expr->CountSlot());
5081 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5082 context()->Plug(r3);
5085 case NAMED_PROPERTY: {
5086 __ mov(StoreDescriptor::NameRegister(),
5087 Operand(prop->key()->AsLiteral()->value()));
5088 __ pop(StoreDescriptor::ReceiverRegister());
5089 if (FLAG_vector_stores) {
5090 EmitLoadStoreICSlot(expr->CountSlot());
5093 CallStoreIC(expr->CountStoreFeedbackId());
5095 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5096 if (expr->is_postfix()) {
5097 if (!context()->IsEffect()) {
5098 context()->PlugTOS();
5101 context()->Plug(r3);
5105 case NAMED_SUPER_PROPERTY: {
5106 EmitNamedSuperPropertyStore(prop);
5107 if (expr->is_postfix()) {
5108 if (!context()->IsEffect()) {
5109 context()->PlugTOS();
5112 context()->Plug(r3);
5116 case KEYED_SUPER_PROPERTY: {
5117 EmitKeyedSuperPropertyStore(prop);
5118 if (expr->is_postfix()) {
5119 if (!context()->IsEffect()) {
5120 context()->PlugTOS();
5123 context()->Plug(r3);
5127 case KEYED_PROPERTY: {
5128 __ Pop(StoreDescriptor::ReceiverRegister(),
5129 StoreDescriptor::NameRegister());
5131 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5132 if (FLAG_vector_stores) {
5133 EmitLoadStoreICSlot(expr->CountSlot());
5136 CallIC(ic, expr->CountStoreFeedbackId());
5138 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5139 if (expr->is_postfix()) {
5140 if (!context()->IsEffect()) {
5141 context()->PlugTOS();
5144 context()->Plug(r3);
5152 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5153 DCHECK(!context()->IsEffect());
5154 DCHECK(!context()->IsTest());
5155 VariableProxy* proxy = expr->AsVariableProxy();
5156 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5157 Comment cmnt(masm_, "[ Global variable");
5158 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5159 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5160 __ mov(LoadDescriptor::SlotRegister(),
5161 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
5162 // Use a regular load, not a contextual load, to avoid a reference error.
5164 CallLoadIC(NOT_CONTEXTUAL);
5165 PrepareForBailout(expr, TOS_REG);
5166 context()->Plug(r3);
5167 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5168 Comment cmnt(masm_, "[ Lookup slot");
5171 // Generate code for loading from variables potentially shadowed
5172 // by eval-introduced variables.
5173 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
5176 __ mov(r3, Operand(proxy->name()));
5178 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5179 PrepareForBailout(expr, TOS_REG);
5182 context()->Plug(r3);
5184 // This expression cannot throw a reference error at the top level.
5185 VisitInDuplicateContext(expr);
5190 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5191 Expression* sub_expr,
5192 Handle<String> check) {
5193 Label materialize_true, materialize_false;
5194 Label* if_true = NULL;
5195 Label* if_false = NULL;
5196 Label* fall_through = NULL;
5197 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5198 &if_false, &fall_through);
5201 AccumulatorValueContext context(this);
5202 VisitForTypeofValue(sub_expr);
5204 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5206 Factory* factory = isolate()->factory();
5207 if (String::Equals(check, factory->number_string())) {
5208 __ JumpIfSmi(r3, if_true);
5209 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5210 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5212 Split(eq, if_true, if_false, fall_through);
5213 } else if (String::Equals(check, factory->string_string())) {
5214 __ JumpIfSmi(r3, if_false);
5215 // Check for undetectable objects => false.
5216 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
5218 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5219 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
5220 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5221 Split(eq, if_true, if_false, fall_through, cr0);
5222 } else if (String::Equals(check, factory->symbol_string())) {
5223 __ JumpIfSmi(r3, if_false);
5224 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
5225 Split(eq, if_true, if_false, fall_through);
5226 } else if (String::Equals(check, factory->boolean_string())) {
5227 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
5229 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
5230 Split(eq, if_true, if_false, fall_through);
5231 } else if (String::Equals(check, factory->undefined_string())) {
5232 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
5234 __ JumpIfSmi(r3, if_false);
5235 // Check for undetectable objects => true.
5236 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5237 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5238 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5239 Split(ne, if_true, if_false, fall_through, cr0);
5241 } else if (String::Equals(check, factory->function_string())) {
5242 __ JumpIfSmi(r3, if_false);
5243 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5244 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
5246 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
5247 Split(eq, if_true, if_false, fall_through);
5248 } else if (String::Equals(check, factory->object_string())) {
5249 __ JumpIfSmi(r3, if_false);
5250 __ CompareRoot(r3, Heap::kNullValueRootIndex);
5252 // Check for JS objects => true.
5253 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5255 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5257 // Check for undetectable objects => false.
5258 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5259 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5260 Split(eq, if_true, if_false, fall_through, cr0);
5262 if (if_false != fall_through) __ b(if_false);
5264 context()->Plug(if_true, if_false);
5268 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5269 Comment cmnt(masm_, "[ CompareOperation");
5270 SetSourcePosition(expr->position());
5272 // First we try a fast inlined version of the compare when one of
5273 // the operands is a literal.
5274 if (TryLiteralCompare(expr)) return;
5276 // Always perform the comparison for its control flow. Pack the result
5277 // into the expression's context after the comparison is performed.
5278 Label materialize_true, materialize_false;
5279 Label* if_true = NULL;
5280 Label* if_false = NULL;
5281 Label* fall_through = NULL;
5282 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5283 &if_false, &fall_through);
5285 Token::Value op = expr->op();
5286 VisitForStackValue(expr->left());
5289 VisitForStackValue(expr->right());
5290 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5291 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5292 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5294 Split(eq, if_true, if_false, fall_through);
5297 case Token::INSTANCEOF: {
5298 VisitForStackValue(expr->right());
5299 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5301 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5302 // The stub returns 0 for true.
5303 __ cmpi(r3, Operand::Zero());
5304 Split(eq, if_true, if_false, fall_through);
5309 VisitForAccumulatorValue(expr->right());
5310 Condition cond = CompareIC::ComputeCondition(op);
5313 bool inline_smi_code = ShouldInlineSmiCase(op);
5314 JumpPatchSite patch_site(masm_);
5315 if (inline_smi_code) {
5318 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
5320 Split(cond, if_true, if_false, NULL);
5321 __ bind(&slow_case);
5324 // Record position and call the compare IC.
5325 SetSourcePosition(expr->position());
5327 CodeFactory::CompareIC(isolate(), op, language_mode()).code();
5328 CallIC(ic, expr->CompareOperationFeedbackId());
5329 patch_site.EmitPatchInfo();
5330 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5331 __ cmpi(r3, Operand::Zero());
5332 Split(cond, if_true, if_false, fall_through);
5336 // Convert the result of the comparison into one expected for this
5337 // expression's context.
5338 context()->Plug(if_true, if_false);
5342 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5343 Expression* sub_expr,
5345 Label materialize_true, materialize_false;
5346 Label* if_true = NULL;
5347 Label* if_false = NULL;
5348 Label* fall_through = NULL;
5349 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5350 &if_false, &fall_through);
5352 VisitForAccumulatorValue(sub_expr);
5353 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5354 if (expr->op() == Token::EQ_STRICT) {
5355 Heap::RootListIndex nil_value = nil == kNullValue
5356 ? Heap::kNullValueRootIndex
5357 : Heap::kUndefinedValueRootIndex;
5358 __ LoadRoot(r4, nil_value);
5360 Split(eq, if_true, if_false, fall_through);
5362 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5363 CallIC(ic, expr->CompareOperationFeedbackId());
5364 __ cmpi(r3, Operand::Zero());
5365 Split(ne, if_true, if_false, fall_through);
5367 context()->Plug(if_true, if_false);
5371 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5372 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5373 context()->Plug(r3);
5377 Register FullCodeGenerator::result_register() { return r3; }
5380 Register FullCodeGenerator::context_register() { return cp; }
5383 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5384 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5385 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5389 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5390 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5394 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5395 Scope* declaration_scope = scope()->DeclarationScope();
5396 if (declaration_scope->is_script_scope() ||
5397 declaration_scope->is_module_scope()) {
5398 // Contexts nested in the native context have a canonical empty function
5399 // as their closure, not the anonymous closure containing the global
5400 // code. Pass a smi sentinel and let the runtime look up the empty function.
5402 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5403 } else if (declaration_scope->is_eval_scope()) {
5404 // Contexts created by a call to eval have the same closure as the
5405 // context calling eval, not the anonymous closure containing the eval
5406 // code. Fetch it from the context.
5407 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5408 } else {
5409 DCHECK(declaration_scope->is_function_scope());
5410 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5411 }
5412 __ push(ip);
5413 }
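// Illustrative summary (not part of the original file) of the three cases
// handled above when choosing the closure value to push for context
// allocation; the enum and helper names are hypothetical:
enum class HypotheticalClosureSource {
  kSmiZeroSentinel,  // script/module scope: runtime looks up the empty function
  kCallersClosure,   // eval scope: taken from Context::CLOSURE_INDEX
  kFrameFunction     // function scope: taken from the frame's kFunctionOffset slot
};
static inline HypotheticalClosureSource ClosureSourceFor(bool is_script_or_module,
                                                         bool is_eval) {
  if (is_script_or_module) return HypotheticalClosureSource::kSmiZeroSentinel;
  if (is_eval) return HypotheticalClosureSource::kCallersClosure;
  return HypotheticalClosureSource::kFrameFunction;
}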
5416 // ----------------------------------------------------------------------------
5417 // Non-local control flow support.
5419 void FullCodeGenerator::EnterFinallyBlock() {
5420 DCHECK(!result_register().is(r4));
5421 // Store result register while executing finally block.
5422 __ push(result_register());
5423 // Cook the return address in the link register onto the stack as a smi-encoded delta from the Code object.
5424 __ mflr(r4);
5425 __ mov(ip, Operand(masm_->CodeObject()));
5426 __ sub(r4, r4, ip);
5427 __ SmiTag(r4);
5429 // Store the cooked return address while executing finally block.
5430 __ push(r4);
5432 // Store pending message while executing finally block.
5433 ExternalReference pending_message_obj =
5434 ExternalReference::address_of_pending_message_obj(isolate());
5435 __ mov(ip, Operand(pending_message_obj));
5436 __ LoadP(r4, MemOperand(ip));
5437 __ push(r4);
5439 ClearPendingMessage();
5440 }
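// Illustrative sketch (not part of the original file): the "cooking" above
// keeps the saved return address valid even if the GC moves the Code object,
// by storing it as a smi-tagged offset from the code start rather than as a
// raw pointer. A standalone model of the round trip, with a hypothetical
// kSmiTagShift (the real shift differs between 32-bit and 64-bit builds):
static inline long CookReturnAddress(long return_address, long code_start) {
  const int kSmiTagShift = 1;
  return (return_address - code_start) << kSmiTagShift;  // smi-tagged delta
}
static inline long UncookReturnAddress(long cooked_delta, long code_start) {
  const int kSmiTagShift = 1;
  return code_start + (cooked_delta >> kSmiTagShift);  // absolute address again
}
// ExitFinallyBlock() below performs the uncooking (SmiUntag + add) before
// branching back through the counter register.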
5443 void FullCodeGenerator::ExitFinallyBlock() {
5444 DCHECK(!result_register().is(r4));
5445 // Restore pending message from stack.
5446 __ pop(r4);
5447 ExternalReference pending_message_obj =
5448 ExternalReference::address_of_pending_message_obj(isolate());
5449 __ mov(ip, Operand(pending_message_obj));
5450 __ StoreP(r4, MemOperand(ip));
5452 // Pop the cooked return address (smi-encoded Code* delta) into r4.
5453 __ pop(r4);
5455 // Restore the result register, uncook the return address, and return.
5456 __ pop(result_register());
5457 __ SmiUntag(r4);
5458 __ mov(ip, Operand(masm_->CodeObject()));
5459 __ add(ip, ip, r4);
5460 __ mtctr(ip);
5461 __ bctr();
5465 void FullCodeGenerator::ClearPendingMessage() {
5466 DCHECK(!result_register().is(r4));
5467 ExternalReference pending_message_obj =
5468 ExternalReference::address_of_pending_message_obj(isolate());
5469 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5470 __ mov(ip, Operand(pending_message_obj));
5471 __ StoreP(r4, MemOperand(ip));
5475 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5476 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5477 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5478 Operand(SmiFromSlot(slot)));
5485 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5486 BackEdgeState target_state,
5487 Code* replacement_code) {
5488 Address mov_address = Assembler::target_address_from_return_address(pc);
5489 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5490 CodePatcher patcher(cmp_address, 1);
5492 switch (target_state) {
5493 case INTERRUPT:
5494 // <decrement profiling counter>
5496 // bge <ok> ;; not changed
5497 // mov r12, <interrupt stub address>
5500 // <reset profiling counter>
5502 patcher.masm()->cmpi(r6, Operand::Zero());
5503 break;
5505 case ON_STACK_REPLACEMENT:
5506 case OSR_AFTER_STACK_CHECK:
5507 // <decrement profiling counter>
5509 // bge <ok> ;; not changed
5510 // mov r12, <on-stack replacement address>
5513 // <reset profiling counter>
5514 // ok-label ----- pc_after points here
5516 // Set the LT bit so that the following bge is never taken (effectively a NOP).
5517 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
5518 break;
5519 }
5521 // Replace the stack check address in the mov sequence with the
5522 // entry address of the replacement code.
5523 Assembler::set_target_address_at(mov_address, unoptimized_code,
5524 replacement_code->entry());
5526 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5527 unoptimized_code, mov_address, replacement_code);
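// Illustrative sketch (not part of the original file): only one instruction is
// ever patched here, and it controls whether the unchanged "bge <ok>" skips
// the call that follows it. In the INTERRUPT state the cmpi of the profiling
// counter decides; in the OSR states crset forces the LT bit, so the branch
// falls through and the replacement builtin is always called. A hypothetical
// model of that decision:
static inline bool BackEdgeCallIsReached(bool patched_for_osr,
                                         int profiling_counter) {
  if (patched_for_osr) return true;  // crset: bge never branches over the call
  return profiling_counter < 0;      // cmpi: call only once the counter underflows
}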
5531 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5532 Isolate* isolate, Code* unoptimized_code, Address pc) {
5533 Address mov_address = Assembler::target_address_from_return_address(pc);
5534 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5535 Address interrupt_address =
5536 Assembler::target_address_at(mov_address, unoptimized_code);
5538 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
5539 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
5540 return INTERRUPT;
5541 }
5543 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5545 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5546 return ON_STACK_REPLACEMENT;
5547 }
5549 DCHECK(interrupt_address ==
5550 isolate->builtins()->OsrAfterStackCheck()->entry());
5551 return OSR_AFTER_STACK_CHECK;
5552 }
5553 } // namespace internal
5554 } // namespace v8
5555 #endif // V8_TARGET_ARCH_PPC