// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_PPC

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"
namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code at which it is possible to patch.
// This class has a number of methods to emit the code which is patchable and
// the method EmitPatchInfo to record a marker back to the patchable code.
// This marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (the
// raw 16-bit immediate value) is the delta from the pc to the first
// instruction of the patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
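//
// A rough sketch of the two states of a patch site, shown for
// EmitJumpIfNotSmi (EmitJumpIfSmi uses the inverted branch); the
// authoritative patched sequence lives in PatchInlinedSmiCode:
//
//   unpatched:  cmp  reg, reg, cr0           ; always sets cr0.eq
//               beq  target, cr0             ; always taken
//
//   patched:    andi. r0, reg, #kSmiTagMask  ; real smi test
//               bne   target, cr0            ; taken only for non-smis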
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }
  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }
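
  // A sketch of how this class is typically used (cf. VisitSwitchStatement
  // below):
  //
  //   JumpPatchSite patch_site(masm_);
  //   patch_site.EmitJumpIfNotSmi(r5, &slow_case);  // patchable smi check
  //   ... inlined smi comparison ...
  //   CallIC(ic, ...);             // IC that may later patch the site
  //   patch_site.EmitPatchInfo();  // record the delta marker for the patcher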
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // reg's register code encodes the high bits of the delta; the cmpi
      // immediate carries the low 16 bits.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }
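
  // A worked example of the encoding above, assuming kOff16Mask == 0xffff:
  // a delta of 70000 is recorded as register code 70000 / 0xffff == 1 and
  // immediate 70000 % 0xffff == 4465 (i.e. cmpi r1, #4465), and the patcher
  // recovers it as 1 * 0xffff + 4465 == 70000.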
 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
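//
// Roughly, the resulting frame looks like this (see frames-ppc.h and
// frames.h for the authoritative constants):
//
//   fp + 2 * kPointerSize : caller's sp (receiver and arguments above it)
//   fp + 1 * kPointerSize : saved return address (lr)
//   fp + 0                : caller's fp
//   fp - 1 * kPointerSize : context (cp)
//   fp - 2 * kPointerSize : JSFunction (the r4 we were called with)
//   fp - 3 * kPointerSize : first stack local (kLocal0Offset)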
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
    __ bne(&ok);
    __ LoadP(r5, GlobalObjectOperand());
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
    __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
  info->AddNoFrameRange(0, masm_->pc_offset());

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }
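  // For example, locals_count == 70 with kMaxPushes == 32 emits a counted
  // loop of 70 / 32 == 2 iterations (64 pushes) followed by 70 % 32 == 6
  // straight-line pushes, instead of 70 unrolled pushes.
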
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r4);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r3.  It replaces the context passed to us.
    // It's saved on the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the 'this' function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
    }
    SetVar(this_function_var, r4, r3, r5);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
    __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
    Label skip;
    __ bne(&skip);
    __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
    __ bind(&skip);

    // Check the marker in the calling frame.
    __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
    __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
    Label non_construct_frame, done;

    __ bne(&non_construct_frame);
    __ LoadP(r3, MemOperand(
                     r5, ConstructFrameConstants::kOriginalConstructorOffset));
    __ b(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, r3, r5, r6);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ LoadSmiLiteral(r5, Smi::FromInt(num_parameters));
    __ LoadSmiLiteral(r4, Smi::FromInt(rest_index));
    __ LoadSmiLiteral(r3, Smi::FromInt(language_mode()));
    __ Push(r6, r5, r4, r3);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mr(r6, r4);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
    __ Push(r6, r5, r4);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    {
      Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmpl(sp, ip);
      __ bc_short(ge, &ok);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  if (HasStackOverflow()) {
    masm_->AbortConstantPoolBuilding();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
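  // For example, a back edge 1000 bytes from its target yields
  // distance == 1100 and (assuming this port's kCodeSizeMultiplier of 200)
  // weight == Min(kMaxBackEdgeWeight, Max(1, 1100 / 200)) == 5, so larger
  // loop bodies drain the interrupt budget faster and reach the
  // InterruptCheck sooner.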
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ cmpi(r6, Operand::Zero());
    __ bge(&ok);
    __ push(r3);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    __ pop(r3);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(function());
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

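// Concretely: undefined, null, false, the empty string and Smi 0 jump
// straight to false_label_; true, JSObjects, non-empty strings and non-zero
// Smis jump straight to true_label_; anything else (a heap number, for
// instance) falls back to the generic DoTest path.
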
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ cmpi(result_register(), Operand::Zero());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}

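// For example, Split(eq, &if_true, &if_false, &if_false) emits just
// 'beq if_true' and falls through to if_false, while a NULL fall_through
// requires both the conditional branch and the unconditional one.
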
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

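// For example, the stack local with index 0 resolves to
// MemOperand(fp, kLocal0Offset), i.e. fp - 3 * kPointerSize, index 1 to
// fp - 4 * kPointerSize, and so on down the frame; parameters resolve to
// positive offsets above fp, into the caller-pushed argument area.
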
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // lexical context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r5, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
      } else {
        __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(r5, r3);
      __ CallRuntime(IsImmutableVariableMode(mode)
                         ? Runtime::kDeclareReadOnlyLookupSlot
                         : Runtime::kDeclareLookupSlot,
                     2);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      __ push(r5);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 2);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals, 2);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ beq(&exit);
  Register null_value = r7;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r3, null_value);
  __ beq(&exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
  __ bge(&done_convert);
  __ bind(&convert);
  __ push(r3);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r3);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
  __ ble(&call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  __ bind(&fixed_array);

  __ Move(r4, FeedbackVector());
  __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ StoreP(
      r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);

  Label non_proxy;
  __ LoadSmiLiteral(r4, Smi::FromInt(1));          // Smi indicates slow check
  __ LoadP(r5, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
  __ bgt(&non_proxy);
  __ LoadSmiLiteral(r4, Smi::FromInt(0));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r4, r3);  // Smi and array
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r4, r3);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
  __ beq(&update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mr(r6, r3);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ cmp(r6, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope() && info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r5, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r3, Operand(info));
    __ LoadRoot(
        r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
    __ Push(cp, r3, r4);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r3);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ LoadP(StoreDescriptor::ValueRegister(),
             MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r4;
  Register temp = r5;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ beq(&fast);
    // Check that extension is NULL.
    __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ cmpi(temp, Operand::Zero());
    __ bne(slow);
    // Load next context in chain.
    __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r6;
  Register temp = r7;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ cmpi(temp, Operand::Zero());
  __ bne(slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
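  //
  // For example, given
  //   var x = 1;
  //   function f() { eval(something); return x; }
  // the 'x' in 'return x' is DYNAMIC_GLOBAL: it almost certainly refers to
  // the global 'x', but the sloppy eval could have introduced a shadowing
  // binding, so we load the expected binding fast here and hit the runtime
  // only if a context extension shows up on the chain.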
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ mov(r3, Operand(var->name()));
        __ push(r3);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    const int slot = var->index();
    const int depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
    }
  } else {
    __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ mov(LoadDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
                            var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          Label done;
          // Let and const need a read barrier.
          GetVar(r3, var);
          __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
          __ bne(&done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ mov(r3, Operand(var->name()));
            __ push(r3);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(r3);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ mov(r4, Operand(var->name()));
      __ Push(cp, r4);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(r3);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r8 = materialized value (RegExp literal)
  // r7 = JS function, literals array
  // r6 = literal index
  // r5 = RegExp pattern
  // r4 = RegExp flags
  // r3 = RegExp literal clone
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r8, ip);
  __ bne(&materialized);

  // Create regexp literal using runtime function.
  // Result will be in r3.
  __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
  __ mov(r5, Operand(expr->pattern()));
  __ mov(r4, Operand(expr->flags()));
  __ Push(r7, r6, r5, r4);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mr(r8, r3);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
  __ b(&allocated);

  __ bind(&runtime_allocate);
  __ LoadSmiLiteral(r3, Smi::FromInt(size));
  __ Push(r8, r3);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r8);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r3: Newly allocated regexp.
  // r8: Materialized regexp.
  // r5: temp.
  __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ push(r4);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ LoadSmiLiteral(r3, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in r3.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of
  // slots and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
      // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r3));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r3);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ LoadP(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
          __ push(r3);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
    __ push(r3);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result
  // to reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
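  //
  // For example, in { a: 1, [b]: 2, c: 3 } the static part is { a: 1 } (its
  // shape is known at compile time), while [b]: 2 and everything after it,
  // including the plain c: 3, belong to the dynamic part and are defined one
  // by one below to preserve insertion order.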
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on the stack
      result_saved = true;
    }

    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
            __ push(r3);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ LoadP(r3, MemOperand(sp));
    __ push(r3);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }

  // Verify that compilation exactly consumed the number of store ic slots
  // that the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}


1787 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1788 Comment cmnt(masm_, "[ ArrayLiteral");
1790 expr->BuildConstantElements(isolate());
1791 Handle<FixedArray> constant_elements = expr->constant_elements();
1792 bool has_fast_elements =
1793 IsFastObjectElementsKind(expr->constant_elements_kind());
1794 Handle<FixedArrayBase> constant_elements_values(
1795 FixedArrayBase::cast(constant_elements->get(1)));
1797 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1798 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1799 // If the only customer of allocation sites is transitioning, then
1800 // we can turn it off if we don't have anywhere else to transition to.
1801 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1802 }
1804 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1805 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1806 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1807 __ mov(r4, Operand(constant_elements));
1808 if (MustCreateArrayLiteralWithRuntime(expr)) {
1809 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1810 __ Push(r6, r5, r4, r3);
1811 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1812 } else {
1813 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1814 __ CallStub(&stub);
1815 }
1816 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1818 bool result_saved = false; // Is the result saved to the stack?
1819 ZoneList<Expression*>* subexprs = expr->values();
1820 int length = subexprs->length();
1822 // Emit code to evaluate all the non-constant subexpressions and to store
1823 // them into the newly cloned array.
1824 int array_index = 0;
1825 for (; array_index < length; array_index++) {
1826 Expression* subexpr = subexprs->at(array_index);
1827 if (subexpr->IsSpread()) break;
1828 // If the subexpression is a literal or a simple materialized literal it
1829 // is already set in the cloned array.
1830 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1832 if (!result_saved) {
1833 __ push(r3);
1834 __ Push(Smi::FromInt(expr->literal_index()));
1835 result_saved = true;
1836 }
1837 VisitForAccumulatorValue(subexpr);
1839 if (has_fast_elements) {
1840 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1841 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1842 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1843 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1844 // Update the write barrier for the array store.
1845 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1846 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1847 INLINE_SMI_CHECK);
1848 } else {
1849 __ LoadSmiLiteral(r6, Smi::FromInt(array_index));
1850 StoreArrayLiteralElementStub stub(isolate());
1851 __ CallStub(&stub);
1852 }
1854 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1855 }
1857 // In case the array literal contains spread expressions it has two parts. The
1858 // first part is the "static" array, which has a literal index and is handled
1859 // above. The second part is the part after the first spread expression
1860 // (inclusive), and these elements get appended to the array. Note that the
1861 // number of elements an iterable produces is unknown ahead of time.
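// Illustrative example (editor's note): for
//   var a = [1, 2, ...xs, 3];
// elements 1 and 2 are stored by the loop above, while '...xs' and everything
// after it go through the append loop below, because the number of elements
// produced by the iterable is only known at runtime.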
1862 if (array_index < length && result_saved) {
1863 __ Drop(1); // literal index
1864 __ Pop(r3);
1865 result_saved = false;
1866 }
1867 for (; array_index < length; array_index++) {
1868 Expression* subexpr = subexprs->at(array_index);
1870 __ Push(r3);
1871 if (subexpr->IsSpread()) {
1872 VisitForStackValue(subexpr->AsSpread()->expression());
1873 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1874 } else {
1875 VisitForStackValue(subexpr);
1876 __ CallRuntime(Runtime::kAppendElement, 2);
1877 }
1879 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1880 }
1882 if (result_saved) {
1883 __ Drop(1); // literal index
1884 context()->PlugTOS();
1885 } else {
1886 context()->Plug(r3);
1887 }
1888 }
1891 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1892 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1894 Comment cmnt(masm_, "[ Assignment");
1895 SetExpressionPosition(expr, INSERT_BREAK);
1897 Property* property = expr->target()->AsProperty();
1898 LhsKind assign_type = Property::GetAssignType(property);
1900 // Evaluate LHS expression.
1901 switch (assign_type) {
1902 case VARIABLE:
1903 // Nothing to do here.
1904 break;
1905 case NAMED_PROPERTY:
1906 if (expr->is_compound()) {
1907 // We need the receiver both on the stack and in the register.
1908 VisitForStackValue(property->obj());
1909 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1910 } else {
1911 VisitForStackValue(property->obj());
1912 }
1913 break;
1914 case NAMED_SUPER_PROPERTY:
1915 VisitForStackValue(
1916 property->obj()->AsSuperPropertyReference()->this_var());
1917 VisitForAccumulatorValue(
1918 property->obj()->AsSuperPropertyReference()->home_object());
1919 __ Push(result_register());
1920 if (expr->is_compound()) {
1921 const Register scratch = r4;
1922 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1923 __ Push(scratch, result_register());
1924 }
1925 break;
1926 case KEYED_SUPER_PROPERTY: {
1927 const Register scratch = r4;
1928 VisitForStackValue(
1929 property->obj()->AsSuperPropertyReference()->this_var());
1930 VisitForAccumulatorValue(
1931 property->obj()->AsSuperPropertyReference()->home_object());
1932 __ mr(scratch, result_register());
1933 VisitForAccumulatorValue(property->key());
1934 __ Push(scratch, result_register());
1935 if (expr->is_compound()) {
1936 const Register scratch1 = r5;
1937 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1938 __ Push(scratch1, scratch, result_register());
1939 }
1940 break;
1941 }
1942 case KEYED_PROPERTY:
1943 if (expr->is_compound()) {
1944 VisitForStackValue(property->obj());
1945 VisitForStackValue(property->key());
1946 __ LoadP(LoadDescriptor::ReceiverRegister(),
1947 MemOperand(sp, 1 * kPointerSize));
1948 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1949 } else {
1950 VisitForStackValue(property->obj());
1951 VisitForStackValue(property->key());
1952 }
1953 break;
1954 }
1956 // For compound assignments we need another deoptimization point after the
1957 // variable/property load.
1958 if (expr->is_compound()) {
1959 {
1960 AccumulatorValueContext context(this);
1961 switch (assign_type) {
1962 case VARIABLE:
1963 EmitVariableLoad(expr->target()->AsVariableProxy());
1964 PrepareForBailout(expr->target(), TOS_REG);
1965 break;
1966 case NAMED_PROPERTY:
1967 EmitNamedPropertyLoad(property);
1968 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1969 break;
1970 case NAMED_SUPER_PROPERTY:
1971 EmitNamedSuperPropertyLoad(property);
1972 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1973 break;
1974 case KEYED_SUPER_PROPERTY:
1975 EmitKeyedSuperPropertyLoad(property);
1976 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1977 break;
1978 case KEYED_PROPERTY:
1979 EmitKeyedPropertyLoad(property);
1980 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1981 break;
1982 }
1983 }
1985 Token::Value op = expr->binary_op();
1986 __ push(r3); // Left operand goes on the stack.
1987 VisitForAccumulatorValue(expr->value());
1989 AccumulatorValueContext context(this);
1990 if (ShouldInlineSmiCase(op)) {
1991 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1992 expr->value());
1993 } else {
1994 EmitBinaryOp(expr->binary_operation(), op);
1995 }
1997 // Deoptimization point in case the binary operation may have side effects.
1998 PrepareForBailout(expr->binary_operation(), TOS_REG);
1999 } else {
2000 VisitForAccumulatorValue(expr->value());
2001 }
2003 SetExpressionPosition(expr);
2006 switch (assign_type) {
2007 case VARIABLE:
2008 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2009 expr->op(), expr->AssignmentSlot());
2010 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2011 context()->Plug(r3);
2012 break;
2013 case NAMED_PROPERTY:
2014 EmitNamedPropertyAssignment(expr);
2015 break;
2016 case NAMED_SUPER_PROPERTY:
2017 EmitNamedSuperPropertyStore(property);
2018 context()->Plug(r3);
2019 break;
2020 case KEYED_SUPER_PROPERTY:
2021 EmitKeyedSuperPropertyStore(property);
2022 context()->Plug(r3);
2023 break;
2024 case KEYED_PROPERTY:
2025 EmitKeyedPropertyAssignment(expr);
2026 break;
2027 }
2028 }
2031 void FullCodeGenerator::VisitYield(Yield* expr) {
2032 Comment cmnt(masm_, "[ Yield");
2033 SetExpressionPosition(expr);
2035 // Evaluate yielded value first; the initial iterator definition depends on
2036 // this. It stays on the stack while we update the iterator.
2037 VisitForStackValue(expr->expression());
2039 switch (expr->yield_kind()) {
2040 case Yield::kSuspend:
2041 // Pop value from top-of-stack slot; box result into result register.
2042 EmitCreateIteratorResult(false);
2043 __ push(result_register());
2044 // Fall through.
2045 case Yield::kInitial: {
2046 Label suspend, continuation, post_runtime, resume;
2048 __ b(&suspend);
2049 __ bind(&continuation);
2050 __ RecordGeneratorContinuation();
2051 __ b(&resume);
2053 __ bind(&suspend);
2054 VisitForAccumulatorValue(expr->generator_object());
2055 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2056 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2057 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2058 r0);
2059 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2060 __ mr(r4, cp);
2061 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2062 kLRHasBeenSaved, kDontSaveFPRegs);
2063 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2064 __ cmp(sp, r4);
2065 __ beq(&post_runtime);
2066 __ push(r3); // generator object
2067 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2068 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2069 __ bind(&post_runtime);
2070 __ pop(result_register());
2071 EmitReturnSequence();
2073 __ bind(&resume);
2074 context()->Plug(result_register());
2075 break;
2076 }
2078 case Yield::kFinal: {
2079 VisitForAccumulatorValue(expr->generator_object());
2080 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2081 __ StoreP(r4, FieldMemOperand(result_register(),
2082 JSGeneratorObject::kContinuationOffset),
2083 r0);
2084 // Pop value from top-of-stack slot, box result into result register.
2085 EmitCreateIteratorResult(true);
2086 EmitUnwindBeforeReturn();
2087 EmitReturnSequence();
2088 break;
2089 }
2091 case Yield::kDelegating: {
2092 VisitForStackValue(expr->generator_object());
2094 // Initial stack layout is as follows:
2095 // [sp + 1 * kPointerSize] iter
2096 // [sp + 0 * kPointerSize] g
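// Editor's sketch of the protocol implemented below (illustrative JS, not
// from the original source):
//   f = 'next'; arg = undefined;
//   for (;;) {
//     result = iter[f](arg);                    // l_call
//     if (result.done) break;                   // result.value is the answer
//     try { arg = yield result; f = 'next'; }   // l_try ... l_suspend
//     catch (e) { f = 'throw'; arg = e; }       // l_catch
//   }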
2098 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2099 Label l_next, l_call;
2100 Register load_receiver = LoadDescriptor::ReceiverRegister();
2101 Register load_name = LoadDescriptor::NameRegister();
2103 // Initial send value is undefined.
2104 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2105 __ b(&l_next);
2107 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2108 __ bind(&l_catch);
2109 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2110 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2111 __ Push(load_name, r6, r3); // "throw", iter, except
2112 __ b(&l_call);
2114 // try { received = %yield result }
2115 // Shuffle the received result above a try handler and yield it without
2116 // re-boxing.
2117 __ bind(&l_try);
2118 __ pop(r3); // result
2119 int handler_index = NewHandlerTableEntry();
2120 EnterTryBlock(handler_index, &l_catch);
2121 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2122 __ push(r3); // result
2123 __ b(&l_suspend);
2125 __ bind(&l_continuation);
2126 __ RecordGeneratorContinuation();
2127 __ b(&l_resume);
2129 __ bind(&l_suspend);
2130 const int generator_object_depth = kPointerSize + try_block_size;
2131 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2132 __ push(r3); // g
2133 __ Push(Smi::FromInt(handler_index)); // handler-index
2134 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2135 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2136 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2137 r0);
2138 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2139 __ mr(r4, cp);
2140 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2141 kLRHasBeenSaved, kDontSaveFPRegs);
2142 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2143 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2144 __ pop(r3); // result
2145 EmitReturnSequence();
2146 __ bind(&l_resume); // received in r3
2147 ExitTryBlock(handler_index);
2149 // receiver = iter; f = 'next'; arg = received;
2150 __ bind(&l_next);
2152 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2153 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2154 __ Push(load_name, r6, r3); // "next", iter, received
2156 // result = receiver[f](arg);
2157 __ bind(&l_call);
2158 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2159 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2160 __ mov(LoadDescriptor::SlotRegister(),
2161 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2162 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2163 CallIC(ic, TypeFeedbackId::None());
2164 __ mr(r4, r3);
2165 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2166 SetCallPosition(expr, 1);
2167 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2168 __ CallStub(&stub);
2170 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2171 __ Drop(1); // The function is still on the stack; drop it.
2173 // if (!result.done) goto l_try;
2174 __ Move(load_receiver, r3);
2176 __ push(load_receiver); // save result
2177 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2178 __ mov(LoadDescriptor::SlotRegister(),
2179 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2180 CallLoadIC(NOT_INSIDE_TYPEOF); // r3=result.done
2181 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2182 CallIC(bool_ic);
2183 __ cmpi(r3, Operand::Zero());
2184 __ beq(&l_try);
2186 // result.value
2187 __ pop(load_receiver); // result
2188 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2189 __ mov(LoadDescriptor::SlotRegister(),
2190 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2191 CallLoadIC(NOT_INSIDE_TYPEOF); // r3=result.value
2192 context()->DropAndPlug(2, r3); // drop iter and g
2193 break;
2194 }
2195 }
2196 }
2199 void FullCodeGenerator::EmitGeneratorResume(
2200 Expression* generator, Expression* value,
2201 JSGeneratorObject::ResumeMode resume_mode) {
2202 // The value stays in r3, and is ultimately read by the resumed generator, as
2203 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2204 // is read to throw the value when the resumed generator is already closed.
2205 // r4 will hold the generator object until the activation has been resumed.
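// Illustrative example (editor's note): for g.next(v) or g.throw(v), 'v' is
// the value evaluated into the result register below and 'g' is the generator
// object that ends up in r4.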
2206 VisitForStackValue(generator);
2207 VisitForAccumulatorValue(value);
2208 __ pop(r4);
2210 // Load suspended function and context.
2211 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2212 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2214 // Load receiver and store as the first argument.
2215 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2216 __ push(r5);
2218 // Push holes for the rest of the arguments to the generator function.
2219 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2220 __ LoadWordArith(
2221 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2222 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2223 Label argument_loop, push_frame;
2224 #if V8_TARGET_ARCH_PPC64
2225 __ cmpi(r6, Operand::Zero());
2226 __ beq(&push_frame);
2227 #else
2228 __ SmiUntag(r6, SetRC);
2229 __ beq(&push_frame, cr0);
2230 #endif
2231 __ mtctr(r6);
2232 __ bind(&argument_loop);
2233 __ push(r5);
2234 __ bdnz(&argument_loop);
2236 // Enter a new JavaScript frame, and initialize its slots as they were when
2237 // the generator was suspended.
2238 Label resume_frame, done;
2239 __ bind(&push_frame);
2240 __ b(&resume_frame, SetLK);
2241 __ b(&done);
2242 __ bind(&resume_frame);
2243 // lr = return address.
2244 // fp = caller's frame pointer.
2245 // cp = callee's context.
2246 // r7 = callee's JS function.
2247 __ PushFixedFrame(r7);
2248 // Adjust FP to point to saved FP.
2249 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2251 // Load the operand stack size.
2252 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2253 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2254 __ SmiUntag(r6, SetRC);
2256 // If we are sending a value and there is no operand stack, we can jump back
2257 // in directly.
2258 Label call_resume;
2259 if (resume_mode == JSGeneratorObject::NEXT) {
2260 Label slow_resume;
2261 __ bne(&slow_resume, cr0);
2262 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2263 {
2264 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2265 if (FLAG_enable_embedded_constant_pool) {
2266 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
2267 }
2268 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2269 __ SmiUntag(r5);
2270 __ add(ip, ip, r5);
2271 __ LoadSmiLiteral(r5,
2272 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2273 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2274 r0);
2275 __ Jump(ip);
2276 __ bind(&slow_resume);
2277 }
2278 } else {
2279 __ beq(&call_resume, cr0);
2280 }
2282 // Otherwise, we push holes for the operand stack and call the runtime to fix
2283 // up the stack and the handlers.
2284 Label operand_loop;
2285 __ mtctr(r6);
2286 __ bind(&operand_loop);
2287 __ push(r5);
2288 __ bdnz(&operand_loop);
2290 __ bind(&call_resume);
2291 DCHECK(!result_register().is(r4));
2292 __ Push(r4, result_register());
2293 __ Push(Smi::FromInt(resume_mode));
2294 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2295 // Not reached: the runtime call returns elsewhere.
2296 __ stop("not-reached");
2298 __ bind(&done);
2299 context()->Plug(result_register());
2300 }
2303 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2304 Label gc_required;
2305 Label allocated;
2307 const int instance_size = 5 * kPointerSize;
2308 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2309 instance_size);
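// Editor's note: the five pointer-size fields are map, properties, elements,
// and the two data properties of the iterator result, i.e. an object of the
// shape { value: v, done: b }.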
2311 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2312 __ b(&allocated);
2314 __ bind(&gc_required);
2315 __ Push(Smi::FromInt(instance_size));
2316 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2317 __ LoadP(context_register(),
2318 MemOperand(fp, StandardFrameConstants::kContextOffset));
2320 __ bind(&allocated);
2321 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2322 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2323 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2324 __ pop(r5);
2325 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2326 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2327 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2328 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2329 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2330 __ StoreP(r5,
2331 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2332 r0);
2333 __ StoreP(r6,
2334 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2335 r0);
2337 // Only the value field needs a write barrier, as the other values are in the
2338 // root set.
2339 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2340 kLRHasBeenSaved, kDontSaveFPRegs);
2341 }
2344 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2345 SetExpressionPosition(prop);
2346 Literal* key = prop->key()->AsLiteral();
2347 DCHECK(!prop->IsSuperAccess());
2349 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2350 __ mov(LoadDescriptor::SlotRegister(),
2351 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2352 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2353 }
2356 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2357 // Stack: receiver, home_object.
2358 SetExpressionPosition(prop);
2359 Literal* key = prop->key()->AsLiteral();
2360 DCHECK(!key->value()->IsSmi());
2361 DCHECK(prop->IsSuperAccess());
2363 __ Push(key->value());
2364 __ Push(Smi::FromInt(language_mode()));
2365 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2366 }
2369 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2370 SetExpressionPosition(prop);
2371 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2372 __ mov(LoadDescriptor::SlotRegister(),
2373 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2374 CallIC(ic);
2375 }
2378 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2379 // Stack: receiver, home_object, key.
2380 SetExpressionPosition(prop);
2381 __ Push(Smi::FromInt(language_mode()));
2382 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2383 }
2386 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2387 Token::Value op,
2388 Expression* left_expr,
2389 Expression* right_expr) {
2390 Label done, smi_case, stub_call;
2392 Register scratch1 = r5;
2393 Register scratch2 = r6;
2395 // Get the arguments.
2396 Register left = r4;
2397 Register right = r3;
2398 __ pop(left);
2400 // Perform combined smi check on both operands.
2401 __ orx(scratch1, left, right);
2402 STATIC_ASSERT(kSmiTag == 0);
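// Editor's note (illustrative): with kSmiTag == 0 the low-order tag bit of a
// smi is 0, so the orx above leaves the tag bit clear only when both operands
// are smis; the patch site below branches on that single combined test.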
2403 JumpPatchSite patch_site(masm_);
2404 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2406 __ bind(&stub_call);
2408 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2409 CallIC(code, expr->BinaryOperationFeedbackId());
2410 patch_site.EmitPatchInfo();
2411 __ b(&done);
2413 __ bind(&smi_case);
2414 // Smi case. This code works the same way as the smi-smi case in the type
2415 // recording binary operation stub.
2416 switch (op) {
2417 case Token::SAR:
2418 __ GetLeastBitsFromSmi(scratch1, right, 5);
2419 __ ShiftRightArith(right, left, scratch1);
2420 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2421 break;
2422 case Token::SHL: {
2423 __ GetLeastBitsFromSmi(scratch2, right, 5);
2424 #if V8_TARGET_ARCH_PPC64
2425 __ ShiftLeft_(right, left, scratch2);
2426 #else
2427 __ SmiUntag(scratch1, left);
2428 __ ShiftLeft_(scratch1, scratch1, scratch2);
2429 // Check that the *signed* result fits in a smi
2430 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2431 __ SmiTag(right, scratch1);
2432 #endif
2433 break;
2434 }
2435 case Token::SHR: {
2436 __ SmiUntag(scratch1, left);
2437 __ GetLeastBitsFromSmi(scratch2, right, 5);
2438 __ srw(scratch1, scratch1, scratch2);
2439 // Unsigned shift is not allowed to produce a negative number.
2440 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2441 __ SmiTag(right, scratch1);
2442 break;
2443 }
2444 case Token::ADD: {
2445 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2446 __ BranchOnOverflow(&stub_call);
2447 __ mr(right, scratch1);
2448 break;
2449 }
2450 case Token::SUB: {
2451 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2452 __ BranchOnOverflow(&stub_call);
2453 __ mr(right, scratch1);
2454 break;
2455 }
2456 case Token::MUL: {
2457 Label mul_zero;
2458 #if V8_TARGET_ARCH_PPC64
2459 // Remove tag from both operands.
2460 __ SmiUntag(ip, right);
2461 __ SmiUntag(r0, left);
2462 __ Mul(scratch1, r0, ip);
2463 // Check for overflowing the smi range - no overflow if higher 33 bits of
2464 // the result are identical.
2465 __ TestIfInt32(scratch1, r0);
2466 __ bne(&stub_call);
2467 #else
2468 __ SmiUntag(ip, right);
2469 __ mullw(scratch1, left, ip);
2470 __ mulhw(scratch2, left, ip);
2471 // Check for overflowing the smi range - no overflow if higher 33 bits of
2472 // the result are identical.
2473 __ TestIfInt32(scratch2, scratch1, ip);
2474 __ bne(&stub_call);
2475 #endif
2476 // Go slow on zero result to handle -0.
2477 __ cmpi(scratch1, Operand::Zero());
2478 __ beq(&mul_zero);
2479 #if V8_TARGET_ARCH_PPC64
2480 __ SmiTag(right, scratch1);
2481 #else
2482 __ mr(right, scratch1);
2483 #endif
2484 __ b(&done);
2485 // We need -0 if we were multiplying a negative number with 0 to get 0.
2486 // We know one of them was zero.
2487 __ bind(&mul_zero);
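// Editor's illustration (not in the original source): for (-3) * 0 the
// integer product is 0, but JavaScript requires -0. Exactly one operand is
// zero here, so left + right is negative iff the nonzero operand was
// negative; in that case the code below bails out to the stub, which can
// allocate the heap number -0.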
2488 __ add(scratch2, right, left);
2489 __ cmpi(scratch2, Operand::Zero());
2490 __ blt(&stub_call);
2491 __ LoadSmiLiteral(right, Smi::FromInt(0));
2492 break;
2493 }
2494 case Token::BIT_OR:
2495 __ orx(right, left, right);
2496 break;
2497 case Token::BIT_AND:
2498 __ and_(right, left, right);
2499 break;
2500 case Token::BIT_XOR:
2501 __ xor_(right, left, right);
2502 break;
2503 default:
2504 UNREACHABLE();
2505 }
2507 __ bind(&done);
2508 context()->Plug(r3);
2509 }
2512 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2513 int* used_store_slots) {
2514 // Constructor is in r3.
2515 DCHECK(lit != NULL);
2516 __ push(r3);
2518 // No access check is needed here since the constructor is created by the
2519 // class literal.
2520 Register scratch = r4;
2521 __ LoadP(scratch,
2522 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2523 __ push(scratch);
2525 for (int i = 0; i < lit->properties()->length(); i++) {
2526 ObjectLiteral::Property* property = lit->properties()->at(i);
2527 Expression* value = property->value();
2529 if (property->is_static()) {
2530 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2531 } else {
2532 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2533 }
2534 __ push(scratch);
2535 EmitPropertyKey(property, lit->GetIdForProperty(i));
2537 // The static prototype property is read-only. We handle the non-computed
2538 // property name case in the parser. Since this is the only case where we
2539 // need to check for an own read-only property, we special-case it so we do
2540 // not need to do the check for every property.
2541 if (property->is_static() && property->is_computed_name()) {
2542 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2543 __ push(r3);
2544 }
2546 VisitForStackValue(value);
2547 EmitSetHomeObjectIfNeeded(value, 2,
2548 lit->SlotForHomeObject(value, used_store_slots));
2550 switch (property->kind()) {
2551 case ObjectLiteral::Property::CONSTANT:
2552 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2553 case ObjectLiteral::Property::PROTOTYPE:
2554 UNREACHABLE();
2555 case ObjectLiteral::Property::COMPUTED:
2556 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2557 break;
2559 case ObjectLiteral::Property::GETTER:
2560 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2561 __ push(r3);
2562 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2563 break;
2565 case ObjectLiteral::Property::SETTER:
2566 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2567 __ push(r3);
2568 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2569 break;
2571 default:
2572 UNREACHABLE();
2573 }
2574 }
2576 // Set both the prototype and constructor to have fast properties, and also
2577 // freeze them in strong mode.
2578 __ CallRuntime(is_strong(language_mode())
2579 ? Runtime::kFinalizeClassDefinitionStrong
2580 : Runtime::kFinalizeClassDefinition,
2581 2);
2582 }
2585 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2586 __ pop(r4);
2587 Handle<Code> code =
2588 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2589 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2590 CallIC(code, expr->BinaryOperationFeedbackId());
2591 patch_site.EmitPatchInfo();
2592 context()->Plug(r3);
2593 }
2596 void FullCodeGenerator::EmitAssignment(Expression* expr,
2597 FeedbackVectorICSlot slot) {
2598 DCHECK(expr->IsValidReferenceExpressionOrThis());
2600 Property* prop = expr->AsProperty();
2601 LhsKind assign_type = Property::GetAssignType(prop);
2603 switch (assign_type) {
2604 case VARIABLE: {
2605 Variable* var = expr->AsVariableProxy()->var();
2606 EffectContext context(this);
2607 EmitVariableAssignment(var, Token::ASSIGN, slot);
2608 break;
2609 }
2610 case NAMED_PROPERTY: {
2611 __ push(r3); // Preserve value.
2612 VisitForAccumulatorValue(prop->obj());
2613 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2614 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2615 __ mov(StoreDescriptor::NameRegister(),
2616 Operand(prop->key()->AsLiteral()->value()));
2617 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2618 CallStoreIC();
2619 break;
2620 }
2621 case NAMED_SUPER_PROPERTY: {
2622 __ Push(r3);
2623 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2624 VisitForAccumulatorValue(
2625 prop->obj()->AsSuperPropertyReference()->home_object());
2626 // stack: value, this; r3: home_object
2627 Register scratch = r5;
2628 Register scratch2 = r6;
2629 __ mr(scratch, result_register()); // home_object
2630 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2631 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2632 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2633 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2634 // stack: this, home_object; r3: value
2635 EmitNamedSuperPropertyStore(prop);
2636 break;
2637 }
2638 case KEYED_SUPER_PROPERTY: {
2639 __ Push(r3);
2640 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2641 VisitForStackValue(
2642 prop->obj()->AsSuperPropertyReference()->home_object());
2643 VisitForAccumulatorValue(prop->key());
2644 Register scratch = r5;
2645 Register scratch2 = r6;
2646 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2647 // stack: value, this, home_object; r3: key, r6: value
2648 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2649 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2650 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2651 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2652 __ StoreP(r3, MemOperand(sp, 0));
2653 __ Move(r3, scratch2);
2654 // stack: this, home_object, key; r3: value.
2655 EmitKeyedSuperPropertyStore(prop);
2656 break;
2657 }
2658 case KEYED_PROPERTY: {
2659 __ push(r3); // Preserve value.
2660 VisitForStackValue(prop->obj());
2661 VisitForAccumulatorValue(prop->key());
2662 __ Move(StoreDescriptor::NameRegister(), r3);
2663 __ Pop(StoreDescriptor::ValueRegister(),
2664 StoreDescriptor::ReceiverRegister());
2665 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2666 Handle<Code> ic =
2667 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2668 CallIC(ic);
2669 break;
2670 }
2671 }
2672 context()->Plug(r3);
2673 }
2676 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2677 Variable* var, MemOperand location) {
2678 __ StoreP(result_register(), location, r0);
2679 if (var->IsContextSlot()) {
2680 // RecordWrite may destroy all its register arguments.
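// Editor's note (illustrative): in
//   function f() { var x; return function() { x = 1; }; }
// the inner assignment writes 'x' into a context slot, so the plain store
// above must be followed by this write barrier so the GC notices a new-space
// value stored into an old-space context object.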
2681 __ mr(r6, result_register());
2682 int offset = Context::SlotOffset(var->index());
2683 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2684 kDontSaveFPRegs);
2685 }
2686 }
2689 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2690 FeedbackVectorICSlot slot) {
2691 if (var->IsUnallocated()) {
2692 // Global var, const, or let.
2693 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2694 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2695 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2696 CallStoreIC();
2698 } else if (var->IsGlobalSlot()) {
2699 // Global var, const, or let.
2700 DCHECK(var->index() > 0);
2701 DCHECK(var->IsStaticGlobalObjectProperty());
2702 const int slot = var->index();
2703 const int depth = scope()->ContextChainLength(var->scope());
2704 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2705 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2706 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r3));
2707 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2708 __ CallStub(&stub);
2709 } else {
2710 __ Push(Smi::FromInt(slot));
2711 __ push(r3);
2712 __ CallRuntime(is_strict(language_mode())
2713 ? Runtime::kStoreGlobalViaContext_Strict
2714 : Runtime::kStoreGlobalViaContext_Sloppy,
2715 2);
2716 }
2717 } else if (var->mode() == LET && op != Token::INIT_LET) {
2718 // Non-initializing assignment to let variable needs a write barrier.
2719 DCHECK(!var->IsLookupSlot());
2720 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2721 Label assign;
2722 MemOperand location = VarOperand(var, r4);
2723 __ LoadP(r6, location);
2724 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2725 __ bne(&assign);
2726 __ mov(r6, Operand(var->name()));
2727 __ push(r6);
2728 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2729 // Perform the assignment.
2730 __ bind(&assign);
2731 EmitStoreToStackLocalOrContextSlot(var, location);
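// Illustrative example (editor's note): for
//   let x; x = 1;
// the assignment takes the store above, while an assignment that still sees
// the hole, e.g. 'x = 1; let x;', reaches the ThrowReferenceError path
// instead (the temporal dead zone check).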
2733 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2734 // Assignment to const variable needs a write barrier.
2735 DCHECK(!var->IsLookupSlot());
2736 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2737 Label const_error;
2738 MemOperand location = VarOperand(var, r4);
2739 __ LoadP(r6, location);
2740 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2741 __ bne(&const_error);
2742 __ mov(r6, Operand(var->name()));
2743 __ push(r6);
2744 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2745 __ bind(&const_error);
2746 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2748 } else if (var->is_this() && op == Token::INIT_CONST) {
2749 // Initializing assignment to const {this} needs a write barrier.
2750 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2751 Label uninitialized_this;
2752 MemOperand location = VarOperand(var, r4);
2753 __ LoadP(r6, location);
2754 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2755 __ beq(&uninitialized_this);
2756 __ mov(r4, Operand(var->name()));
2757 __ push(r4);
2758 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2759 __ bind(&uninitialized_this);
2760 EmitStoreToStackLocalOrContextSlot(var, location);
2762 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2763 if (var->IsLookupSlot()) {
2764 // Assignment to var.
2765 __ push(r3); // Value.
2766 __ mov(r4, Operand(var->name()));
2767 __ mov(r3, Operand(Smi::FromInt(language_mode())));
2768 __ Push(cp, r4, r3); // Context, name, language mode.
2769 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2770 } else {
2771 // Assignment to var or initializing assignment to let/const in harmony
2772 // mode.
2773 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2774 MemOperand location = VarOperand(var, r4);
2775 if (generate_debug_code_ && op == Token::INIT_LET) {
2776 // Check for an uninitialized let binding.
2777 __ LoadP(r5, location);
2778 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2779 __ Check(eq, kLetBindingReInitialization);
2780 }
2781 EmitStoreToStackLocalOrContextSlot(var, location);
2782 }
2783 } else if (op == Token::INIT_CONST_LEGACY) {
2784 // Const initializers need a write barrier.
2785 DCHECK(var->mode() == CONST_LEGACY);
2786 DCHECK(!var->IsParameter()); // No const parameters.
2787 if (var->IsLookupSlot()) {
2788 __ push(r3);
2789 __ mov(r3, Operand(var->name()));
2790 __ Push(cp, r3); // Context and name.
2791 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2792 } else {
2793 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2794 Label skip;
2795 MemOperand location = VarOperand(var, r4);
2796 __ LoadP(r5, location);
2797 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2798 __ bne(&skip);
2799 EmitStoreToStackLocalOrContextSlot(var, location);
2800 __ bind(&skip);
2801 }
2803 } else {
2804 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2805 if (is_strict(language_mode())) {
2806 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2807 }
2808 // Silently ignore store in sloppy mode.
2809 }
2810 }
2813 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2814 // Assignment to a property, using a named store IC.
2815 Property* prop = expr->target()->AsProperty();
2816 DCHECK(prop != NULL);
2817 DCHECK(prop->key()->IsLiteral());
2819 __ mov(StoreDescriptor::NameRegister(),
2820 Operand(prop->key()->AsLiteral()->value()));
2821 __ pop(StoreDescriptor::ReceiverRegister());
2822 if (FLAG_vector_stores) {
2823 EmitLoadStoreICSlot(expr->AssignmentSlot());
2824 CallStoreIC();
2825 } else {
2826 CallStoreIC(expr->AssignmentFeedbackId());
2827 }
2829 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2830 context()->Plug(r3);
2834 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2835 // Assignment to named property of super.
2836 // r3 : value
2837 // stack : receiver ('this'), home_object
2838 DCHECK(prop != NULL);
2839 Literal* key = prop->key()->AsLiteral();
2840 DCHECK(key != NULL);
2842 __ Push(key->value());
2843 __ Push(r3);
2844 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2845 : Runtime::kStoreToSuper_Sloppy),
2846 4);
2847 }
2850 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2851 // Assignment to keyed property of super.
2852 // r3 : value
2853 // stack : receiver ('this'), home_object, key
2854 DCHECK(prop != NULL);
2856 __ Push(r3);
2857 __ CallRuntime(
2858 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2859 : Runtime::kStoreKeyedToSuper_Sloppy),
2860 4);
2861 }
2864 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2865 // Assignment to a property, using a keyed store IC.
2866 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2867 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2869 Handle<Code> ic =
2870 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2871 if (FLAG_vector_stores) {
2872 EmitLoadStoreICSlot(expr->AssignmentSlot());
2873 CallIC(ic);
2874 } else {
2875 CallIC(ic, expr->AssignmentFeedbackId());
2876 }
2878 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2879 context()->Plug(r3);
2883 void FullCodeGenerator::VisitProperty(Property* expr) {
2884 Comment cmnt(masm_, "[ Property");
2885 SetExpressionPosition(expr);
2887 Expression* key = expr->key();
2889 if (key->IsPropertyName()) {
2890 if (!expr->IsSuperAccess()) {
2891 VisitForAccumulatorValue(expr->obj());
2892 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2893 EmitNamedPropertyLoad(expr);
2894 } else {
2895 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2896 VisitForAccumulatorValue(
2897 expr->obj()->AsSuperPropertyReference()->home_object());
2898 EmitNamedSuperPropertyLoad(expr);
2899 }
2900 } else {
2901 if (!expr->IsSuperAccess()) {
2902 VisitForStackValue(expr->obj());
2903 VisitForAccumulatorValue(expr->key());
2904 __ Move(LoadDescriptor::NameRegister(), r3);
2905 __ pop(LoadDescriptor::ReceiverRegister());
2906 EmitKeyedPropertyLoad(expr);
2907 } else {
2908 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2909 VisitForStackValue(
2910 expr->obj()->AsSuperPropertyReference()->home_object());
2911 VisitForStackValue(expr->key());
2912 EmitKeyedSuperPropertyLoad(expr);
2913 }
2914 }
2915 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2916 context()->Plug(r3);
2917 }
2920 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2921 ic_total_count_++;
2922 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2923 }
2926 // Code common for calls using the IC.
2927 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2928 Expression* callee = expr->expression();
2930 CallICState::CallType call_type =
2931 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2933 // Get the target function.
2934 if (call_type == CallICState::FUNCTION) {
2935 {
2936 StackValueContext context(this);
2937 EmitVariableLoad(callee->AsVariableProxy());
2938 PrepareForBailout(callee, NO_REGISTERS);
2939 }
2940 // Push undefined as receiver. This is patched in the method prologue if it
2941 // is a sloppy mode method.
2942 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2943 __ push(r0);
2944 } else {
2945 // Load the function from the receiver.
2946 DCHECK(callee->IsProperty());
2947 DCHECK(!callee->AsProperty()->IsSuperAccess());
2948 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2949 EmitNamedPropertyLoad(callee->AsProperty());
2950 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2951 // Push the target function under the receiver.
2952 __ LoadP(r0, MemOperand(sp, 0));
2953 __ push(r0);
2954 __ StoreP(r3, MemOperand(sp, kPointerSize));
2955 }
2957 EmitCall(expr, call_type);
2958 }
2961 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2962 Expression* callee = expr->expression();
2963 DCHECK(callee->IsProperty());
2964 Property* prop = callee->AsProperty();
2965 DCHECK(prop->IsSuperAccess());
2966 SetExpressionPosition(prop);
2968 Literal* key = prop->key()->AsLiteral();
2969 DCHECK(!key->value()->IsSmi());
2970 // Load the function from the receiver.
2971 const Register scratch = r4;
2972 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2973 VisitForAccumulatorValue(super_ref->home_object());
2974 __ mr(scratch, r3);
2975 VisitForAccumulatorValue(super_ref->this_var());
2976 __ Push(scratch, r3, r3, scratch);
2977 __ Push(key->value());
2978 __ Push(Smi::FromInt(language_mode()));
2980 // Stack here:
2981 // - home_object
2982 // - this (receiver)
2983 // - this (receiver) <-- LoadFromSuper will pop here and below.
2984 // - home_object
2985 // - key
2986 // - language_mode
2987 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2989 // Replace home_object with target function.
2990 __ StoreP(r3, MemOperand(sp, kPointerSize));
2992 // Stack here:
2993 // - target function
2994 // - this (receiver)
2995 EmitCall(expr, CallICState::METHOD);
2996 }
2999 // Code common for calls using the IC.
3000 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
3001 // Load the key.
3002 VisitForAccumulatorValue(key);
3004 Expression* callee = expr->expression();
3006 // Load the function from the receiver.
3007 DCHECK(callee->IsProperty());
3008 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3009 __ Move(LoadDescriptor::NameRegister(), r3);
3010 EmitKeyedPropertyLoad(callee->AsProperty());
3011 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3013 // Push the target function under the receiver.
3014 __ LoadP(ip, MemOperand(sp, 0));
3015 __ push(ip);
3016 __ StoreP(r3, MemOperand(sp, kPointerSize));
3018 EmitCall(expr, CallICState::METHOD);
3019 }
3022 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3023 Expression* callee = expr->expression();
3024 DCHECK(callee->IsProperty());
3025 Property* prop = callee->AsProperty();
3026 DCHECK(prop->IsSuperAccess());
3028 SetExpressionPosition(prop);
3029 // Load the function from the receiver.
3030 const Register scratch = r4;
3031 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3032 VisitForAccumulatorValue(super_ref->home_object());
3033 __ mr(scratch, r3);
3034 VisitForAccumulatorValue(super_ref->this_var());
3035 __ Push(scratch, r3, r3, scratch);
3036 VisitForStackValue(prop->key());
3037 __ Push(Smi::FromInt(language_mode()));
3039 // Stack here:
3040 // - home_object
3041 // - this (receiver)
3042 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3043 // - home_object
3044 // - key
3045 // - language_mode
3046 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3048 // Replace home_object with target function.
3049 __ StoreP(r3, MemOperand(sp, kPointerSize));
3051 // Stack here:
3052 // - target function
3053 // - this (receiver)
3054 EmitCall(expr, CallICState::METHOD);
3055 }
3058 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3059 // Load the arguments.
3060 ZoneList<Expression*>* args = expr->arguments();
3061 int arg_count = args->length();
3062 for (int i = 0; i < arg_count; i++) {
3063 VisitForStackValue(args->at(i));
3064 }
3066 SetCallPosition(expr, arg_count);
3067 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3068 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
3069 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3070 // Don't assign a type feedback id to the IC, since type feedback is provided
3071 // by the vector above.
3072 CallIC(ic);
3074 RecordJSReturnSite(expr);
3075 // Restore context register.
3076 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3077 context()->DropAndPlug(1, r3);
3081 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3082 // r7: copy of the first argument or undefined if it doesn't exist.
3083 if (arg_count > 0) {
3084 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
3085 } else {
3086 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3087 }
3089 // r6: the receiver of the enclosing function.
3090 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3092 // r5: language mode.
3093 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
3095 // r4: the start position of the scope the call resides in.
3096 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3098 // Do the runtime call.
3099 __ Push(r7, r6, r5, r4);
3100 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3101 }
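// Editor's note on the call above (illustrative): for a direct call such as
//   eval("x + 1")
// the runtime receives the pushed argument, enclosing function, language mode
// and scope position, and returns the function to invoke, so the caller can
// patch the stack and perform the call as a direct eval.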
3104 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3105 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3106 VariableProxy* callee = expr->expression()->AsVariableProxy();
3107 if (callee->var()->IsLookupSlot()) {
3108 Label slow, done;
3109 SetExpressionPosition(callee);
3110 // Generate code for loading from variables potentially shadowed by
3111 // eval-introduced variables.
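// Illustrative example (editor's note): in
//   function f() { eval("var g = function() { return 1; }"); return g(); }
// 'g' exists only in the runtime's lookup chain, so the fast-case load below
// must be able to fall through to the LoadLookupSlot slow path.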
3112 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3114 __ bind(&slow);
3115 // Call the runtime to find the function to call (returned in r3) and
3116 // the object holding it (returned in r4).
3117 DCHECK(!context_register().is(r5));
3118 __ mov(r5, Operand(callee->name()));
3119 __ Push(context_register(), r5);
3120 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3121 __ Push(r3, r4); // Function, receiver.
3122 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3124 // If fast case code has been generated, emit code to push the function
3125 // and receiver and have the slow path jump around this code.
3126 if (done.is_linked()) {
3127 Label call;
3128 __ b(&call);
3129 __ bind(&done);
3130 // Push function.
3131 __ push(r3);
3132 // Pass undefined as the receiver, which is the WithBaseObject of a
3133 // non-object environment record. If the callee is sloppy, it will patch
3134 // it up to be the global receiver.
3135 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3136 __ push(r4);
3137 __ bind(&call);
3138 }
3139 } else {
3140 VisitForStackValue(callee);
3141 // refEnv.WithBaseObject()
3142 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3143 __ push(r5); // Reserved receiver slot.
3144 }
3145 }
3148 void FullCodeGenerator::VisitCall(Call* expr) {
3149 #ifdef DEBUG
3150 // We want to verify that RecordJSReturnSite gets called on all paths
3151 // through this function. Avoid early returns.
3152 expr->return_is_recorded_ = false;
3153 #endif
3155 Comment cmnt(masm_, "[ Call");
3156 Expression* callee = expr->expression();
3157 Call::CallType call_type = expr->GetCallType(isolate());
3159 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3160 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3161 // to resolve the function we need to call. Then we call the resolved
3162 // function using the given arguments.
3163 ZoneList<Expression*>* args = expr->arguments();
3164 int arg_count = args->length();
3166 PushCalleeAndWithBaseObject(expr);
3168 // Push the arguments.
3169 for (int i = 0; i < arg_count; i++) {
3170 VisitForStackValue(args->at(i));
3171 }
3173 // Push a copy of the function (found below the arguments) and
3174 // resolve eval.
3175 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3176 __ push(r4);
3177 EmitResolvePossiblyDirectEval(arg_count);
3179 // Touch up the stack with the resolved function.
3180 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3182 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3184 // Record source position for debugger.
3185 SetCallPosition(expr, arg_count);
3186 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3187 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3188 __ CallStub(&stub);
3189 RecordJSReturnSite(expr);
3190 // Restore context register.
3191 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3192 context()->DropAndPlug(1, r3);
3193 } else if (call_type == Call::GLOBAL_CALL) {
3194 EmitCallWithLoadIC(expr);
3196 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3197 // Call to a lookup slot (dynamically introduced variable).
3198 PushCalleeAndWithBaseObject(expr);
3199 EmitCall(expr);
3200 } else if (call_type == Call::PROPERTY_CALL) {
3201 Property* property = callee->AsProperty();
3202 bool is_named_call = property->key()->IsPropertyName();
3203 if (property->IsSuperAccess()) {
3204 if (is_named_call) {
3205 EmitSuperCallWithLoadIC(expr);
3206 } else {
3207 EmitKeyedSuperCallWithLoadIC(expr);
3208 }
3209 } else {
3210 VisitForStackValue(property->obj());
3211 if (is_named_call) {
3212 EmitCallWithLoadIC(expr);
3213 } else {
3214 EmitKeyedCallWithLoadIC(expr, property->key());
3215 }
3216 }
3217 } else if (call_type == Call::SUPER_CALL) {
3218 EmitSuperConstructorCall(expr);
3219 } else {
3220 DCHECK(call_type == Call::OTHER_CALL);
3221 // Call to an arbitrary expression not handled specially above.
3222 VisitForStackValue(callee);
3223 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3224 __ push(r4);
3225 // Emit function call.
3226 EmitCall(expr);
3227 }
3229 #ifdef DEBUG
3230 // RecordJSReturnSite should have been called.
3231 DCHECK(expr->return_is_recorded_);
3232 #endif
3233 }
3236 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3237 Comment cmnt(masm_, "[ CallNew");
3238 // According to ECMA-262, section 11.2.2, page 44, the function
3239 // expression in new calls must be evaluated before the
3240 // arguments.
3242 // Push constructor on the stack. If it's not a function it's used as
3243 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3244 // ignored.
3245 DCHECK(!expr->expression()->IsSuperPropertyReference());
3246 VisitForStackValue(expr->expression());
3248 // Push the arguments ("left-to-right") on the stack.
3249 ZoneList<Expression*>* args = expr->arguments();
3250 int arg_count = args->length();
3251 for (int i = 0; i < arg_count; i++) {
3252 VisitForStackValue(args->at(i));
3253 }
3255 // Call the construct call builtin that handles allocation and
3256 // constructor invocation.
3257 SetConstructCallPosition(expr);
3259 // Load function and argument count into r4 and r3.
3260 __ mov(r3, Operand(arg_count));
3261 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3263 // Record call targets in unoptimized code.
3264 if (FLAG_pretenuring_call_new) {
3265 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3266 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3267 expr->CallNewFeedbackSlot().ToInt() + 1);
3268 }
3270 __ Move(r5, FeedbackVector());
3271 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3273 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3274 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3275 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3276 context()->Plug(r3);
3277 }
3280 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3281 SuperCallReference* super_call_ref =
3282 expr->expression()->AsSuperCallReference();
3283 DCHECK_NOT_NULL(super_call_ref);
3285 EmitLoadSuperConstructor(super_call_ref);
3286 __ push(result_register());
3288 // Push the arguments ("left-to-right") on the stack.
3289 ZoneList<Expression*>* args = expr->arguments();
3290 int arg_count = args->length();
3291 for (int i = 0; i < arg_count; i++) {
3292 VisitForStackValue(args->at(i));
3293 }
3295 // Call the construct call builtin that handles allocation and
3296 // constructor invocation.
3297 SetConstructCallPosition(expr);
3299 // Load original constructor into r7.
3300 VisitForAccumulatorValue(super_call_ref->new_target_var());
3301 __ mr(r7, result_register());
3303 // Load function and argument count into r4 and r3.
3304 __ mov(r3, Operand(arg_count));
3305 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3307 // Record call targets in unoptimized code.
3308 if (FLAG_pretenuring_call_new) {
3309 UNREACHABLE();
3310 /* TODO(dslomov): support pretenuring.
3311 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3312 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3313 expr->CallNewFeedbackSlot().ToInt() + 1);
3314 */
3315 }
3317 __ Move(r5, FeedbackVector());
3318 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
3320 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3321 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3323 RecordJSReturnSite(expr);
3325 context()->Plug(r3);
3326 }
3329 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3330 ZoneList<Expression*>* args = expr->arguments();
3331 DCHECK(args->length() == 1);
3333 VisitForAccumulatorValue(args->at(0));
3335 Label materialize_true, materialize_false;
3336 Label* if_true = NULL;
3337 Label* if_false = NULL;
3338 Label* fall_through = NULL;
3339 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3340 &if_false, &fall_through);
3342 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3343 __ TestIfSmi(r3, r0);
3344 Split(eq, if_true, if_false, fall_through, cr0);
3346 context()->Plug(if_true, if_false);
3350 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3351 ZoneList<Expression*>* args = expr->arguments();
3352 DCHECK(args->length() == 1);
3354 VisitForAccumulatorValue(args->at(0));
3356 Label materialize_true, materialize_false;
3357 Label* if_true = NULL;
3358 Label* if_false = NULL;
3359 Label* fall_through = NULL;
3360 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3361 &if_false, &fall_through);
3363 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3364 __ TestIfPositiveSmi(r3, r0);
3365 Split(eq, if_true, if_false, fall_through, cr0);
3367 context()->Plug(if_true, if_false);
3371 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3372 ZoneList<Expression*>* args = expr->arguments();
3373 DCHECK(args->length() == 1);
3375 VisitForAccumulatorValue(args->at(0));
3377 Label materialize_true, materialize_false;
3378 Label* if_true = NULL;
3379 Label* if_false = NULL;
3380 Label* fall_through = NULL;
3381 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3382 &if_false, &fall_through);
3384 __ JumpIfSmi(r3, if_false);
3385 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3386 __ cmp(r3, ip);
3387 __ beq(if_true);
3388 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3389 // Undetectable objects behave like undefined when tested with typeof.
3390 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3391 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3392 __ bne(if_false, cr0);
3393 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3394 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3395 __ blt(if_false);
3396 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3397 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3398 Split(le, if_true, if_false, fall_through);
3400 context()->Plug(if_true, if_false);
3404 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3405 ZoneList<Expression*>* args = expr->arguments();
3406 DCHECK(args->length() == 1);
3408 VisitForAccumulatorValue(args->at(0));
3410 Label materialize_true, materialize_false;
3411 Label* if_true = NULL;
3412 Label* if_false = NULL;
3413 Label* fall_through = NULL;
3414 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3415 &if_false, &fall_through);
3417 __ JumpIfSmi(r3, if_false);
3418 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3419 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3420 Split(ge, if_true, if_false, fall_through);
3422 context()->Plug(if_true, if_false);
3426 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3427 ZoneList<Expression*>* args = expr->arguments();
3428 DCHECK(args->length() == 1);
3430 VisitForAccumulatorValue(args->at(0));
3432 Label materialize_true, materialize_false;
3433 Label* if_true = NULL;
3434 Label* if_false = NULL;
3435 Label* fall_through = NULL;
3436 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3437 &if_false, &fall_through);
3439 __ JumpIfSmi(r3, if_false);
3440 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3441 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3442 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3443 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3444 Split(ne, if_true, if_false, fall_through, cr0);
3446 context()->Plug(if_true, if_false);
3450 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3451 CallRuntime* expr) {
3452 ZoneList<Expression*>* args = expr->arguments();
3453 DCHECK(args->length() == 1);
3455 VisitForAccumulatorValue(args->at(0));
3457 Label materialize_true, materialize_false, skip_lookup;
3458 Label* if_true = NULL;
3459 Label* if_false = NULL;
3460 Label* fall_through = NULL;
3461 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3462 &if_false, &fall_through);
3464 __ AssertNotSmi(r3);
3466 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3467 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3468 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3469 __ bne(&skip_lookup, cr0);
3471 // Check for fast case object. Generate false result for slow case object.
3472 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3473 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3474 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3475 __ cmp(r5, ip);
3476 __ beq(if_false);
3478 // Look for valueOf name in the descriptor array, and indicate false if
3479 // found. Since we omit an enumeration index check, if it is added via a
3480 // transition that shares its descriptor array, this is a false positive.
3481 Label entry, loop, done;
3483 // Skip loop if no descriptors are valid.
3484 __ NumberOfOwnDescriptors(r6, r4);
3485 __ cmpi(r6, Operand::Zero());
3486 __ beq(&done);
3488 __ LoadInstanceDescriptors(r4, r7);
3489 // r7: descriptor array.
3490 // r6: valid entries in the descriptor array.
3491 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3492 __ Mul(r6, r6, ip);
3493 // Calculate location of the first key name.
3494 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3495 // Calculate the end of the descriptor array.
3496 __ mr(r5, r7);
3497 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3498 __ add(r5, r5, ip);
3500 // Loop through all the keys in the descriptor array. If one of these is the
3501 // string "valueOf" the result is false.
3502 // The use of ip to store the valueOf string assumes that it is not otherwise
3503 // used in the loop below.
3504 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3505 __ b(&entry);
3506 __ bind(&loop);
3507 __ LoadP(r6, MemOperand(r7, 0));
3508 __ cmp(r6, ip);
3509 __ beq(if_false);
3510 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3511 __ bind(&entry);
3512 __ cmp(r7, r5);
3513 __ bne(&loop);
3515 __ bind(&done);
3517 // Set the bit in the map to indicate that there is no local valueOf field.
3518 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3519 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3520 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3522 __ bind(&skip_lookup);
3524 // If a valueOf property is not found on the object, check that its
3525 // prototype is the un-modified String prototype. If not, the result is false.
3526 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3527 __ JumpIfSmi(r5, if_false);
3528 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3529 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3530 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3531 __ LoadP(r6,
3532 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3533 __ cmp(r5, r6);
3534 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3535 Split(eq, if_true, if_false, fall_through);
3537 context()->Plug(if_true, if_false);
3541 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3542 ZoneList<Expression*>* args = expr->arguments();
3543 DCHECK(args->length() == 1);
3545 VisitForAccumulatorValue(args->at(0));
3547 Label materialize_true, materialize_false;
3548 Label* if_true = NULL;
3549 Label* if_false = NULL;
3550 Label* fall_through = NULL;
3551 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3552 &if_false, &fall_through);
3554 __ JumpIfSmi(r3, if_false);
3555 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3556 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3557 Split(eq, if_true, if_false, fall_through);
3559 context()->Plug(if_true, if_false);
3563 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3564 ZoneList<Expression*>* args = expr->arguments();
3565 DCHECK(args->length() == 1);
3567 VisitForAccumulatorValue(args->at(0));
3569 Label materialize_true, materialize_false;
3570 Label* if_true = NULL;
3571 Label* if_false = NULL;
3572 Label* fall_through = NULL;
3573 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3574 &if_false, &fall_through);
3576 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3577 #if V8_TARGET_ARCH_PPC64
3578 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3579 __ li(r5, Operand(1));
3580 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3581 __ cmp(r4, r5);
3582 #else
3583 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3584 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3585 Label skip;
3586 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3587 __ cmp(r5, r0);
3588 __ bne(&skip);
3589 __ cmpi(r4, Operand::Zero());
3590 __ bind(&skip);
3591 #endif
3593 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3594 Split(eq, if_true, if_false, fall_through);
3596 context()->Plug(if_true, if_false);
3597 }
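// -0.0 differs from +0.0 only in the IEEE-754 sign bit, so the check above
// is, in portable C++ terms (a sketch, not the generated code):
//
//   bool IsMinusZero(double x) {
//     uint64_t bits;
//     memcpy(&bits, &x, sizeof(bits));
//     return bits == 0x8000000000000000ull;  // sign bit set, all else zero
//   }
//
// On PPC64 the constant is materialized with li 1 + rotrdi 1; on 32-bit the
// exponent word must equal 0x80000000 and the mantissa word must be zero.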
3600 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3601 ZoneList<Expression*>* args = expr->arguments();
3602 DCHECK(args->length() == 1);
3604 VisitForAccumulatorValue(args->at(0));
3606 Label materialize_true, materialize_false;
3607 Label* if_true = NULL;
3608 Label* if_false = NULL;
3609 Label* fall_through = NULL;
3610 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3611 &if_false, &fall_through);
3613 __ JumpIfSmi(r3, if_false);
3614 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3615 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3616 Split(eq, if_true, if_false, fall_through);
3618 context()->Plug(if_true, if_false);
3622 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3623 ZoneList<Expression*>* args = expr->arguments();
3624 DCHECK(args->length() == 1);
3626 VisitForAccumulatorValue(args->at(0));
3628 Label materialize_true, materialize_false;
3629 Label* if_true = NULL;
3630 Label* if_false = NULL;
3631 Label* fall_through = NULL;
3632 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3633 &if_false, &fall_through);
3635 __ JumpIfSmi(r3, if_false);
3636 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
3637 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3638 Split(eq, if_true, if_false, fall_through);
3640 context()->Plug(if_true, if_false);
3644 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3645 ZoneList<Expression*>* args = expr->arguments();
3646 DCHECK(args->length() == 1);
3648 VisitForAccumulatorValue(args->at(0));
3650 Label materialize_true, materialize_false;
3651 Label* if_true = NULL;
3652 Label* if_false = NULL;
3653 Label* fall_through = NULL;
3654 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3655 &if_false, &fall_through);
3657 __ JumpIfSmi(r3, if_false);
3658 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3659 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3660 Split(eq, if_true, if_false, fall_through);
3662 context()->Plug(if_true, if_false);
3666 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3667 ZoneList<Expression*>* args = expr->arguments();
3668 DCHECK(args->length() == 1);
3670 VisitForAccumulatorValue(args->at(0));
3672 Label materialize_true, materialize_false;
3673 Label* if_true = NULL;
3674 Label* if_false = NULL;
3675 Label* fall_through = NULL;
3676 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3677 &if_false, &fall_through);
3679 __ JumpIfSmi(r3, if_false);
3680 Register map = r4;
3681 Register type_reg = r5;
3682 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3683 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3684 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3685 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3686 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3687 Split(le, if_true, if_false, fall_through);
3689 context()->Plug(if_true, if_false);
3693 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3694 DCHECK(expr->arguments()->length() == 0);
3696 Label materialize_true, materialize_false;
3697 Label* if_true = NULL;
3698 Label* if_false = NULL;
3699 Label* fall_through = NULL;
3700 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3701 &if_false, &fall_through);
3703 // Get the frame pointer for the calling frame.
3704 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3706 // Skip the arguments adaptor frame if it exists.
3707 Label check_frame_marker;
3708 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3709 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3710 __ bne(&check_frame_marker);
3711 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3713 // Check the marker in the calling frame.
3714 __ bind(&check_frame_marker);
3715 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3716 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3717 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3718 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3719 Split(eq, if_true, if_false, fall_through);
3721 context()->Plug(if_true, if_false);
3722 }
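// Frame-walk sketch of the test above (C++-ish pseudocode; the helper names
// are illustrative only):
//
//   Address caller_fp = Memory::Address_at(fp + kCallerFPOffset);
//   if (context_slot(caller_fp) == Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))
//     caller_fp = Memory::Address_at(caller_fp + kCallerFPOffset);
//   return marker_slot(caller_fp) == Smi::FromInt(StackFrame::CONSTRUCT);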
3725 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3726 ZoneList<Expression*>* args = expr->arguments();
3727 DCHECK(args->length() == 2);
3729 // Load the two objects into registers and perform the comparison.
3730 VisitForStackValue(args->at(0));
3731 VisitForAccumulatorValue(args->at(1));
3733 Label materialize_true, materialize_false;
3734 Label* if_true = NULL;
3735 Label* if_false = NULL;
3736 Label* fall_through = NULL;
3737 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3738 &if_false, &fall_through);
3740 __ pop(r4);
3741 __ cmp(r3, r4);
3742 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3743 Split(eq, if_true, if_false, fall_through);
3745 context()->Plug(if_true, if_false);
3749 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3750 ZoneList<Expression*>* args = expr->arguments();
3751 DCHECK(args->length() == 1);
3753 // ArgumentsAccessStub expects the key in r4 and the formal
3754 // parameter count in r3.
3755 VisitForAccumulatorValue(args->at(0));
3756 __ mr(r4, r3);
3757 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3758 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3759 __ CallStub(&stub);
3760 context()->Plug(r3);
3764 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3765 DCHECK(expr->arguments()->length() == 0);
3766 Label exit;
3767 // Get the number of formal parameters.
3768 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3770 // Check if the calling frame is an arguments adaptor frame.
3771 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3772 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3773 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3774 __ bne(&exit);
3776 // Arguments adaptor case: Read the arguments length from the
3777 // adaptor frame.
3778 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3780 __ bind(&exit);
3781 context()->Plug(r3);
3782 }
3785 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3786 ZoneList<Expression*>* args = expr->arguments();
3787 DCHECK(args->length() == 1);
3788 Label done, null, function, non_function_constructor;
3790 VisitForAccumulatorValue(args->at(0));
3792 // If the object is a smi, we return null.
3793 __ JumpIfSmi(r3, &null);
3795 // Check that the object is a JS object but take special care of JS
3796 // functions to make sure they have 'Function' as their class.
3797 // Assume that there are only two callable types, and one of them is at
3798 // either end of the type range for JS object types. Saves extra comparisons.
3799 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3800 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3801 // Map is now in r3.
3802 __ blt(&null);
3803 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3804 FIRST_SPEC_OBJECT_TYPE + 1);
3805 __ beq(&function);
3807 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3808 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3809 __ beq(&function);
3810 // Assume that there is no larger type.
3811 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3813 // Check if the constructor in the map is a JS function.
3814 Register instance_type = r5;
3815 __ GetMapConstructor(r3, r3, r4, instance_type);
3816 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3817 __ bne(&non_function_constructor);
3819 // r3 now contains the constructor function. Grab the
3820 // instance class name from there.
3821 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3822 __ LoadP(r3,
3823 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3824 __ b(&done);
3826 // Functions have class 'Function'.
3827 __ bind(&function);
3828 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3829 __ b(&done);
3831 // Objects with a non-function constructor have class 'Object'.
3832 __ bind(&non_function_constructor);
3833 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3834 __ b(&done);
3836 // Non-JS objects have class null.
3837 __ bind(&null);
3838 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3840 // All done.
3841 __ bind(&done);
3843 context()->Plug(r3);
3844 }
3847 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3848 ZoneList<Expression*>* args = expr->arguments();
3849 DCHECK(args->length() == 1);
3850 VisitForAccumulatorValue(args->at(0)); // Load the object.
3852 Label done;
3853 // If the object is a smi return the object.
3854 __ JumpIfSmi(r3, &done);
3855 // If the object is not a value type, return the object.
3856 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3857 __ bne(&done);
3858 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3860 __ bind(&done);
3861 context()->Plug(r3);
3865 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3866 ZoneList<Expression*>* args = expr->arguments();
3867 DCHECK_EQ(1, args->length());
3869 VisitForAccumulatorValue(args->at(0));
3871 Label materialize_true, materialize_false;
3872 Label* if_true = nullptr;
3873 Label* if_false = nullptr;
3874 Label* fall_through = nullptr;
3875 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3876 &if_false, &fall_through);
3878 __ JumpIfSmi(r3, if_false);
3879 __ CompareObjectType(r3, r4, r4, JS_DATE_TYPE);
3880 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3881 Split(eq, if_true, if_false, fall_through);
3883 context()->Plug(if_true, if_false);
3887 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3888 ZoneList<Expression*>* args = expr->arguments();
3889 DCHECK(args->length() == 2);
3890 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3891 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3893 VisitForAccumulatorValue(args->at(0)); // Load the object.
3895 Register object = r3;
3896 Register result = r3;
3897 Register scratch0 = r11;
3898 Register scratch1 = r4;
3900 if (index->value() == 0) {
3901 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3902 } else {
3903 Label runtime, done;
3904 if (index->value() < JSDate::kFirstUncachedField) {
3905 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3906 __ mov(scratch1, Operand(stamp));
3907 __ LoadP(scratch1, MemOperand(scratch1));
3908 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3909 __ cmp(scratch1, scratch0);
3910 __ bne(&runtime);
3911 __ LoadP(result,
3912 FieldMemOperand(object, JSDate::kValueOffset +
3913 kPointerSize * index->value()),
3914 scratch0);
3915 __ b(&done);
3916 }
3917 __ bind(&runtime);
3918 __ PrepareCallCFunction(2, scratch1);
3919 __ LoadSmiLiteral(r4, index);
3920 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3921 __ bind(&done);
3922 }
3924 context()->Plug(result);
3925 }
3928 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3929 ZoneList<Expression*>* args = expr->arguments();
3930 DCHECK_EQ(3, args->length());
3932 Register string = r3;
3933 Register index = r4;
3934 Register value = r5;
3936 VisitForStackValue(args->at(0)); // index
3937 VisitForStackValue(args->at(1)); // value
3938 VisitForAccumulatorValue(args->at(2)); // string
3939 __ Pop(index, value);
3941 if (FLAG_debug_code) {
3942 __ TestIfSmi(value, r0);
3943 __ Check(eq, kNonSmiValue, cr0);
3944 __ TestIfSmi(index, r0);
3945 __ Check(eq, kNonSmiIndex, cr0);
3946 __ SmiUntag(index, index);
3947 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3948 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3949 __ SmiTag(index, index);
3950 }
3953 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3954 __ SmiToByteArrayOffset(r0, index);
3955 __ stbx(value, MemOperand(ip, r0));
3956 context()->Plug(string);
3957 }
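// SmiToByteArrayOffset is an arithmetic right shift by the smi tag size, so
// the smi index becomes a raw byte offset and the store is effectively
// (a sketch):
//
//   string->chars()[Smi::cast(index)->value()] = static_cast<uint8_t>(value);
//
// The two-byte variant below uses SmiToShortArrayOffset instead, scaling the
// untagged index by sizeof(uint16_t).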
3960 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3961 ZoneList<Expression*>* args = expr->arguments();
3962 DCHECK_EQ(3, args->length());
3964 Register string = r3;
3965 Register index = r4;
3966 Register value = r5;
3968 VisitForStackValue(args->at(0)); // index
3969 VisitForStackValue(args->at(1)); // value
3970 VisitForAccumulatorValue(args->at(2)); // string
3971 __ Pop(index, value);
3973 if (FLAG_debug_code) {
3974 __ TestIfSmi(value, r0);
3975 __ Check(eq, kNonSmiValue, cr0);
3976 __ TestIfSmi(index, r0);
3977 __ Check(eq, kNonSmiIndex, cr0);
3978 __ SmiUntag(index, index);
3979 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3980 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3981 __ SmiTag(index, index);
3982 }
3985 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3986 __ SmiToShortArrayOffset(r0, index);
3987 __ sthx(value, MemOperand(ip, r0));
3988 context()->Plug(string);
3992 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3993 ZoneList<Expression*>* args = expr->arguments();
3994 DCHECK(args->length() == 2);
3995 VisitForStackValue(args->at(0)); // Load the object.
3996 VisitForAccumulatorValue(args->at(1)); // Load the value.
3997 __ pop(r4); // r3 = value. r4 = object.
3999 Label done;
4000 // If the object is a smi, return the value.
4001 __ JumpIfSmi(r4, &done);
4003 // If the object is not a value type, return the value.
4004 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
4005 __ bne(&done);
4007 // Store the value.
4008 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
4009 // Update the write barrier. Save the value as it will be
4010 // overwritten by the write barrier code and is needed afterward.
4011 __ mr(r5, r3);
4012 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
4013 kDontSaveFPRegs);
4015 __ bind(&done);
4016 context()->Plug(r3);
4020 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4021 ZoneList<Expression*>* args = expr->arguments();
4022 DCHECK_EQ(args->length(), 1);
4023 // Load the argument into r3 and call the stub.
4024 VisitForAccumulatorValue(args->at(0));
4026 NumberToStringStub stub(isolate());
4027 __ CallStub(&stub);
4028 context()->Plug(r3);
4032 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4033 ZoneList<Expression*>* args = expr->arguments();
4034 DCHECK(args->length() == 1);
4035 VisitForAccumulatorValue(args->at(0));
4037 Label done;
4038 StringCharFromCodeGenerator generator(r3, r4);
4039 generator.GenerateFast(masm_);
4040 __ b(&done);
4042 NopRuntimeCallHelper call_helper;
4043 generator.GenerateSlow(masm_, call_helper);
4045 __ bind(&done);
4046 context()->Plug(r4);
4050 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4051 ZoneList<Expression*>* args = expr->arguments();
4052 DCHECK(args->length() == 2);
4053 VisitForStackValue(args->at(0));
4054 VisitForAccumulatorValue(args->at(1));
4056 Register object = r4;
4057 Register index = r3;
4058 Register result = r6;
4060 __ pop(object);
4062 Label need_conversion;
4063 Label index_out_of_range;
4064 Label done;
4065 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
4066 &need_conversion, &index_out_of_range,
4067 STRING_INDEX_IS_NUMBER);
4068 generator.GenerateFast(masm_);
4069 __ b(&done);
4071 __ bind(&index_out_of_range);
4072 // When the index is out of range, the spec requires us to return
4073 // NaN.
4074 __ LoadRoot(result, Heap::kNanValueRootIndex);
4075 __ b(&done);
4077 __ bind(&need_conversion);
4078 // Load the undefined value into the result register, which will
4079 // trigger conversion.
4080 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4081 __ b(&done);
4083 NopRuntimeCallHelper call_helper;
4084 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4086 __ bind(&done);
4087 context()->Plug(result);
4091 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4092 ZoneList<Expression*>* args = expr->arguments();
4093 DCHECK(args->length() == 2);
4094 VisitForStackValue(args->at(0));
4095 VisitForAccumulatorValue(args->at(1));
4097 Register object = r4;
4098 Register index = r3;
4099 Register scratch = r6;
4100 Register result = r3;
4102 __ pop(object);
4104 Label need_conversion;
4105 Label index_out_of_range;
4106 Label done;
4107 StringCharAtGenerator generator(object, index, scratch, result,
4108 &need_conversion, &need_conversion,
4109 &index_out_of_range, STRING_INDEX_IS_NUMBER);
4110 generator.GenerateFast(masm_);
4111 __ b(&done);
4113 __ bind(&index_out_of_range);
4114 // When the index is out of range, the spec requires us to return
4115 // the empty string.
4116 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4117 __ b(&done);
4119 __ bind(&need_conversion);
4120 // Move smi zero into the result register, which will trigger
4121 // conversion.
4122 __ LoadSmiLiteral(result, Smi::FromInt(0));
4123 __ b(&done);
4125 NopRuntimeCallHelper call_helper;
4126 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4128 __ bind(&done);
4129 context()->Plug(result);
4133 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4134 ZoneList<Expression*>* args = expr->arguments();
4135 DCHECK_EQ(2, args->length());
4136 VisitForStackValue(args->at(0));
4137 VisitForAccumulatorValue(args->at(1));
4139 __ pop(r4);
4140 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4141 __ CallStub(&stub);
4142 context()->Plug(r3);
4146 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4147 ZoneList<Expression*>* args = expr->arguments();
4148 DCHECK(args->length() >= 2);
4150 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4151 for (int i = 0; i < arg_count + 1; i++) {
4152 VisitForStackValue(args->at(i));
4153 }
4154 VisitForAccumulatorValue(args->last()); // Function.
4156 Label runtime, done;
4157 // Check for non-function argument (including proxy).
4158 __ JumpIfSmi(r3, &runtime);
4159 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
4160 __ bne(&runtime);
4162 // InvokeFunction requires the function in r4. Move it in there.
4163 __ mr(r4, result_register());
4164 ParameterCount count(arg_count);
4165 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
4166 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4167 __ b(&done);
4169 __ bind(&runtime);
4170 __ push(r3);
4171 __ CallRuntime(Runtime::kCall, args->length());
4172 __ bind(&done);
4174 context()->Plug(r3);
4178 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4179 ZoneList<Expression*>* args = expr->arguments();
4180 DCHECK(args->length() == 2);
4182 // new.target
4183 VisitForStackValue(args->at(0));
4185 // .this_function
4186 VisitForStackValue(args->at(1));
4187 __ CallRuntime(Runtime::kGetPrototype, 1);
4188 __ mr(r4, result_register());
4189 __ Push(r4);
4191 // Load original constructor into r7.
4192 __ LoadP(r7, MemOperand(sp, 1 * kPointerSize));
4194 // Check if the calling frame is an arguments adaptor frame.
4195 Label adaptor_frame, args_set_up, runtime;
4196 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4197 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
4198 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
4199 __ beq(&adaptor_frame);
4201 // default constructor has no arguments, so no adaptor frame means no args.
4202 __ li(r3, Operand::Zero());
4203 __ b(&args_set_up);
4205 // Copy arguments from adaptor frame.
4206 {
4207 __ bind(&adaptor_frame);
4208 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
4209 __ SmiUntag(r3);
4211 // Get arguments pointer in r5.
4212 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
4213 __ add(r5, r5, r0);
4214 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
4216 Label loop;
4217 __ mtctr(r3);
4218 __ bind(&loop);
4219 // Pre-decrement in order to skip receiver.
4220 __ LoadPU(r6, MemOperand(r5, -kPointerSize));
4221 __ Push(r6);
4222 __ bdnz(&loop);
4223 }
4225 __ bind(&args_set_up);
4226 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
4228 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4229 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4231 __ Drop(1);
4233 context()->Plug(result_register());
4234 }
4237 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4238 RegExpConstructResultStub stub(isolate());
4239 ZoneList<Expression*>* args = expr->arguments();
4240 DCHECK(args->length() == 3);
4241 VisitForStackValue(args->at(0));
4242 VisitForStackValue(args->at(1));
4243 VisitForAccumulatorValue(args->at(2));
4244 __ Pop(r5, r4);
4245 __ CallStub(&stub);
4246 context()->Plug(r3);
4250 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4251 ZoneList<Expression*>* args = expr->arguments();
4252 DCHECK_EQ(2, args->length());
4253 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4254 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4256 Handle<FixedArray> jsfunction_result_caches(
4257 isolate()->native_context()->jsfunction_result_caches());
4258 if (jsfunction_result_caches->length() <= cache_id) {
4259 __ Abort(kAttemptToUseUndefinedCache);
4260 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4261 context()->Plug(r3);
4262 return;
4263 }
4265 VisitForAccumulatorValue(args->at(1));
4267 Register key = r3;
4268 Register cache = r4;
4269 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4270 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4271 __ LoadP(cache,
4272 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4273 __ LoadP(cache,
4274 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
4276 Label done, not_found;
4277 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4278 // r5 now holds finger offset as a smi.
4279 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4280 // r6 now points to the start of fixed array elements.
4281 __ SmiToPtrArrayOffset(r5, r5);
4282 __ LoadPUX(r5, MemOperand(r6, r5));
4283 // r6 now points to the key of the pair.
4284 __ cmp(key, r5);
4285 __ bne(&not_found);
4287 __ LoadP(r3, MemOperand(r6, kPointerSize));
4288 __ b(&done);
4290 __ bind(&not_found);
4291 // Call runtime to perform the lookup.
4292 __ Push(cache, key);
4293 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4295 __ bind(&done);
4296 context()->Plug(r3);
4297 }
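// JSFunctionResultCache layout (as relied on above): a FixedArray whose
// entries form (key, value) pairs, plus a 'finger' slot remembering the pair
// hit most recently. The inline path probes only the finger's key; any miss
// falls through to %GetFromCacheRT, which does the full search and updates
// the finger.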
4300 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4301 ZoneList<Expression*>* args = expr->arguments();
4302 VisitForAccumulatorValue(args->at(0));
4304 Label materialize_true, materialize_false;
4305 Label* if_true = NULL;
4306 Label* if_false = NULL;
4307 Label* fall_through = NULL;
4308 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4309 &if_false, &fall_through);
4311 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4312 // PPC - assume ip is free
4313 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4314 __ and_(r0, r3, ip, SetRC);
4315 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4316 Split(eq, if_true, if_false, fall_through, cr0);
4318 context()->Plug(if_true, if_false);
4322 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4323 ZoneList<Expression*>* args = expr->arguments();
4324 DCHECK(args->length() == 1);
4325 VisitForAccumulatorValue(args->at(0));
4327 __ AssertString(r3);
4329 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4330 __ IndexFromHash(r3, r3);
4332 context()->Plug(r3);
4336 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4337 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4338 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4339 one_char_separator_loop_entry, long_separator_loop;
4340 ZoneList<Expression*>* args = expr->arguments();
4341 DCHECK(args->length() == 2);
4342 VisitForStackValue(args->at(1));
4343 VisitForAccumulatorValue(args->at(0));
4345 // All aliases of the same register have disjoint lifetimes.
4346 Register array = r3;
4347 Register elements = no_reg; // Will be r3.
4348 Register result = no_reg; // Will be r3.
4349 Register separator = r4;
4350 Register array_length = r5;
4351 Register result_pos = no_reg; // Will be r5
4352 Register string_length = r6;
4353 Register string = r7;
4354 Register element = r8;
4355 Register elements_end = r9;
4356 Register scratch1 = r10;
4357 Register scratch2 = r11;
4359 // Separator operand is on the stack.
4360 __ LoadP(separator, MemOperand(sp));
4362 // Check that the array is a JSArray.
4363 __ JumpIfSmi(array, &bailout);
4364 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
4365 __ bne(&bailout);
4367 // Check that the array has fast elements.
4368 __ CheckFastElements(scratch1, scratch2, &bailout);
4370 // If the array has length zero, return the empty string.
4371 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4372 __ SmiUntag(array_length);
4373 __ cmpi(array_length, Operand::Zero());
4374 __ bne(&non_trivial_array);
4375 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
4376 __ b(&done);
4378 __ bind(&non_trivial_array);
4380 // Get the FixedArray containing array's elements.
4381 elements = array;
4382 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4383 array = no_reg; // End of array's live range.
4385 // Check that all array elements are sequential one-byte strings, and
4386 // accumulate the sum of their lengths, as a smi-encoded value.
4387 __ li(string_length, Operand::Zero());
4388 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4389 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4390 __ add(elements_end, element, elements_end);
4391 // Loop condition: while (element < elements_end).
4392 // Live values in registers:
4393 // elements: Fixed array of strings.
4394 // array_length: Length of the fixed array of strings (not smi)
4395 // separator: Separator string
4396 // string_length: Accumulated sum of string lengths (smi).
4397 // element: Current array element.
4398 // elements_end: Array end.
4399 if (generate_debug_code_) {
4400 __ cmpi(array_length, Operand::Zero());
4401 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4402 }
4403 __ bind(&loop);
4404 __ LoadP(string, MemOperand(element));
4405 __ addi(element, element, Operand(kPointerSize));
4406 __ JumpIfSmi(string, &bailout);
4407 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4408 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4409 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4410 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4412 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4413 r0);
4414 __ BranchOnOverflow(&bailout);
4416 __ cmp(element, elements_end);
4417 __ blt(&loop);
4419 // If array_length is 1, return elements[0], a string.
4420 __ cmpi(array_length, Operand(1));
4421 __ bne(&not_size_one_array);
4422 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
4423 __ b(&done);
4425 __ bind(&not_size_one_array);
4427 // Live values in registers:
4428 // separator: Separator string
4429 // array_length: Length of the array.
4430 // string_length: Sum of string lengths (smi).
4431 // elements: FixedArray of strings.
4433 // Check that the separator is a flat one-byte string.
4434 __ JumpIfSmi(separator, &bailout);
4435 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4436 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4437 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4439 // Add (separator length times array_length) - separator length to the
4440 // string_length to get the length of the result string.
4441 __ LoadP(scratch1,
4442 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4443 __ sub(string_length, string_length, scratch1);
4444 #if V8_TARGET_ARCH_PPC64
4445 __ SmiUntag(scratch1, scratch1);
4446 __ Mul(scratch2, array_length, scratch1);
4447 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4448 // zero.
4449 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4450 __ bne(&bailout, cr0);
4451 __ SmiTag(scratch2, scratch2);
4452 #else
4453 // array_length is not smi but the other values are, so the result is a smi
4454 __ mullw(scratch2, array_length, scratch1);
4455 __ mulhw(ip, array_length, scratch1);
4456 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4457 // zero.
4458 __ cmpi(ip, Operand::Zero());
4459 __ bne(&bailout);
4460 __ cmpwi(scratch2, Operand::Zero());
4461 __ blt(&bailout);
4462 #endif
4464 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4465 r0);
4466 __ BranchOnOverflow(&bailout);
4467 __ SmiUntag(string_length);
4469 // Get first element in the array to free up the elements register to be used
4470 // for the result.
4471 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4472 result = elements; // End of live range for elements.
4474 // Live values in registers:
4475 // element: First array element
4476 // separator: Separator string
4477 // string_length: Length of result string (not smi)
4478 // array_length: Length of the array.
4479 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4480 elements_end, &bailout);
4481 // Prepare for looping. Set up elements_end to end of the array. Set
4482 // result_pos to the position of the result where to write the first
4483 // character.
4484 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4485 __ add(elements_end, element, elements_end);
4486 result_pos = array_length; // End of live range for array_length.
4487 array_length = no_reg;
4488 __ addi(result_pos, result,
4489 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4491 // Check the length of the separator.
4492 __ LoadP(scratch1,
4493 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4494 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4495 __ beq(&one_char_separator);
4496 __ bgt(&long_separator);
4498 // Empty separator case
4499 __ bind(&empty_separator_loop);
4500 // Live values in registers:
4501 // result_pos: the position to which we are currently copying characters.
4502 // element: Current array element.
4503 // elements_end: Array end.
4505 // Copy next array element to the result.
4506 __ LoadP(string, MemOperand(element));
4507 __ addi(element, element, Operand(kPointerSize));
4508 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4509 __ SmiUntag(string_length);
4510 __ addi(string, string,
4511 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4512 __ CopyBytes(string, result_pos, string_length, scratch1);
4513 __ cmp(element, elements_end);
4514 __ blt(&empty_separator_loop); // End while (element < elements_end).
4515 DCHECK(result.is(r3));
4516 __ b(&done);
4518 // One-character separator case
4519 __ bind(&one_char_separator);
4520 // Replace separator with its one-byte character value.
4521 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4522 // Jump into the loop after the code that copies the separator, so the first
4523 // element is not preceded by a separator
4524 __ b(&one_char_separator_loop_entry);
4526 __ bind(&one_char_separator_loop);
4527 // Live values in registers:
4528 // result_pos: the position to which we are currently copying characters.
4529 // element: Current array element.
4530 // elements_end: Array end.
4531 // separator: Single separator one-byte char (in lower byte).
4533 // Copy the separator character to the result.
4534 __ stb(separator, MemOperand(result_pos));
4535 __ addi(result_pos, result_pos, Operand(1));
4537 // Copy next array element to the result.
4538 __ bind(&one_char_separator_loop_entry);
4539 __ LoadP(string, MemOperand(element));
4540 __ addi(element, element, Operand(kPointerSize));
4541 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4542 __ SmiUntag(string_length);
4543 __ addi(string, string,
4544 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4545 __ CopyBytes(string, result_pos, string_length, scratch1);
4546 __ cmpl(element, elements_end);
4547 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4548 DCHECK(result.is(r3));
4549 __ b(&done);
4551 // Long separator case (separator is more than one character). Entry is at the
4552 // label long_separator below.
4553 __ bind(&long_separator_loop);
4554 // Live values in registers:
4555 // result_pos: the position to which we are currently copying characters.
4556 // element: Current array element.
4557 // elements_end: Array end.
4558 // separator: Separator string.
4560 // Copy the separator to the result.
4561 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4562 __ SmiUntag(string_length);
4563 __ addi(string, separator,
4564 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4565 __ CopyBytes(string, result_pos, string_length, scratch1);
4567 __ bind(&long_separator);
4568 __ LoadP(string, MemOperand(element));
4569 __ addi(element, element, Operand(kPointerSize));
4570 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4571 __ SmiUntag(string_length);
4572 __ addi(string, string,
4573 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4574 __ CopyBytes(string, result_pos, string_length, scratch1);
4575 __ cmpl(element, elements_end);
4576 __ blt(&long_separator_loop); // End while (element < elements_end).
4577 DCHECK(result.is(r3));
4578 __ b(&done);
4580 __ bind(&bailout);
4581 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4582 __ bind(&done);
4583 context()->Plug(r3);
4584 }
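// The whole fast path is a two-pass, allocate-once join. In C++ terms
// (a sketch, ignoring smi tagging and the bailout conditions):
//
//   size_t length = 0;
//   for (const auto& s : elements) length += s.size();       // pass 1: sizes
//   length += separator.size() * (elements.size() - 1);
//   std::string result;
//   result.reserve(length);                                  // one allocation
//   for (size_t i = 0; i < elements.size(); i++) {           // pass 2: copy
//     if (i > 0) result += separator;
//     result += elements[i];
//   }
//
// Any non-flat, non-one-byte string or non-fast array bails out to the
// generic Array.prototype.join.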
4587 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4588 DCHECK(expr->arguments()->length() == 0);
4589 ExternalReference debug_is_active =
4590 ExternalReference::debug_is_active_address(isolate());
4591 __ mov(ip, Operand(debug_is_active));
4592 __ lbz(r3, MemOperand(ip));
4593 __ SmiTag(r3);
4594 context()->Plug(r3);
4598 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4599 // Push the builtins object as the receiver.
4600 Register receiver = LoadDescriptor::ReceiverRegister();
4601 __ LoadP(receiver, GlobalObjectOperand());
4602 __ LoadP(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4603 __ push(receiver);
4605 // Load the function from the receiver.
4606 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4607 __ mov(LoadDescriptor::SlotRegister(),
4608 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4609 CallLoadIC(NOT_INSIDE_TYPEOF);
4613 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4614 ZoneList<Expression*>* args = expr->arguments();
4615 int arg_count = args->length();
4617 SetCallPosition(expr, arg_count);
4618 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4619 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4620 __ CallStub(&stub);
4621 }
4624 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4625 ZoneList<Expression*>* args = expr->arguments();
4626 int arg_count = args->length();
4628 if (expr->is_jsruntime()) {
4629 Comment cmnt(masm_, "[ CallRuntime");
4630 EmitLoadJSRuntimeFunction(expr);
4632 // Push the target function under the receiver.
4633 __ LoadP(ip, MemOperand(sp, 0));
4634 __ push(ip);
4635 __ StoreP(r3, MemOperand(sp, kPointerSize));
4637 // Push the arguments ("left-to-right").
4638 for (int i = 0; i < arg_count; i++) {
4639 VisitForStackValue(args->at(i));
4640 }
4642 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4643 EmitCallJSRuntimeFunction(expr);
4645 // Restore context register.
4646 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4648 context()->DropAndPlug(1, r3);
4649 } else {
4651 const Runtime::Function* function = expr->function();
4652 switch (function->function_id) {
4653 #define CALL_INTRINSIC_GENERATOR(Name) \
4654 case Runtime::kInline##Name: { \
4655 Comment cmnt(masm_, "[ Inline" #Name); \
4656 return Emit##Name(expr); \
4657 }
4658 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4659 #undef CALL_INTRINSIC_GENERATOR
4660 default: {
4661 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4662 // Push the arguments ("left-to-right").
4663 for (int i = 0; i < arg_count; i++) {
4664 VisitForStackValue(args->at(i));
4665 }
4667 // Call the C runtime function.
4668 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4669 __ CallRuntime(expr->function(), arg_count);
4670 context()->Plug(r3);
4671 }
4672 }
4673 }
4674 }
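// Dispatch summary: %_Foo(...) intrinsics with a hand-written fast path are
// routed to the matching Emit* function through the
// FOR_EACH_FULL_CODE_INTRINSIC table above; every other %Foo(...) becomes a
// plain C++ runtime call, and JS-runtime calls (expr->is_jsruntime()) go
// through a CallFunctionStub on the builtins object.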
4677 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4678 switch (expr->op()) {
4679 case Token::DELETE: {
4680 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4681 Property* property = expr->expression()->AsProperty();
4682 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4684 if (property != NULL) {
4685 VisitForStackValue(property->obj());
4686 VisitForStackValue(property->key());
4687 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
4688 __ push(r4);
4689 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4690 context()->Plug(r3);
4691 } else if (proxy != NULL) {
4692 Variable* var = proxy->var();
4693 // Delete of an unqualified identifier is disallowed in strict mode but
4694 // "delete this" is allowed.
4695 bool is_this = var->HasThisName(isolate());
4696 DCHECK(is_sloppy(language_mode()) || is_this);
4697 if (var->IsUnallocatedOrGlobalSlot()) {
4698 __ LoadP(r5, GlobalObjectOperand());
4699 __ mov(r4, Operand(var->name()));
4700 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4701 __ Push(r5, r4, r3);
4702 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4703 context()->Plug(r3);
4704 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4705 // Result of deleting non-global, non-dynamic variables is false.
4706 // The subexpression does not have side effects.
4707 context()->Plug(is_this);
4708 } else {
4709 // Non-global variable. Call the runtime to try to delete from the
4710 // context where the variable was introduced.
4711 DCHECK(!context_register().is(r5));
4712 __ mov(r5, Operand(var->name()));
4713 __ Push(context_register(), r5);
4714 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4715 context()->Plug(r3);
4716 }
4717 } else {
4718 // Result of deleting non-property, non-variable reference is true.
4719 // The subexpression may have side effects.
4720 VisitForEffect(expr->expression());
4721 context()->Plug(true);
4722 }
4723 break;
4724 }
4726 case Token::VOID: {
4727 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4728 VisitForEffect(expr->expression());
4729 context()->Plug(Heap::kUndefinedValueRootIndex);
4730 break;
4731 }
4733 case Token::NOT: {
4734 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4735 if (context()->IsEffect()) {
4736 // Unary NOT has no side effects so it's only necessary to visit the
4737 // subexpression. Match the optimizing compiler by not branching.
4738 VisitForEffect(expr->expression());
4739 } else if (context()->IsTest()) {
4740 const TestContext* test = TestContext::cast(context());
4741 // The labels are swapped for the recursive call.
4742 VisitForControl(expr->expression(), test->false_label(),
4743 test->true_label(), test->fall_through());
4744 context()->Plug(test->true_label(), test->false_label());
4746 // We handle value contexts explicitly rather than simply visiting
4747 // for control and plugging the control flow into the context,
4748 // because we need to prepare a pair of extra administrative AST ids
4749 // for the optimizing compiler.
4750 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4751 Label materialize_true, materialize_false, done;
4752 VisitForControl(expr->expression(), &materialize_false,
4753 &materialize_true, &materialize_true);
4754 __ bind(&materialize_true);
4755 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4756 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4757 if (context()->IsStackValue()) __ push(r3);
4758 __ b(&done);
4759 __ bind(&materialize_false);
4760 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4761 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4762 if (context()->IsStackValue()) __ push(r3);
4763 __ bind(&done);
4764 }
4765 break;
4766 }
4768 case Token::TYPEOF: {
4769 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4770 {
4771 AccumulatorValueContext context(this);
4772 VisitForTypeofValue(expr->expression());
4773 }
4774 __ mr(r6, r3);
4775 TypeofStub typeof_stub(isolate());
4776 __ CallStub(&typeof_stub);
4777 context()->Plug(r3);
4778 break;
4779 }
4780 default:
4781 UNREACHABLE();
4782 }
4783 }
4787 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4788 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4790 Comment cmnt(masm_, "[ CountOperation");
4792 Property* prop = expr->expression()->AsProperty();
4793 LhsKind assign_type = Property::GetAssignType(prop);
4795 // Evaluate expression and get value.
4796 if (assign_type == VARIABLE) {
4797 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4798 AccumulatorValueContext context(this);
4799 EmitVariableLoad(expr->expression()->AsVariableProxy());
4800 } else {
4801 // Reserve space for result of postfix operation.
4802 if (expr->is_postfix() && !context()->IsEffect()) {
4803 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4804 __ push(ip);
4805 }
4806 switch (assign_type) {
4807 case NAMED_PROPERTY: {
4808 // Put the object both on the stack and in the register.
4809 VisitForStackValue(prop->obj());
4810 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4811 EmitNamedPropertyLoad(prop);
4812 break;
4813 }
4815 case NAMED_SUPER_PROPERTY: {
4816 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4817 VisitForAccumulatorValue(
4818 prop->obj()->AsSuperPropertyReference()->home_object());
4819 __ Push(result_register());
4820 const Register scratch = r4;
4821 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4822 __ Push(scratch, result_register());
4823 EmitNamedSuperPropertyLoad(prop);
4824 break;
4825 }
4827 case KEYED_SUPER_PROPERTY: {
4828 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4829 VisitForAccumulatorValue(
4830 prop->obj()->AsSuperPropertyReference()->home_object());
4831 const Register scratch = r4;
4832 const Register scratch1 = r5;
4833 __ mr(scratch, result_register());
4834 VisitForAccumulatorValue(prop->key());
4835 __ Push(scratch, result_register());
4836 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4837 __ Push(scratch1, scratch, result_register());
4838 EmitKeyedSuperPropertyLoad(prop);
4839 break;
4840 }
4842 case KEYED_PROPERTY: {
4843 VisitForStackValue(prop->obj());
4844 VisitForStackValue(prop->key());
4845 __ LoadP(LoadDescriptor::ReceiverRegister(),
4846 MemOperand(sp, 1 * kPointerSize));
4847 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4848 EmitKeyedPropertyLoad(prop);
4849 break;
4850 }
4852 case VARIABLE:
4853 UNREACHABLE();
4854 }
4855 }
4857 // We need a second deoptimization point after loading the value
4858 // in case evaluating the property load may have a side effect.
4859 if (assign_type == VARIABLE) {
4860 PrepareForBailout(expr->expression(), TOS_REG);
4861 } else {
4862 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4863 }
4865 // Inline smi case if we are in a loop.
4866 Label stub_call, done;
4867 JumpPatchSite patch_site(masm_);
4869 int count_value = expr->op() == Token::INC ? 1 : -1;
4870 if (ShouldInlineSmiCase(expr->op())) {
4871 Label slow;
4872 patch_site.EmitJumpIfNotSmi(r3, &slow);
4874 // Save result for postfix expressions.
4875 if (expr->is_postfix()) {
4876 if (!context()->IsEffect()) {
4877 // Save the result on the stack. If we have a named or keyed property
4878 // we store the result under the receiver that is currently on top
4879 // of the stack.
4880 switch (assign_type) {
4881 case VARIABLE:
4882 __ push(r3);
4883 break;
4884 case NAMED_PROPERTY:
4885 __ StoreP(r3, MemOperand(sp, kPointerSize));
4886 break;
4887 case NAMED_SUPER_PROPERTY:
4888 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4889 break;
4890 case KEYED_PROPERTY:
4891 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4892 break;
4893 case KEYED_SUPER_PROPERTY:
4894 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4895 break;
4896 }
4897 }
4898 }
4900 Register scratch1 = r4;
4901 Register scratch2 = r5;
4902 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4903 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4904 __ BranchOnNoOverflow(&done);
4905 // Call stub. Undo operation first.
4906 __ sub(r3, r3, scratch1);
4907 __ b(&stub_call);
4908 __ bind(&slow);
4909 }
4910 if (!is_strong(language_mode())) {
4911 ToNumberStub convert_stub(isolate());
4912 __ CallStub(&convert_stub);
4913 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4914 }
4916 // Save result for postfix expressions.
4917 if (expr->is_postfix()) {
4918 if (!context()->IsEffect()) {
4919 // Save the result on the stack. If we have a named or keyed property
4920 // we store the result under the receiver that is currently on top
4921 // of the stack.
4922 switch (assign_type) {
4923 case VARIABLE:
4924 __ push(r3);
4925 break;
4926 case NAMED_PROPERTY:
4927 __ StoreP(r3, MemOperand(sp, kPointerSize));
4928 break;
4929 case NAMED_SUPER_PROPERTY:
4930 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4931 break;
4932 case KEYED_PROPERTY:
4933 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4934 break;
4935 case KEYED_SUPER_PROPERTY:
4936 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4937 break;
4938 }
4939 }
4940 }
4942 __ bind(&stub_call);
4943 __ mr(r4, r3);
4944 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4946 SetExpressionPosition(expr);
4948 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4949 strength(language_mode())).code();
4950 CallIC(code, expr->CountBinOpFeedbackId());
4951 patch_site.EmitPatchInfo();
4952 __ bind(&done);
4954 if (is_strong(language_mode())) {
4955 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4956 }
4957 // Store the value returned in r3.
4958 switch (assign_type) {
4959 case VARIABLE:
4960 if (expr->is_postfix()) {
4961 {
4962 EffectContext context(this);
4963 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4964 Token::ASSIGN, expr->CountSlot());
4965 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4966 context.Plug(r3);
4967 }
4968 // For all contexts except EffectContext we have the result on
4969 // top of the stack.
4970 if (!context()->IsEffect()) {
4971 context()->PlugTOS();
4972 }
4973 } else {
4974 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4975 Token::ASSIGN, expr->CountSlot());
4976 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4977 context()->Plug(r3);
4978 }
4979 break;
4980 case NAMED_PROPERTY: {
4981 __ mov(StoreDescriptor::NameRegister(),
4982 Operand(prop->key()->AsLiteral()->value()));
4983 __ pop(StoreDescriptor::ReceiverRegister());
4984 if (FLAG_vector_stores) {
4985 EmitLoadStoreICSlot(expr->CountSlot());
4986 CallStoreIC();
4987 } else {
4988 CallStoreIC(expr->CountStoreFeedbackId());
4989 }
4990 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4991 if (expr->is_postfix()) {
4992 if (!context()->IsEffect()) {
4993 context()->PlugTOS();
4994 }
4995 } else {
4996 context()->Plug(r3);
4997 }
4998 break;
4999 }
5000 case NAMED_SUPER_PROPERTY: {
5001 EmitNamedSuperPropertyStore(prop);
5002 if (expr->is_postfix()) {
5003 if (!context()->IsEffect()) {
5004 context()->PlugTOS();
5005 }
5006 } else {
5007 context()->Plug(r3);
5008 }
5009 break;
5010 }
5011 case KEYED_SUPER_PROPERTY: {
5012 EmitKeyedSuperPropertyStore(prop);
5013 if (expr->is_postfix()) {
5014 if (!context()->IsEffect()) {
5015 context()->PlugTOS();
5016 }
5017 } else {
5018 context()->Plug(r3);
5019 }
5020 break;
5021 }
5022 case KEYED_PROPERTY: {
5023 __ Pop(StoreDescriptor::ReceiverRegister(),
5024 StoreDescriptor::NameRegister());
5025 Handle<Code> ic =
5026 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5027 if (FLAG_vector_stores) {
5028 EmitLoadStoreICSlot(expr->CountSlot());
5029 CallIC(ic);
5030 } else {
5031 CallIC(ic, expr->CountStoreFeedbackId());
5032 }
5033 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5034 if (expr->is_postfix()) {
5035 if (!context()->IsEffect()) {
5036 context()->PlugTOS();
5037 }
5038 } else {
5039 context()->Plug(r3);
5040 }
5041 break;
5042 }
5043 }
5044 }
5047 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5048 Expression* sub_expr,
5049 Handle<String> check) {
5050 Label materialize_true, materialize_false;
5051 Label* if_true = NULL;
5052 Label* if_false = NULL;
5053 Label* fall_through = NULL;
5054 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5055 &if_false, &fall_through);
5057 {
5058 AccumulatorValueContext context(this);
5059 VisitForTypeofValue(sub_expr);
5060 }
5061 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5063 Factory* factory = isolate()->factory();
5064 if (String::Equals(check, factory->number_string())) {
5065 __ JumpIfSmi(r3, if_true);
5066 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5067 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5068 __ cmp(r3, ip);
5069 Split(eq, if_true, if_false, fall_through);
5070 } else if (String::Equals(check, factory->string_string())) {
5071 __ JumpIfSmi(r3, if_false);
5072 // Check for undetectable objects => false.
5073 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
5074 __ bge(if_false);
5075 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5076 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
5077 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5078 Split(eq, if_true, if_false, fall_through, cr0);
5079 } else if (String::Equals(check, factory->symbol_string())) {
5080 __ JumpIfSmi(r3, if_false);
5081 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
5082 Split(eq, if_true, if_false, fall_through);
5083 } else if (String::Equals(check, factory->float32x4_string())) {
5084 __ JumpIfSmi(r3, if_false);
5085 __ CompareObjectType(r3, r3, r4, FLOAT32X4_TYPE);
5086 Split(eq, if_true, if_false, fall_through);
5087 } else if (String::Equals(check, factory->boolean_string())) {
5088 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
5089 __ beq(if_true);
5090 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
5091 Split(eq, if_true, if_false, fall_through);
5092 } else if (String::Equals(check, factory->undefined_string())) {
5093 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
5094 __ beq(if_true);
5095 __ JumpIfSmi(r3, if_false);
5096 // Check for undetectable objects => true.
5097 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5098 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5099 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5100 Split(ne, if_true, if_false, fall_through, cr0);
5102 } else if (String::Equals(check, factory->function_string())) {
5103 __ JumpIfSmi(r3, if_false);
5104 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5105 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
5106 __ beq(if_true);
5107 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
5108 Split(eq, if_true, if_false, fall_through);
5109 } else if (String::Equals(check, factory->object_string())) {
5110 __ JumpIfSmi(r3, if_false);
5111 __ CompareRoot(r3, Heap::kNullValueRootIndex);
5112 __ beq(if_true);
5113 // Check for JS objects => true.
5114 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5115 __ blt(if_false);
5116 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5117 __ bgt(if_false);
5118 // Check for undetectable objects => false.
5119 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5120 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5121 Split(eq, if_true, if_false, fall_through, cr0);
5122 } else {
5123 if (if_false != fall_through) __ b(if_false);
5124 }
5125 context()->Plug(if_true, if_false);
5126 }
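// Summary of the typeof fast paths above:
//   "number"    -> smi, or map == heap-number map
//   "string"    -> instance type below FIRST_NONSTRING_TYPE, not undetectable
//   "symbol"    -> SYMBOL_TYPE
//   "float32x4" -> FLOAT32X4_TYPE
//   "boolean"   -> the true or false root
//   "undefined" -> the undefined root, or any undetectable object
//   "function"  -> JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//   "object"    -> null, or a non-callable spec object that is not undetectable
// Any other literal compares unequal and branches straight to if_false.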
5129 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5130 Comment cmnt(masm_, "[ CompareOperation");
5131 SetExpressionPosition(expr);
5133 // First we try a fast inlined version of the compare when one of
5134 // the operands is a literal.
5135 if (TryLiteralCompare(expr)) return;
5137 // Always perform the comparison for its control flow. Pack the result
5138 // into the expression's context after the comparison is performed.
5139 Label materialize_true, materialize_false;
5140 Label* if_true = NULL;
5141 Label* if_false = NULL;
5142 Label* fall_through = NULL;
5143 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5144 &if_false, &fall_through);
5146 Token::Value op = expr->op();
5147 VisitForStackValue(expr->left());
5148 switch (op) {
5149 case Token::IN:
5150 VisitForStackValue(expr->right());
5151 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5152 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5153 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5154 __ cmp(r3, ip);
5155 Split(eq, if_true, if_false, fall_through);
5156 break;
5158 case Token::INSTANCEOF: {
5159 VisitForStackValue(expr->right());
5160 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5161 __ CallStub(&stub);
5162 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5163 // The stub returns 0 for true.
5164 __ cmpi(r3, Operand::Zero());
5165 Split(eq, if_true, if_false, fall_through);
5166 break;
5167 }
5169 default: {
5170 VisitForAccumulatorValue(expr->right());
5171 Condition cond = CompareIC::ComputeCondition(op);
5172 __ pop(r4);
5174 bool inline_smi_code = ShouldInlineSmiCase(op);
5175 JumpPatchSite patch_site(masm_);
5176 if (inline_smi_code) {
5177 Label slow_case;
5178 __ orx(r5, r3, r4);
5179 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
5180 __ cmp(r4, r3);
5181 Split(cond, if_true, if_false, NULL);
5182 __ bind(&slow_case);
5183 }
5185 Handle<Code> ic = CodeFactory::CompareIC(
5186 isolate(), op, strength(language_mode())).code();
5187 CallIC(ic, expr->CompareOperationFeedbackId());
5188 patch_site.EmitPatchInfo();
5189 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5190 __ cmpi(r3, Operand::Zero());
5191 Split(cond, if_true, if_false, fall_through);
5192 }
5193 }
5195 // Convert the result of the comparison into one expected for this
5196 // expression's context.
5197 context()->Plug(if_true, if_false);
5201 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5202 Expression* sub_expr,
5203 NilValue nil) {
5204 Label materialize_true, materialize_false;
5205 Label* if_true = NULL;
5206 Label* if_false = NULL;
5207 Label* fall_through = NULL;
5208 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5209 &if_false, &fall_through);
5211 VisitForAccumulatorValue(sub_expr);
5212 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5213 if (expr->op() == Token::EQ_STRICT) {
5214 Heap::RootListIndex nil_value = nil == kNullValue
5215 ? Heap::kNullValueRootIndex
5216 : Heap::kUndefinedValueRootIndex;
5217 __ LoadRoot(r4, nil_value);
5218 __ cmp(r3, r4);
5219 Split(eq, if_true, if_false, fall_through);
5220 } else {
5221 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5222 CallIC(ic, expr->CompareOperationFeedbackId());
5223 __ cmpi(r3, Operand::Zero());
5224 Split(ne, if_true, if_false, fall_through);
5225 }
5226 context()->Plug(if_true, if_false);
5230 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5231 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5232 context()->Plug(r3);
5236 Register FullCodeGenerator::result_register() { return r3; }
5239 Register FullCodeGenerator::context_register() { return cp; }
5242 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5243 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5244 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5248 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5249 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5253 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5254 Scope* closure_scope = scope()->ClosureScope();
5255 if (closure_scope->is_script_scope() ||
5256 closure_scope->is_module_scope()) {
5257 // Contexts nested in the native context have a canonical empty function
5258 // as their closure, not the anonymous closure containing the global
5259 // code. Pass a smi sentinel and let the runtime look up the empty
5260 // function.
5261 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5262 } else if (closure_scope->is_eval_scope()) {
5263 // Contexts created by a call to eval have the same closure as the
5264 // context calling eval, not the anonymous closure containing the eval
5265 // code. Fetch it from the context.
5266 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5267 } else {
5268 DCHECK(closure_scope->is_function_scope());
5269 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5270 }
5271 __ push(ip);
5272 }
5275 // ----------------------------------------------------------------------------
5276 // Non-local control flow support.
5278 void FullCodeGenerator::EnterFinallyBlock() {
5279 DCHECK(!result_register().is(r4));
5280 // Store result register while executing finally block.
5281 __ push(result_register());
5282 // Cook return address in link register to stack (smi encoded Code* delta)
5283 __ mflr(r4);
5284 __ mov(ip, Operand(masm_->CodeObject()));
5285 __ sub(r4, r4, ip);
5286 __ SmiTag(r4);
5288 // Store the cooked return address while executing the finally block.
5289 __ push(r4);
5291 // Store pending message while executing finally block.
5292 ExternalReference pending_message_obj =
5293 ExternalReference::address_of_pending_message_obj(isolate());
5294 __ mov(ip, Operand(pending_message_obj));
5295 __ LoadP(r4, MemOperand(ip));
5296 __ push(r4);
5298 ClearPendingMessage();
5299 }
5302 void FullCodeGenerator::ExitFinallyBlock() {
5303 DCHECK(!result_register().is(r4));
5304 // Restore pending message from stack.
5305 __ pop(r4);
5306 ExternalReference pending_message_obj =
5307 ExternalReference::address_of_pending_message_obj(isolate());
5308 __ mov(ip, Operand(pending_message_obj));
5309 __ StoreP(r4, MemOperand(ip));
5311 // Restore the cooked return address from the stack.
5312 __ pop(r4);
5314 // Restore the result register and uncook the return address.
5315 __ pop(result_register());
5316 __ SmiUntag(r4);
5317 __ mov(ip, Operand(masm_->CodeObject()));
5318 __ add(ip, ip, r4);
5319 __ mtctr(ip);
5320 __ bctr();
5321 }
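// 'Cooking' stores the return address as a smi-tagged delta from the code
// object's start instead of a raw pointer, so a moving GC can relocate the
// code object while the finally block runs. The exit path reverses it,
// roughly:
//
//   Address target = code_object_start + SmiUntag(delta);
//   mtctr(target);  bctr();   // jump back to the cooked return point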
5324 void FullCodeGenerator::ClearPendingMessage() {
5325 DCHECK(!result_register().is(r4));
5326 ExternalReference pending_message_obj =
5327 ExternalReference::address_of_pending_message_obj(isolate());
5328 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5329 __ mov(ip, Operand(pending_message_obj));
5330 __ StoreP(r4, MemOperand(ip));
5331 }
5334 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5335 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5336 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5337 Operand(SmiFromSlot(slot)));
5338 }
5341 #undef __
5344 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5345 BackEdgeState target_state,
5346 Code* replacement_code) {
5347 Address mov_address = Assembler::target_address_from_return_address(pc);
5348 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5349 CodePatcher patcher(cmp_address, 1);
5351 switch (target_state) {
5352 case INTERRUPT: {
5353 // <decrement profiling counter>
5354 // cmpi r6, 0
5355 // bge <ok> ;; not changed
5356 // mov r12, <interrupt stub address>
5357 // mtlr r12
5358 // blrl
5359 // <reset profiling counter>
5360 // ok-label
5361 patcher.masm()->cmpi(r6, Operand::Zero());
5362 break;
5363 }
5364 case ON_STACK_REPLACEMENT:
5365 case OSR_AFTER_STACK_CHECK:
5366 // <decrement profiling counter>
5368 // bge <ok> ;; not changed
5369 // mov r12, <on-stack replacement address>
5370 // mtlr r12
5371 // blrl
5372 // <reset profiling counter>
5373 // ok-label ----- pc_after points here
5375 // Set the LT bit such that bge is a NOP
5376 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
5377 break;
5378 }
5380 // Replace the stack check address in the mov sequence with the
5381 // entry address of the replacement code.
5382 Assembler::set_target_address_at(mov_address, unoptimized_code,
5383 replacement_code->entry());
5385 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5386 unoptimized_code, mov_address, replacement_code);
5387 }
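// Both states share the same code shape, so patching never moves code; it
// only rewrites two spots in the sequence at each back edge (a sketch):
//
//   <decrement profiling counter>
//   cmpi/crset            <- PatchAt rewrites this one instruction word
//   bge ok                ;; cmpi: taken while counter >= 0; crset: never taken
//   mov r12, <target>     <- set_target_address_at rewrites the target
//   mtlr r12
//   blrl
//   ok: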
5390 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5391 Isolate* isolate, Code* unoptimized_code, Address pc) {
5392 Address mov_address = Assembler::target_address_from_return_address(pc);
5393 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5394 Address interrupt_address =
5395 Assembler::target_address_at(mov_address, unoptimized_code);
5397 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
5398 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
5399 return INTERRUPT;
5400 }
5402 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5404 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5405 return ON_STACK_REPLACEMENT;
5406 }
5408 DCHECK(interrupt_address ==
5409 isolate->builtins()->OsrAfterStackCheck()->entry());
5410 return OSR_AFTER_STACK_CHECK;
5411 }
5412 }  // namespace internal
5413 }  // namespace v8
5414 #endif // V8_TARGET_ARCH_PPC