// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and a method,
// EmitPatchInfo, to record a marker back to the patchable code. The marker
// is a cmpi rx, #yyy instruction; x * kOff16Mask + yyy (using the raw 16-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
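//
// Example (illustrative): with kOff16Mask == 0xffff, a delta of 70000 would
// be recorded as "cmpi r1, #4465", since 70000 / 0xffff == 1 (encoded as the
// register code) and 70000 % 0xffff == 4465 (the immediate); the patcher
// recovers 1 * 0xffff + 4465 == 70000.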
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // The register code encodes the high bits of the offset; the 16-bit
      // immediate encodes the low bits.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
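//
// Rough sketch of that layout (frames-ppc.h is authoritative):
//   fp + kCallerSPOffset : incoming receiver and parameters
//   fp + kPointerSize    : saved return address (lr)
//   fp                   : the caller's saved fp
//   fp - kPointerSize    : context (StandardFrameConstants::kContextOffset)
//   below that           : function marker, then the stack locals allocated
//                          under "[ Allocate locals" below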
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
    __ bne(&ok);

    __ LoadP(r5, GlobalObjectOperand());
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));

    __ StoreP(r5, MemOperand(sp, receiver_offset), r0);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
  info->AddNoFrameRange(0, masm_->pc_offset());

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
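      // Example (illustrative): with locals_count == 70 and kMaxPushes == 32,
      // the counted loop below runs twice (64 pushes) and the unrolled tail
      // emits the remaining 6 pushes.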
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r4);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r4, r3, r5);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    Label skip;
    __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
    __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
    __ bne(&skip);
    __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
    __ bind(&skip);

    // Check the marker in the calling frame.
    __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
    __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
    Label non_construct_frame, done;
    __ bne(&non_construct_frame);
    __ LoadP(r3, MemOperand(
                     r5, ConstructFrameConstants::kOriginalConstructorOffset));
    __ b(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, r3, r5, r6);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ LoadSmiLiteral(r5, Smi::FromInt(num_parameters));
    __ LoadSmiLiteral(r4, Smi::FromInt(rest_index));
    __ LoadSmiLiteral(r3, Smi::FromInt(language_mode()));
    __ Push(r6, r5, r4, r3);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mr(r6, r4);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
    __ Push(r6, r5, r4);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    {
      Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmpl(sp, ip);
      __ bc_short(ge, &ok);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }
375 Comment cmnt(masm_, "[ Body");
376 DCHECK(loop_depth() == 0);
377 VisitStatements(function()->body());
378 DCHECK(loop_depth() == 0);
382 // Always emit a 'return undefined' in case control fell off the end of
385 Comment cmnt(masm_, "[ return <undefined>;");
386 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
388 EmitReturnSequence();
390 if (HasStackOverflow()) {
391 masm_->AbortConstantPoolBuilding();


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
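  // Example (illustrative): a back edge kCodeSizeMultiplier * 10 bytes from
  // its target yields a weight of 10 (clamped to [1, kMaxBackEdgeWeight]), so
  // larger loop bodies drain the interrupt budget faster per iteration.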
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ cmpi(r6, Operand::Zero());
    __ bge(&ok);
    __ push(r3);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    __ pop(r3);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(function());
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ cmpi(result_register(), Operand::Zero());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
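
// Example (illustrative): with two parameters, parameter 0 resolves to
// fp + 3 * kPointerSize and parameter 1 to fp + 2 * kPointerSize (just below
// the receiver on the caller's stack), while local 0 resolves to
// fp + kLocal0Offset.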


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // lexical scope.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
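  // E.g. (illustrative): for "let x;" the slot is pre-initialized with the
  // hole value so that a read before initialization can be detected and made
  // to throw a ReferenceError.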
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r5, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ LoadSmiLiteral(r4, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r5, r4, r3);
      } else {
        __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
        __ Push(cp, r5, r4, r3);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
      __ Push(cp, r5, r4);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(cp, r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ beq(&exit);
  Register null_value = r7;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r3, null_value);
  __ beq(&exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
  __ bge(&done_convert);
  __ bind(&convert);
  __ push(r3);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r3);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
  __ ble(&call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r4, FeedbackVector());
  __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ StoreP(
      r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));          // Smi indicates slow check.
  __ LoadP(r5, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
  __ bgt(&non_proxy);
  __ LoadSmiLiteral(r4, Smi::FromInt(0));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(r4, r3);  // Smi and array.
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r4, r3);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
  __ beq(&update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mr(r6, r3);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope() && info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r5, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r3, Operand(info));
    __ LoadRoot(
        r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
    __ Push(cp, r3, r4);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r3);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ LoadP(StoreDescriptor::ValueRegister(),
             MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r4;
  Register temp = r5;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ beq(&fast);
    // Check that extension is NULL.
    __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ cmpi(temp, Operand::Zero());
    __ bne(slow);
    // Load next context in chain.
    __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r6;
  Register temp = r7;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ cmpi(temp, Operand::Zero());
  __ bne(slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
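  // E.g. (illustrative): in
  //   function f() { var x = 1; eval(s); return x; }
  // "x" may be shadowed by a binding introduced by the eval'd string, so the
  // fast path below only applies when such shadowing can be ruled out.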
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ mov(r3, Operand(var->name()));
        __ push(r3);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    // Each var occupies two slots in the context: for reads and writes.
    const int slot = var->index();
    const int depth = scope()->ContextChainLength(var->scope());
    if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
      __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
      __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
             Operand(var->name()));
      LoadGlobalViaContextStub stub(isolate(), depth);
      __ CallStub(&stub);
    } else {
      __ Push(Smi::FromInt(slot));
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
    }
  } else {
    __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
    __ mov(LoadDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
                            var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          Label done;
          // Let and const need a read barrier.
          GetVar(r3, var);
          __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
          __ bne(&done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ mov(r3, Operand(var->name()));
            __ push(r3);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(r3);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ mov(r4, Operand(var->name()));
      __ Push(cp, r4);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(r3);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r8 = materialized value (RegExp literal)
  // r7 = JS function, literals array
  // r6 = literal index
  // r5 = RegExp pattern
  // r4 = RegExp flags
  // r3 = RegExp literal clone
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r8, ip);
  __ bne(&materialized);

  // Create regexp literal using runtime function.
  // Result will be in r3.
  __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
  __ mov(r5, Operand(expr->pattern()));
  __ mov(r4, Operand(expr->flags()));
  __ Push(r7, r6, r5, r4);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mr(r8, r3);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
  __ b(&allocated);

  __ bind(&runtime_allocate);
  __ LoadSmiLiteral(r3, Smi::FromInt(size));
  __ Push(r8, r3);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r8);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r3: Newly allocated regexp.
  // r8: Materialized regexp.
  // r5: temp.
  __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ push(r4);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ LoadSmiLiteral(r3, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r3.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
      // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r3));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r3);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ LoadP(StoreDescriptor::ValueRegister(), MemOperand(sp));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
          __ push(r3);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
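  // E.g. (illustrative): for "{ get x() { ... }, set x(v) { ... } }" the two
  // accessors share one accessor_table entry and are installed by a single
  // kDefineAccessorPropertyUnchecked call.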
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));
    __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
    __ push(r3);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
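  // E.g. (illustrative): in "{ a: 1, [k]: 2, b: 3 }" only "a" belongs to the
  // static part; "[k]" and everything after it, including "b", is defined by
  // the per-property runtime calls below.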
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on the stack.
      result_saved = true;
    }

    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
            __ push(r3);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ LoadP(r3, MemOperand(sp));
    __ push(r3);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(r3);
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (has_fast_elements) {
      int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
      __ LoadP(r8, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
      __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
      // Update the write barrier for the array store.
      __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
                          kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      __ LoadSmiLiteral(r6, Smi::FromInt(array_index));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }
1862 // In case the array literal contains spread expressions it has two parts. The
1863 // first part is the "static" array, which has a literal index and is handled
1864 // above. The second part is everything from the first spread expression
1865 // (inclusive) onward; these elements get appended to the array. Note that the
1866 // number of elements an iterable produces is unknown ahead of time.
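// Sketch: for a literal like [x, ...ys, z], the loop above has already
// stored x into the cloned array; the loop below then appends each element
// produced by ys via CONCAT_ITERABLE_TO_ARRAY and finally appends z via
// Runtime::kAppendElement.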
1867 if (array_index < length && result_saved) {
1868 __ Drop(1); // literal index
1870 result_saved = false;
1872 for (; array_index < length; array_index++) {
1873 Expression* subexpr = subexprs->at(array_index);
1876 if (subexpr->IsSpread()) {
1877 VisitForStackValue(subexpr->AsSpread()->expression());
1878 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1880 VisitForStackValue(subexpr);
1881 __ CallRuntime(Runtime::kAppendElement, 2);
1884 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1888 __ Drop(1); // literal index
1889 context()->PlugTOS();
1891 context()->Plug(r3);
1896 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1897 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1899 Comment cmnt(masm_, "[ Assignment");
1900 SetExpressionPosition(expr, INSERT_BREAK);
1902 Property* property = expr->target()->AsProperty();
1903 LhsKind assign_type = Property::GetAssignType(property);
1905 // Evaluate LHS expression.
1906 switch (assign_type) {
1908 // Nothing to do here.
1910 case NAMED_PROPERTY:
1911 if (expr->is_compound()) {
1912 // We need the receiver both on the stack and in the register.
1913 VisitForStackValue(property->obj());
1914 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1916 VisitForStackValue(property->obj());
1919 case NAMED_SUPER_PROPERTY:
1921 property->obj()->AsSuperPropertyReference()->this_var());
1922 VisitForAccumulatorValue(
1923 property->obj()->AsSuperPropertyReference()->home_object());
1924 __ Push(result_register());
1925 if (expr->is_compound()) {
1926 const Register scratch = r4;
1927 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1928 __ Push(scratch, result_register());
1931 case KEYED_SUPER_PROPERTY: {
1932 const Register scratch = r4;
1934 property->obj()->AsSuperPropertyReference()->this_var());
1935 VisitForAccumulatorValue(
1936 property->obj()->AsSuperPropertyReference()->home_object());
1937 __ mr(scratch, result_register());
1938 VisitForAccumulatorValue(property->key());
1939 __ Push(scratch, result_register());
1940 if (expr->is_compound()) {
1941 const Register scratch1 = r5;
1942 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1943 __ Push(scratch1, scratch, result_register());
1947 case KEYED_PROPERTY:
1948 if (expr->is_compound()) {
1949 VisitForStackValue(property->obj());
1950 VisitForStackValue(property->key());
1951 __ LoadP(LoadDescriptor::ReceiverRegister(),
1952 MemOperand(sp, 1 * kPointerSize));
1953 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1955 VisitForStackValue(property->obj());
1956 VisitForStackValue(property->key());
1961 // For compound assignments we need another deoptimization point after the
1962 // variable/property load.
1963 if (expr->is_compound()) {
1965 AccumulatorValueContext context(this);
1966 switch (assign_type) {
1968 EmitVariableLoad(expr->target()->AsVariableProxy());
1969 PrepareForBailout(expr->target(), TOS_REG);
1971 case NAMED_PROPERTY:
1972 EmitNamedPropertyLoad(property);
1973 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1975 case NAMED_SUPER_PROPERTY:
1976 EmitNamedSuperPropertyLoad(property);
1977 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1979 case KEYED_SUPER_PROPERTY:
1980 EmitKeyedSuperPropertyLoad(property);
1981 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1983 case KEYED_PROPERTY:
1984 EmitKeyedPropertyLoad(property);
1985 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1990 Token::Value op = expr->binary_op();
1991 __ push(r3); // Left operand goes on the stack.
1992 VisitForAccumulatorValue(expr->value());
1994 AccumulatorValueContext context(this);
1995 if (ShouldInlineSmiCase(op)) {
1996 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1999 EmitBinaryOp(expr->binary_operation(), op);
2002 // Deoptimization point in case the binary operation may have side effects.
2003 PrepareForBailout(expr->binary_operation(), TOS_REG);
2005 VisitForAccumulatorValue(expr->value());
2008 SetExpressionPosition(expr);
2011 switch (assign_type) {
2013 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2014 expr->op(), expr->AssignmentSlot());
2015 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2016 context()->Plug(r3);
2018 case NAMED_PROPERTY:
2019 EmitNamedPropertyAssignment(expr);
2021 case NAMED_SUPER_PROPERTY:
2022 EmitNamedSuperPropertyStore(property);
2023 context()->Plug(r3);
2025 case KEYED_SUPER_PROPERTY:
2026 EmitKeyedSuperPropertyStore(property);
2027 context()->Plug(r3);
2029 case KEYED_PROPERTY:
2030 EmitKeyedPropertyAssignment(expr);
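// Worked sketch of the compound case above: for "o.x += 1" (NAMED_PROPERTY)
// the receiver is kept both on the stack and in the load register, "o.x" is
// loaded and pushed as the left operand, "1" is evaluated into the
// accumulator, the addition runs through the (possibly inlined-smi) binary
// op, and the result is stored back through the named store path.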
2036 void FullCodeGenerator::VisitYield(Yield* expr) {
2037 Comment cmnt(masm_, "[ Yield");
2038 SetExpressionPosition(expr);
2040 // Evaluate yielded value first; the initial iterator definition depends on
2041 // this. It stays on the stack while we update the iterator.
2042 VisitForStackValue(expr->expression());
2044 switch (expr->yield_kind()) {
2045 case Yield::kSuspend:
2046 // Pop value from top-of-stack slot; box result into result register.
2047 EmitCreateIteratorResult(false);
2048 __ push(result_register());
2050 case Yield::kInitial: {
2051 Label suspend, continuation, post_runtime, resume;
2054 __ bind(&continuation);
2055 __ RecordGeneratorContinuation();
2059 VisitForAccumulatorValue(expr->generator_object());
2060 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2061 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2062 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2064 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2066 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2067 kLRHasBeenSaved, kDontSaveFPRegs);
2068 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2070 __ beq(&post_runtime);
2071 __ push(r3); // generator object
2072 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2073 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2074 __ bind(&post_runtime);
2075 __ pop(result_register());
2076 EmitReturnSequence();
2079 context()->Plug(result_register());
2083 case Yield::kFinal: {
2084 VisitForAccumulatorValue(expr->generator_object());
2085 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2086 __ StoreP(r4, FieldMemOperand(result_register(),
2087 JSGeneratorObject::kContinuationOffset),
2089 // Pop value from top-of-stack slot, box result into result register.
2090 EmitCreateIteratorResult(true);
2091 EmitUnwindBeforeReturn();
2092 EmitReturnSequence();
2096 case Yield::kDelegating: {
2097 VisitForStackValue(expr->generator_object());
2099 // Initial stack layout is as follows:
2100 // [sp + 1 * kPointerSize] iter
2101 // [sp + 0 * kPointerSize] g
2103 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2104 Label l_next, l_call;
2105 Register load_receiver = LoadDescriptor::ReceiverRegister();
2106 Register load_name = LoadDescriptor::NameRegister();
2108 // Initial send value is undefined.
2109 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2112 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2114 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2115 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2116 __ Push(load_name, r6, r3); // "throw", iter, except
2119 // try { received = %yield result }
2120 // Shuffle the received result above a try handler and yield it without
2121 // re-boxing.
2123 __ pop(r3); // result
2124 int handler_index = NewHandlerTableEntry();
2125 EnterTryBlock(handler_index, &l_catch);
2126 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2127 __ push(r3); // result
2130 __ bind(&l_continuation);
2131 __ RecordGeneratorContinuation();
2134 __ bind(&l_suspend);
2135 const int generator_object_depth = kPointerSize + try_block_size;
2136 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2138 __ Push(Smi::FromInt(handler_index)); // handler-index
2139 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2140 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2141 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2143 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2145 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2146 kLRHasBeenSaved, kDontSaveFPRegs);
2147 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2148 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2149 __ pop(r3); // result
2150 EmitReturnSequence();
2151 __ bind(&l_resume); // received in r3
2152 ExitTryBlock(handler_index);
2154 // receiver = iter; f = 'next'; arg = received;
2157 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2158 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2159 __ Push(load_name, r6, r3); // "next", iter, received
2161 // result = receiver[f](arg);
2163 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2164 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2165 __ mov(LoadDescriptor::SlotRegister(),
2166 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2167 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2168 CallIC(ic, TypeFeedbackId::None());
2170 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2171 SetCallPosition(expr, 1);
2172 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2175 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2176 __ Drop(1); // The function is still on the stack; drop it.
2178 // if (!result.done) goto l_try;
2179 __ Move(load_receiver, r3);
2181 __ push(load_receiver); // save result
2182 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2183 __ mov(LoadDescriptor::SlotRegister(),
2184 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2185 CallLoadIC(NOT_INSIDE_TYPEOF); // r3=result.done
2186 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2188 __ cmpi(r3, Operand::Zero());
2192 __ pop(load_receiver); // result
2193 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2194 __ mov(LoadDescriptor::SlotRegister(),
2195 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2196 CallLoadIC(NOT_INSIDE_TYPEOF); // r3=result.value
2197 context()->DropAndPlug(2, r3); // drop iter and g
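// The delegating yield above implements, roughly (sketch):
//   var received = undefined, f = "next";
//   while (true) {
//     var result = iter[f](received);            // l_call
//     if (result.done) break;
//     try {
//       received = yield result;                 // l_try / l_suspend
//       f = "next";                              // falls through to l_next
//     } catch (e) {
//       f = "throw"; received = e;               // l_catch, retries at l_call
//     }
//   }
//   // result.value is the value of the whole yield* expression.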
2204 void FullCodeGenerator::EmitGeneratorResume(
2205 Expression* generator, Expression* value,
2206 JSGeneratorObject::ResumeMode resume_mode) {
2207 // The value stays in r3, and is ultimately read by the resumed generator, as
2208 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2209 // is read to throw the value when the resumed generator is already closed.
2210 // r4 will hold the generator object until the activation has been resumed.
2211 VisitForStackValue(generator);
2212 VisitForAccumulatorValue(value);
2215 // Load suspended function and context.
2216 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2217 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2219 // Load receiver and store as the first argument.
2220 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2223 // Push holes for the rest of the arguments to the generator function.
2224 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2226 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2227 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2228 Label argument_loop, push_frame;
2229 #if V8_TARGET_ARCH_PPC64
2230 __ cmpi(r6, Operand::Zero());
2231 __ beq(&push_frame);
2233 __ SmiUntag(r6, SetRC);
2234 __ beq(&push_frame, cr0);
2237 __ bind(&argument_loop);
2239 __ bdnz(&argument_loop);
2241 // Enter a new JavaScript frame, and initialize its slots as they were when
2242 // the generator was suspended.
2243 Label resume_frame, done;
2244 __ bind(&push_frame);
2245 __ b(&resume_frame, SetLK);
2247 __ bind(&resume_frame);
2248 // lr = return address.
2249 // fp = caller's frame pointer.
2250 // cp = callee's context,
2251 // r7 = callee's JS function.
2252 __ PushFixedFrame(r7);
2253 // Adjust FP to point to saved FP.
2254 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2256 // Load the operand stack size.
2257 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2258 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2259 __ SmiUntag(r6, SetRC);
2261 // If we are sending a value and there is no operand stack, we can jump back
2262 // in directly.
2264 if (resume_mode == JSGeneratorObject::NEXT) {
2266 __ bne(&slow_resume, cr0);
2267 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2269 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2270 if (FLAG_enable_embedded_constant_pool) {
2271 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
2273 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2276 __ LoadSmiLiteral(r5,
2277 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2278 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2281 __ bind(&slow_resume);
2284 __ beq(&call_resume, cr0);
2287 // Otherwise, we push holes for the operand stack and call the runtime to fix
2288 // up the stack and the handlers.
2291 __ bind(&operand_loop);
2293 __ bdnz(&operand_loop);
2295 __ bind(&call_resume);
2296 DCHECK(!result_register().is(r4));
2297 __ Push(r4, result_register());
2298 __ Push(Smi::FromInt(resume_mode));
2299 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2300 // Not reached: the runtime call returns elsewhere.
2301 __ stop("not-reached");
2304 context()->Plug(result_register());
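// A minimal standalone sketch (illustration only, not used by this file) of
// the continuation protocol above: a positive smi is a code offset to resume
// at, while the kGeneratorExecuting and kGeneratorClosed sentinels are
// non-positive, so only suspended generators can actually be resumed.
static inline bool IsSuspendedGeneratorSketch(int continuation) {
  return continuation > 0;
}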
2308 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2312 const int instance_size = 5 * kPointerSize;
2313 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2316 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2319 __ bind(&gc_required);
2320 __ Push(Smi::FromInt(instance_size));
2321 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2322 __ LoadP(context_register(),
2323 MemOperand(fp, StandardFrameConstants::kContextOffset));
2325 __ bind(&allocated);
2326 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2327 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2328 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2330 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2331 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2332 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2333 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2334 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2336 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2339 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2342 // Only the value field needs a write barrier, as the other values are in the
2343 // root set.
2344 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2345 kLRHasBeenSaved, kDontSaveFPRegs);
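// Layout sketch of the object built above (5 tagged words, matching the
// instance_size check): [map, properties, elements, value, done], i.e. a
// freshly allocated { value: <popped TOS>, done: <true|false> } iterator
// result, where only the value slot can hold a new-space pointer.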
2349 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2350 SetExpressionPosition(prop);
2351 Literal* key = prop->key()->AsLiteral();
2352 DCHECK(!prop->IsSuperAccess());
2354 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2355 __ mov(LoadDescriptor::SlotRegister(),
2356 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2357 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2361 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2362 // Stack: receiver, home_object.
2363 SetExpressionPosition(prop);
2364 Literal* key = prop->key()->AsLiteral();
2365 DCHECK(!key->value()->IsSmi());
2366 DCHECK(prop->IsSuperAccess());
2368 __ Push(key->value());
2369 __ Push(Smi::FromInt(language_mode()));
2370 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2374 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2375 SetExpressionPosition(prop);
2376 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2377 __ mov(LoadDescriptor::SlotRegister(),
2378 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2383 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2384 // Stack: receiver, home_object, key.
2385 SetExpressionPosition(prop);
2386 __ Push(Smi::FromInt(language_mode()));
2387 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2391 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2393 Expression* left_expr,
2394 Expression* right_expr) {
2395 Label done, smi_case, stub_call;
2397 Register scratch1 = r5;
2398 Register scratch2 = r6;
2400 // Get the arguments.
2402 Register right = r3;
2405 // Perform combined smi check on both operands.
2406 __ orx(scratch1, left, right);
2407 STATIC_ASSERT(kSmiTag == 0);
2408 JumpPatchSite patch_site(masm_);
2409 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2411 __ bind(&stub_call);
2413 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2414 CallIC(code, expr->BinaryOperationFeedbackId());
2415 patch_site.EmitPatchInfo();
2419 // Smi case. This code works the same way as the smi-smi case in the type
2420 // recording binary operation stub.
2423 __ GetLeastBitsFromSmi(scratch1, right, 5);
2424 __ ShiftRightArith(right, left, scratch1);
2425 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2428 __ GetLeastBitsFromSmi(scratch2, right, 5);
2429 #if V8_TARGET_ARCH_PPC64
2430 __ ShiftLeft_(right, left, scratch2);
2432 __ SmiUntag(scratch1, left);
2433 __ ShiftLeft_(scratch1, scratch1, scratch2);
2434 // Check that the *signed* result fits in a smi
2435 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2436 __ SmiTag(right, scratch1);
2441 __ SmiUntag(scratch1, left);
2442 __ GetLeastBitsFromSmi(scratch2, right, 5);
2443 __ srw(scratch1, scratch1, scratch2);
2444 // Unsigned shift is not allowed to produce a negative number.
2445 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2446 __ SmiTag(right, scratch1);
2450 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2451 __ BranchOnOverflow(&stub_call);
2452 __ mr(right, scratch1);
2456 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2457 __ BranchOnOverflow(&stub_call);
2458 __ mr(right, scratch1);
2463 #if V8_TARGET_ARCH_PPC64
2464 // Remove tag from both operands.
2465 __ SmiUntag(ip, right);
2466 __ SmiUntag(r0, left);
2467 __ Mul(scratch1, r0, ip);
2468 // Check for overflowing the smi range - no overflow if higher 33 bits of
2469 // the result are identical.
2470 __ TestIfInt32(scratch1, r0);
2473 __ SmiUntag(ip, right);
2474 __ mullw(scratch1, left, ip);
2475 __ mulhw(scratch2, left, ip);
2476 // Check for overflowing the smi range - no overflow if higher 33 bits of
2477 // the result are identical.
2478 __ TestIfInt32(scratch2, scratch1, ip);
2481 // Go slow on zero result to handle -0.
2482 __ cmpi(scratch1, Operand::Zero());
2484 #if V8_TARGET_ARCH_PPC64
2485 __ SmiTag(right, scratch1);
2487 __ mr(right, scratch1);
2490 // We need -0 if we were multiplying a negative number by 0 to get 0.
2491 // We know one of them was zero.
2493 __ add(scratch2, right, left);
2494 __ cmpi(scratch2, Operand::Zero());
2496 __ LoadSmiLiteral(right, Smi::FromInt(0));
2500 __ orx(right, left, right);
2502 case Token::BIT_AND:
2503 __ and_(right, left, right);
2505 case Token::BIT_XOR:
2506 __ xor_(right, left, right);
2513 context()->Plug(r3);
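// A standalone sketch (illustration only) of the PPC64 smi-multiply guard
// above: the untagged 64-bit product may be re-tagged as a smi only if it
// fits in 32 bits, i.e. sign-extending its low 32 bits reproduces the full
// value ("higher 33 bits identical" in the comments above).
static inline bool ProductFitsSmiSketch(int64_t product) {
  return product == static_cast<int64_t>(static_cast<int32_t>(product));
}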
2517 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2518 int* used_store_slots) {
2519 // Constructor is in r3.
2520 DCHECK(lit != NULL);
2523 // No access check is needed here since the constructor is created by the
2524 // class literal.
2525 Register scratch = r4;
2527 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2530 for (int i = 0; i < lit->properties()->length(); i++) {
2531 ObjectLiteral::Property* property = lit->properties()->at(i);
2532 Expression* value = property->value();
2534 if (property->is_static()) {
2535 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2537 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2540 EmitPropertyKey(property, lit->GetIdForProperty(i));
2542 // The static prototype property is read-only. We handle the non-computed
2543 // property name case in the parser. Since this is the only case where we
2544 // need to check for an own read-only property, we special-case it here so
2545 // we do not need to do the check for every property.
2546 if (property->is_static() && property->is_computed_name()) {
2547 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2551 VisitForStackValue(value);
2552 EmitSetHomeObjectIfNeeded(value, 2,
2553 lit->SlotForHomeObject(value, used_store_slots));
2555 switch (property->kind()) {
2556 case ObjectLiteral::Property::CONSTANT:
2557 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2558 case ObjectLiteral::Property::PROTOTYPE:
2560 case ObjectLiteral::Property::COMPUTED:
2561 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2564 case ObjectLiteral::Property::GETTER:
2565 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2567 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2570 case ObjectLiteral::Property::SETTER:
2571 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2573 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2582 __ CallRuntime(Runtime::kToFastProperties, 1);
2585 __ CallRuntime(Runtime::kToFastProperties, 1);
2587 if (is_strong(language_mode())) {
2589 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2590 __ Push(r3, scratch);
2591 // TODO(conradw): It would be more efficient to define the properties with
2592 // the right attributes the first time round.
2593 // Freeze the prototype.
2594 __ CallRuntime(Runtime::kObjectFreeze, 1);
2595 // Freeze the constructor.
2596 __ CallRuntime(Runtime::kObjectFreeze, 1);
2601 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2604 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2605 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2606 CallIC(code, expr->BinaryOperationFeedbackId());
2607 patch_site.EmitPatchInfo();
2608 context()->Plug(r3);
2612 void FullCodeGenerator::EmitAssignment(Expression* expr,
2613 FeedbackVectorICSlot slot) {
2614 DCHECK(expr->IsValidReferenceExpressionOrThis());
2616 Property* prop = expr->AsProperty();
2617 LhsKind assign_type = Property::GetAssignType(prop);
2619 switch (assign_type) {
2621 Variable* var = expr->AsVariableProxy()->var();
2622 EffectContext context(this);
2623 EmitVariableAssignment(var, Token::ASSIGN, slot);
2626 case NAMED_PROPERTY: {
2627 __ push(r3); // Preserve value.
2628 VisitForAccumulatorValue(prop->obj());
2629 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2630 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2631 __ mov(StoreDescriptor::NameRegister(),
2632 Operand(prop->key()->AsLiteral()->value()));
2633 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2637 case NAMED_SUPER_PROPERTY: {
2639 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2640 VisitForAccumulatorValue(
2641 prop->obj()->AsSuperPropertyReference()->home_object());
2642 // stack: value, this; r3: home_object
2643 Register scratch = r5;
2644 Register scratch2 = r6;
2645 __ mr(scratch, result_register()); // home_object
2646 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2647 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2648 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2649 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2650 // stack: this, home_object; r3: value
2651 EmitNamedSuperPropertyStore(prop);
2654 case KEYED_SUPER_PROPERTY: {
2656 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2658 prop->obj()->AsSuperPropertyReference()->home_object());
2659 VisitForAccumulatorValue(prop->key());
2660 Register scratch = r5;
2661 Register scratch2 = r6;
2662 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2663 // stack: value, this, home_object; r3: key, r6: value
2664 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2665 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2666 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2667 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2668 __ StoreP(r3, MemOperand(sp, 0));
2669 __ Move(r3, scratch2);
2670 // stack: this, home_object, key; r3: value.
2671 EmitKeyedSuperPropertyStore(prop);
2674 case KEYED_PROPERTY: {
2675 __ push(r3); // Preserve value.
2676 VisitForStackValue(prop->obj());
2677 VisitForAccumulatorValue(prop->key());
2678 __ Move(StoreDescriptor::NameRegister(), r3);
2679 __ Pop(StoreDescriptor::ValueRegister(),
2680 StoreDescriptor::ReceiverRegister());
2681 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2683 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2688 context()->Plug(r3);
2692 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2693 Variable* var, MemOperand location) {
2694 __ StoreP(result_register(), location, r0);
2695 if (var->IsContextSlot()) {
2696 // RecordWrite may destroy all its register arguments.
2697 __ mr(r6, result_register());
2698 int offset = Context::SlotOffset(var->index());
2699 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2705 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2706 FeedbackVectorICSlot slot) {
2707 if (var->IsUnallocated()) {
2708 // Global var, const, or let.
2709 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2710 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2711 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2714 } else if (var->IsGlobalSlot()) {
2715 // Global var, const, or let.
2716 DCHECK(var->index() > 0);
2717 DCHECK(var->IsStaticGlobalObjectProperty());
2718 // Each var occupies two slots in the context: for reads and writes.
2719 const int slot = var->index() + 1;
2720 const int depth = scope()->ContextChainLength(var->scope());
2721 if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
2722 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
2723 __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
2724 Operand(var->name()));
2725 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r3));
2726 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2729 __ Push(Smi::FromInt(slot));
2730 __ Push(var->name());
2732 __ CallRuntime(is_strict(language_mode())
2733 ? Runtime::kStoreGlobalViaContext_Strict
2734 : Runtime::kStoreGlobalViaContext_Sloppy,
2737 } else if (var->mode() == LET && op != Token::INIT_LET) {
2738 // Non-initializing assignment to let variable needs a write barrier.
2739 DCHECK(!var->IsLookupSlot());
2740 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2742 MemOperand location = VarOperand(var, r4);
2743 __ LoadP(r6, location);
2744 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2746 __ mov(r6, Operand(var->name()));
2748 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2749 // Perform the assignment.
2751 EmitStoreToStackLocalOrContextSlot(var, location);
2753 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2754 // Assignment to const variable needs a write barrier.
2755 DCHECK(!var->IsLookupSlot());
2756 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2758 MemOperand location = VarOperand(var, r4);
2759 __ LoadP(r6, location);
2760 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2761 __ bne(&const_error);
2762 __ mov(r6, Operand(var->name()));
2764 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2765 __ bind(&const_error);
2766 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2768 } else if (var->is_this() && op == Token::INIT_CONST) {
2769 // Initializing assignment to const {this} needs a write barrier.
2770 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2771 Label uninitialized_this;
2772 MemOperand location = VarOperand(var, r4);
2773 __ LoadP(r6, location);
2774 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2775 __ beq(&uninitialized_this);
2776 __ mov(r4, Operand(var->name()));
2778 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2779 __ bind(&uninitialized_this);
2780 EmitStoreToStackLocalOrContextSlot(var, location);
2782 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2783 if (var->IsLookupSlot()) {
2784 // Assignment to var.
2785 __ push(r3); // Value.
2786 __ mov(r4, Operand(var->name()));
2787 __ mov(r3, Operand(Smi::FromInt(language_mode())));
2788 __ Push(cp, r4, r3); // Context, name, language mode.
2789 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2791 // Assignment to var or initializing assignment to let/const in harmony
2792 // mode.
2793 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2794 MemOperand location = VarOperand(var, r4);
2795 if (generate_debug_code_ && op == Token::INIT_LET) {
2796 // Check for an uninitialized let binding.
2797 __ LoadP(r5, location);
2798 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2799 __ Check(eq, kLetBindingReInitialization);
2801 EmitStoreToStackLocalOrContextSlot(var, location);
2803 } else if (op == Token::INIT_CONST_LEGACY) {
2804 // Const initializers need a write barrier.
2805 DCHECK(var->mode() == CONST_LEGACY);
2806 DCHECK(!var->IsParameter()); // No const parameters.
2807 if (var->IsLookupSlot()) {
2809 __ mov(r3, Operand(var->name()));
2810 __ Push(cp, r3); // Context and name.
2811 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2813 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2815 MemOperand location = VarOperand(var, r4);
2816 __ LoadP(r5, location);
2817 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2819 EmitStoreToStackLocalOrContextSlot(var, location);
2824 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2825 if (is_strict(language_mode())) {
2826 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2828 // Silently ignore store in sloppy mode.
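// For example (sketch, legacy const semantics): given "const c = 1; c = 2;"
// the second statement reaches this branch; it throws in strict code and is
// silently dropped in sloppy code, so c remains 1 either way.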
2833 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2834 // Assignment to a property, using a named store IC.
2835 Property* prop = expr->target()->AsProperty();
2836 DCHECK(prop != NULL);
2837 DCHECK(prop->key()->IsLiteral());
2839 __ mov(StoreDescriptor::NameRegister(),
2840 Operand(prop->key()->AsLiteral()->value()));
2841 __ pop(StoreDescriptor::ReceiverRegister());
2842 if (FLAG_vector_stores) {
2843 EmitLoadStoreICSlot(expr->AssignmentSlot());
2846 CallStoreIC(expr->AssignmentFeedbackId());
2849 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2850 context()->Plug(r3);
2854 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2855 // Assignment to named property of super.
2857 // stack : receiver ('this'), home_object
2858 DCHECK(prop != NULL);
2859 Literal* key = prop->key()->AsLiteral();
2860 DCHECK(key != NULL);
2862 __ Push(key->value());
2864 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2865 : Runtime::kStoreToSuper_Sloppy),
2870 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2871 // Assignment to keyed property of super.
2873 // stack : receiver ('this'), home_object, key
2874 DCHECK(prop != NULL);
2878 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2879 : Runtime::kStoreKeyedToSuper_Sloppy),
2884 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2885 // Assignment to a property, using a keyed store IC.
2886 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2887 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2890 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2891 if (FLAG_vector_stores) {
2892 EmitLoadStoreICSlot(expr->AssignmentSlot());
2895 CallIC(ic, expr->AssignmentFeedbackId());
2898 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2899 context()->Plug(r3);
2903 void FullCodeGenerator::VisitProperty(Property* expr) {
2904 Comment cmnt(masm_, "[ Property");
2905 SetExpressionPosition(expr);
2907 Expression* key = expr->key();
2909 if (key->IsPropertyName()) {
2910 if (!expr->IsSuperAccess()) {
2911 VisitForAccumulatorValue(expr->obj());
2912 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2913 EmitNamedPropertyLoad(expr);
2915 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2917 expr->obj()->AsSuperPropertyReference()->home_object());
2918 EmitNamedSuperPropertyLoad(expr);
2921 if (!expr->IsSuperAccess()) {
2922 VisitForStackValue(expr->obj());
2923 VisitForAccumulatorValue(expr->key());
2924 __ Move(LoadDescriptor::NameRegister(), r3);
2925 __ pop(LoadDescriptor::ReceiverRegister());
2926 EmitKeyedPropertyLoad(expr);
2928 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2930 expr->obj()->AsSuperPropertyReference()->home_object());
2931 VisitForStackValue(expr->key());
2932 EmitKeyedSuperPropertyLoad(expr);
2935 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2936 context()->Plug(r3);
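// Dispatch sketch for the visitor above:
//   o.x      -> EmitNamedPropertyLoad (named load IC)
//   o[k]     -> EmitKeyedPropertyLoad (keyed load IC)
//   super.x  -> EmitNamedSuperPropertyLoad (runtime call)
//   super[k] -> EmitKeyedSuperPropertyLoad (runtime call)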
2940 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2942 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2946 // Code common for calls using the IC.
2947 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2948 Expression* callee = expr->expression();
2950 CallICState::CallType call_type =
2951 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2953 // Get the target function.
2954 if (call_type == CallICState::FUNCTION) {
2956 StackValueContext context(this);
2957 EmitVariableLoad(callee->AsVariableProxy());
2958 PrepareForBailout(callee, NO_REGISTERS);
2960 // Push undefined as receiver. This is patched in the method prologue if it
2961 // is a sloppy mode method.
2962 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2965 // Load the function from the receiver.
2966 DCHECK(callee->IsProperty());
2967 DCHECK(!callee->AsProperty()->IsSuperAccess());
2968 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2969 EmitNamedPropertyLoad(callee->AsProperty());
2970 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2971 // Push the target function under the receiver.
2972 __ LoadP(r0, MemOperand(sp, 0));
2974 __ StoreP(r3, MemOperand(sp, kPointerSize));
2977 EmitCall(expr, call_type);
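// Stack sketch for the shuffle above, top of stack first: once the named
// load leaves the target function in r3,
//   before: [receiver, ...]
//   after:  [receiver, function, ...]
// i.e. the receiver is duplicated on top and the function overwrites the
// old receiver slot, placing the target function under the receiver.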
2981 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2982 Expression* callee = expr->expression();
2983 DCHECK(callee->IsProperty());
2984 Property* prop = callee->AsProperty();
2985 DCHECK(prop->IsSuperAccess());
2986 SetExpressionPosition(prop);
2988 Literal* key = prop->key()->AsLiteral();
2989 DCHECK(!key->value()->IsSmi());
2990 // Load the function from the receiver.
2991 const Register scratch = r4;
2992 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2993 VisitForAccumulatorValue(super_ref->home_object());
2995 VisitForAccumulatorValue(super_ref->this_var());
2996 __ Push(scratch, r3, r3, scratch);
2997 __ Push(key->value());
2998 __ Push(Smi::FromInt(language_mode()));
3002 // - this (receiver)
3003 // - this (receiver) <-- LoadFromSuper will pop here and below.
3007 __ CallRuntime(Runtime::kLoadFromSuper, 4);
3009 // Replace home_object with target function.
3010 __ StoreP(r3, MemOperand(sp, kPointerSize));
3013 // - target function
3014 // - this (receiver)
3015 EmitCall(expr, CallICState::METHOD);
3019 // Code common for calls using the IC.
3020 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
3022 VisitForAccumulatorValue(key);
3024 Expression* callee = expr->expression();
3026 // Load the function from the receiver.
3027 DCHECK(callee->IsProperty());
3028 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3029 __ Move(LoadDescriptor::NameRegister(), r3);
3030 EmitKeyedPropertyLoad(callee->AsProperty());
3031 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3033 // Push the target function under the receiver.
3034 __ LoadP(ip, MemOperand(sp, 0));
3036 __ StoreP(r3, MemOperand(sp, kPointerSize));
3038 EmitCall(expr, CallICState::METHOD);
3042 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3043 Expression* callee = expr->expression();
3044 DCHECK(callee->IsProperty());
3045 Property* prop = callee->AsProperty();
3046 DCHECK(prop->IsSuperAccess());
3048 SetExpressionPosition(prop);
3049 // Load the function from the receiver.
3050 const Register scratch = r4;
3051 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3052 VisitForAccumulatorValue(super_ref->home_object());
3054 VisitForAccumulatorValue(super_ref->this_var());
3055 __ Push(scratch, r3, r3, scratch);
3056 VisitForStackValue(prop->key());
3057 __ Push(Smi::FromInt(language_mode()));
3061 // - this (receiver)
3062 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3066 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3068 // Replace home_object with target function.
3069 __ StoreP(r3, MemOperand(sp, kPointerSize));
3072 // - target function
3073 // - this (receiver)
3074 EmitCall(expr, CallICState::METHOD);
3078 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3079 // Load the arguments.
3080 ZoneList<Expression*>* args = expr->arguments();
3081 int arg_count = args->length();
3082 for (int i = 0; i < arg_count; i++) {
3083 VisitForStackValue(args->at(i));
3086 SetCallPosition(expr, arg_count);
3087 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3088 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
3089 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3090 // Don't assign a type feedback id to the IC, since type feedback is provided
3091 // by the vector above.
3094 RecordJSReturnSite(expr);
3095 // Restore context register.
3096 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3097 context()->DropAndPlug(1, r3);
3101 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3102 // r7: copy of the first argument or undefined if it doesn't exist.
3103 if (arg_count > 0) {
3104 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
3106 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3109 // r6: the receiver of the enclosing function.
3110 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3112 // r5: language mode.
3113 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
3115 // r4: the start position of the scope the call resides in.
3116 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3118 // Do the runtime call.
3119 __ Push(r7, r6, r5, r4);
3120 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3124 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3125 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3126 VariableProxy* callee = expr->expression()->AsVariableProxy();
3127 if (callee->var()->IsLookupSlot()) {
3129 SetExpressionPosition(callee);
3130 // Generate code for loading from variables potentially shadowed by
3131 // eval-introduced variables.
3132 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3135 // Call the runtime to find the function to call (returned in r3) and
3136 // the object holding it (returned in r4).
3137 DCHECK(!context_register().is(r5));
3138 __ mov(r5, Operand(callee->name()));
3139 __ Push(context_register(), r5);
3140 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3141 __ Push(r3, r4); // Function, receiver.
3142 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3144 // If fast case code has been generated, emit code to push the function
3145 // and receiver and have the slow path jump around this code.
3146 if (done.is_linked()) {
3152 // Pass undefined as the receiver, which is the WithBaseObject of a
3153 // non-object environment record. If the callee is sloppy, it will patch
3154 // it up to be the global receiver.
3155 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3160 VisitForStackValue(callee);
3161 // refEnv.WithBaseObject()
3162 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3163 __ push(r5); // Reserved receiver slot.
3168 void FullCodeGenerator::VisitCall(Call* expr) {
3170 // We want to verify that RecordJSReturnSite gets called on all paths
3171 // through this function. Avoid early returns.
3172 expr->return_is_recorded_ = false;
3175 Comment cmnt(masm_, "[ Call");
3176 Expression* callee = expr->expression();
3177 Call::CallType call_type = expr->GetCallType(isolate());
3179 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3180 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3181 // to resolve the function we need to call. Then we call the resolved
3182 // function using the given arguments.
3183 ZoneList<Expression*>* args = expr->arguments();
3184 int arg_count = args->length();
3186 PushCalleeAndWithBaseObject(expr);
3188 // Push the arguments.
3189 for (int i = 0; i < arg_count; i++) {
3190 VisitForStackValue(args->at(i));
3193 // Push a copy of the function (found below the arguments) and
3194 // resolve eval.
3195 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3197 EmitResolvePossiblyDirectEval(arg_count);
3199 // Touch up the stack with the resolved function.
3200 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3202 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3204 // Record source position for debugger.
3205 SetCallPosition(expr, arg_count);
3206 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3207 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3209 RecordJSReturnSite(expr);
3210 // Restore context register.
3211 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3212 context()->DropAndPlug(1, r3);
3213 } else if (call_type == Call::GLOBAL_CALL) {
3214 EmitCallWithLoadIC(expr);
3216 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3217 // Call to a lookup slot (dynamically introduced variable).
3218 PushCalleeAndWithBaseObject(expr);
3220 } else if (call_type == Call::PROPERTY_CALL) {
3221 Property* property = callee->AsProperty();
3222 bool is_named_call = property->key()->IsPropertyName();
3223 if (property->IsSuperAccess()) {
3224 if (is_named_call) {
3225 EmitSuperCallWithLoadIC(expr);
3227 EmitKeyedSuperCallWithLoadIC(expr);
3230 VisitForStackValue(property->obj());
3231 if (is_named_call) {
3232 EmitCallWithLoadIC(expr);
3234 EmitKeyedCallWithLoadIC(expr, property->key());
3237 } else if (call_type == Call::SUPER_CALL) {
3238 EmitSuperConstructorCall(expr);
3240 DCHECK(call_type == Call::OTHER_CALL);
3241 // Call to an arbitrary expression not handled specially above.
3242 VisitForStackValue(callee);
3243 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3245 // Emit function call.
3250 // RecordJSReturnSite should have been called.
3251 DCHECK(expr->return_is_recorded_);
3256 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3257 Comment cmnt(masm_, "[ CallNew");
3258 // According to ECMA-262, section 11.2.2, page 44, the function
3259 // expression in new calls must be evaluated before the
3260 // arguments.
3262 // Push constructor on the stack. If it's not a function it's used as
3263 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3264 // ignored.
3265 DCHECK(!expr->expression()->IsSuperPropertyReference());
3266 VisitForStackValue(expr->expression());
3268 // Push the arguments ("left-to-right") on the stack.
3269 ZoneList<Expression*>* args = expr->arguments();
3270 int arg_count = args->length();
3271 for (int i = 0; i < arg_count; i++) {
3272 VisitForStackValue(args->at(i));
3275 // Call the construct call builtin that handles allocation and
3276 // constructor invocation.
3277 SetConstructCallPosition(expr);
3279 // Load function and argument count into r4 and r3.
3280 __ mov(r3, Operand(arg_count));
3281 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3283 // Record call targets in unoptimized code.
3284 if (FLAG_pretenuring_call_new) {
3285 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3286 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3287 expr->CallNewFeedbackSlot().ToInt() + 1);
3290 __ Move(r5, FeedbackVector());
3291 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3293 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3294 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3295 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3296 context()->Plug(r3);
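// Worked sketch of the sequence above: for "new F(1, 2)" the stack holds
// [2, 1, F] (top first), so arg_count == 2, r3 receives the count 2, and r4
// receives F loaded from sp + 2 * kPointerSize before calling the
// CallConstructStub.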
3300 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3301 SuperCallReference* super_call_ref =
3302 expr->expression()->AsSuperCallReference();
3303 DCHECK_NOT_NULL(super_call_ref);
3305 EmitLoadSuperConstructor(super_call_ref);
3306 __ push(result_register());
3308 // Push the arguments ("left-to-right") on the stack.
3309 ZoneList<Expression*>* args = expr->arguments();
3310 int arg_count = args->length();
3311 for (int i = 0; i < arg_count; i++) {
3312 VisitForStackValue(args->at(i));
3315 // Call the construct call builtin that handles allocation and
3316 // constructor invocation.
3317 SetConstructCallPosition(expr);
3319 // Load original constructor into r7.
3320 VisitForAccumulatorValue(super_call_ref->new_target_var());
3321 __ mr(r7, result_register());
3323 // Load function and argument count into r4 and r3.
3324 __ mov(r3, Operand(arg_count));
3325 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3327 // Record call targets in unoptimized code.
3328 if (FLAG_pretenuring_call_new) {
3330 /* TODO(dslomov): support pretenuring.
3331 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3332 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3333 expr->CallNewFeedbackSlot().ToInt() + 1);
3337 __ Move(r5, FeedbackVector());
3338 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
3340 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3341 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3343 RecordJSReturnSite(expr);
3345 context()->Plug(r3);
3349 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3350 ZoneList<Expression*>* args = expr->arguments();
3351 DCHECK(args->length() == 1);
3353 VisitForAccumulatorValue(args->at(0));
3355 Label materialize_true, materialize_false;
3356 Label* if_true = NULL;
3357 Label* if_false = NULL;
3358 Label* fall_through = NULL;
3359 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3360 &if_false, &fall_through);
3362 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3363 __ TestIfSmi(r3, r0);
3364 Split(eq, if_true, if_false, fall_through, cr0);
3366 context()->Plug(if_true, if_false);
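// A standalone sketch (illustration only) of the test above: with
// kSmiTag == 0 and a one-bit tag, a value is a smi exactly when its low tag
// bit is clear, which is what TestIfSmi checks via an AND that sets cr0.
static inline bool IsSmiSketch(intptr_t value) {
  return (value & 1) == 0;  // low bit is the smi tag
}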
3370 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3371 ZoneList<Expression*>* args = expr->arguments();
3372 DCHECK(args->length() == 1);
3374 VisitForAccumulatorValue(args->at(0));
3376 Label materialize_true, materialize_false;
3377 Label* if_true = NULL;
3378 Label* if_false = NULL;
3379 Label* fall_through = NULL;
3380 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3381 &if_false, &fall_through);
3383 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3384 __ TestIfPositiveSmi(r3, r0);
3385 Split(eq, if_true, if_false, fall_through, cr0);
3387 context()->Plug(if_true, if_false);
3391 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3392 ZoneList<Expression*>* args = expr->arguments();
3393 DCHECK(args->length() == 1);
3395 VisitForAccumulatorValue(args->at(0));
3397 Label materialize_true, materialize_false;
3398 Label* if_true = NULL;
3399 Label* if_false = NULL;
3400 Label* fall_through = NULL;
3401 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3402 &if_false, &fall_through);
3404 __ JumpIfSmi(r3, if_false);
3405 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3408 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3409 // Undetectable objects behave like undefined when tested with typeof.
3410 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3411 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3412 __ bne(if_false, cr0);
3413 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3414 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3416 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3417 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3418 Split(le, if_true, if_false, fall_through);
3420 context()->Plug(if_true, if_false);
3424 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3425 ZoneList<Expression*>* args = expr->arguments();
3426 DCHECK(args->length() == 1);
3428 VisitForAccumulatorValue(args->at(0));
3430 Label materialize_true, materialize_false;
3431 Label* if_true = NULL;
3432 Label* if_false = NULL;
3433 Label* fall_through = NULL;
3434 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3435 &if_false, &fall_through);
3437 __ JumpIfSmi(r3, if_false);
3438 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3439 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3440 Split(ge, if_true, if_false, fall_through);
3442 context()->Plug(if_true, if_false);
3446 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3447 ZoneList<Expression*>* args = expr->arguments();
3448 DCHECK(args->length() == 1);
3450 VisitForAccumulatorValue(args->at(0));
3452 Label materialize_true, materialize_false;
3453 Label* if_true = NULL;
3454 Label* if_false = NULL;
3455 Label* fall_through = NULL;
3456 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3457 &if_false, &fall_through);
3459 __ JumpIfSmi(r3, if_false);
3460 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3461 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3462 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3463 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3464 Split(ne, if_true, if_false, fall_through, cr0);
3466 context()->Plug(if_true, if_false);
3470 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3471 CallRuntime* expr) {
3472 ZoneList<Expression*>* args = expr->arguments();
3473 DCHECK(args->length() == 1);
3475 VisitForAccumulatorValue(args->at(0));
3477 Label materialize_true, materialize_false, skip_lookup;
3478 Label* if_true = NULL;
3479 Label* if_false = NULL;
3480 Label* fall_through = NULL;
3481 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3482 &if_false, &fall_through);
3484 __ AssertNotSmi(r3);
3486 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3487 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3488 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3489 __ bne(&skip_lookup, cr0);
3491 // Check for fast case object. Generate false result for slow case object.
3492 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3493 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3494 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3498 // Look for valueOf name in the descriptor array, and indicate false if
3499 // found. Since we omit an enumeration index check, if it is added via a
3500 // transition that shares its descriptor array, this is a false positive.
3501 Label entry, loop, done;
3503 // Skip loop if no descriptors are valid.
3504 __ NumberOfOwnDescriptors(r6, r4);
3505 __ cmpi(r6, Operand::Zero());
3508 __ LoadInstanceDescriptors(r4, r7);
3509 // r7: descriptor array.
3510 // r6: valid entries in the descriptor array.
3511 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3513 // Calculate location of the first key name.
3514 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3515 // Calculate the end of the descriptor array.
3517 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3520 // Loop through all the keys in the descriptor array. If one of these is the
3521 // string "valueOf" the result is false.
3522 // The use of ip to store the valueOf string assumes that it is not otherwise
3523 // used in the loop below.
3524 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3527 __ LoadP(r6, MemOperand(r7, 0));
3530 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3537 // Set the bit in the map to indicate that there is no local valueOf field.
3538 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3539 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3540 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3542 __ bind(&skip_lookup);
3544 // If a valueOf property is not found on the object, check that its
3545 // prototype is the unmodified String prototype. If not, the result is false.
3546 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3547 __ JumpIfSmi(r5, if_false);
3548 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3549 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3550 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3552 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3555 Split(eq, if_true, if_false, fall_through);
3557 context()->Plug(if_true, if_false);
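// Summary sketch of the fast path above: the first descriptor scan that
// proves there is no own "valueOf" sets the
// kStringWrapperSafeForDefaultValueOf bit on the map, so later checks on
// objects sharing that map branch straight to skip_lookup.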
3561 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3562 ZoneList<Expression*>* args = expr->arguments();
3563 DCHECK(args->length() == 1);
3565 VisitForAccumulatorValue(args->at(0));
3567 Label materialize_true, materialize_false;
3568 Label* if_true = NULL;
3569 Label* if_false = NULL;
3570 Label* fall_through = NULL;
3571 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3572 &if_false, &fall_through);
3574 __ JumpIfSmi(r3, if_false);
3575 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3576 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3577 Split(eq, if_true, if_false, fall_through);
3579 context()->Plug(if_true, if_false);
3583 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3584 ZoneList<Expression*>* args = expr->arguments();
3585 DCHECK(args->length() == 1);
3587 VisitForAccumulatorValue(args->at(0));
3589 Label materialize_true, materialize_false;
3590 Label* if_true = NULL;
3591 Label* if_false = NULL;
3592 Label* fall_through = NULL;
3593 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3594 &if_false, &fall_through);
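3595 // -0 is the only double whose bit pattern has the sign bit set and every other bit zero; the checks below test for exactly that pattern.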
3596 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3597 #if V8_TARGET_ARCH_PPC64
3598 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3599 __ li(r5, Operand(1));
3600 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3601 __ cmp(r4, r5);
3602 #else
3603 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3604 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3606 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3607 __ cmp(r5, r0);
3608 __ bne(if_false);
3609 __ cmpi(r4, Operand::Zero());
3610 #endif
3613 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3614 Split(eq, if_true, if_false, fall_through);
3616 context()->Plug(if_true, if_false);
3620 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3621 ZoneList<Expression*>* args = expr->arguments();
3622 DCHECK(args->length() == 1);
3624 VisitForAccumulatorValue(args->at(0));
3626 Label materialize_true, materialize_false;
3627 Label* if_true = NULL;
3628 Label* if_false = NULL;
3629 Label* fall_through = NULL;
3630 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3631 &if_false, &fall_through);
3633 __ JumpIfSmi(r3, if_false);
3634 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3635 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3636 Split(eq, if_true, if_false, fall_through);
3638 context()->Plug(if_true, if_false);
3642 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3643 ZoneList<Expression*>* args = expr->arguments();
3644 DCHECK(args->length() == 1);
3646 VisitForAccumulatorValue(args->at(0));
3648 Label materialize_true, materialize_false;
3649 Label* if_true = NULL;
3650 Label* if_false = NULL;
3651 Label* fall_through = NULL;
3652 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3653 &if_false, &fall_through);
3655 __ JumpIfSmi(r3, if_false);
3656 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
3657 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3658 Split(eq, if_true, if_false, fall_through);
3660 context()->Plug(if_true, if_false);
3664 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3665 ZoneList<Expression*>* args = expr->arguments();
3666 DCHECK(args->length() == 1);
3668 VisitForAccumulatorValue(args->at(0));
3670 Label materialize_true, materialize_false;
3671 Label* if_true = NULL;
3672 Label* if_false = NULL;
3673 Label* fall_through = NULL;
3674 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3675 &if_false, &fall_through);
3677 __ JumpIfSmi(r3, if_false);
3678 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3679 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3680 Split(eq, if_true, if_false, fall_through);
3682 context()->Plug(if_true, if_false);
3686 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3687 ZoneList<Expression*>* args = expr->arguments();
3688 DCHECK(args->length() == 1);
3690 VisitForAccumulatorValue(args->at(0));
3692 Label materialize_true, materialize_false;
3693 Label* if_true = NULL;
3694 Label* if_false = NULL;
3695 Label* fall_through = NULL;
3696 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3697 &if_false, &fall_through);
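3698 // JS proxy instance types form a contiguous range, so the subtract-and-unsigned-compare below performs the range check with a single branch.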
3699 __ JumpIfSmi(r3, if_false);
3700 Register map = r4;
3701 Register type_reg = r5;
3702 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3703 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3704 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3705 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3706 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3707 Split(le, if_true, if_false, fall_through);
3709 context()->Plug(if_true, if_false);
3713 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3714 DCHECK(expr->arguments()->length() == 0);
3716 Label materialize_true, materialize_false;
3717 Label* if_true = NULL;
3718 Label* if_false = NULL;
3719 Label* fall_through = NULL;
3720 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3721 &if_false, &fall_through);
3723 // Get the frame pointer for the calling frame.
3724 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3726 // Skip the arguments adaptor frame if it exists.
3727 Label check_frame_marker;
3728 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3729 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3730 __ bne(&check_frame_marker);
3731 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3733 // Check the marker in the calling frame.
3734 __ bind(&check_frame_marker);
3735 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3736 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3737 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3738 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3739 Split(eq, if_true, if_false, fall_through);
3741 context()->Plug(if_true, if_false);
3745 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3746 ZoneList<Expression*>* args = expr->arguments();
3747 DCHECK(args->length() == 2);
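3748 // %_ObjectEquals is a simple reference comparison of the two values; no coercion is performed.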
3749 // Load the two objects into registers and perform the comparison.
3750 VisitForStackValue(args->at(0));
3751 VisitForAccumulatorValue(args->at(1));
3753 Label materialize_true, materialize_false;
3754 Label* if_true = NULL;
3755 Label* if_false = NULL;
3756 Label* fall_through = NULL;
3757 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3758 &if_false, &fall_through);
3760 __ pop(r4);
3761 __ cmp(r3, r4);
3762 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3763 Split(eq, if_true, if_false, fall_through);
3765 context()->Plug(if_true, if_false);
3769 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3770 ZoneList<Expression*>* args = expr->arguments();
3771 DCHECK(args->length() == 1);
3773 // ArgumentsAccessStub expects the key in r4 and the formal
3774 // parameter count in r3.
3775 VisitForAccumulatorValue(args->at(0));
3776 __ mr(r4, r3);
3777 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3778 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3779 __ CallStub(&stub);
3780 context()->Plug(r3);
3784 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3785 DCHECK(expr->arguments()->length() == 0);
3786 Label exit;
3787 // Get the number of formal parameters.
3788 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3790 // Check if the calling frame is an arguments adaptor frame.
3791 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3792 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3793 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3794 __ bne(&exit);
3796 // Arguments adaptor case: Read the arguments length from the
3797 // adaptor frame.
3798 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3800 __ bind(&exit);
3801 context()->Plug(r3);
3805 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3806 ZoneList<Expression*>* args = expr->arguments();
3807 DCHECK(args->length() == 1);
3808 Label done, null, function, non_function_constructor;
3810 VisitForAccumulatorValue(args->at(0));
3812 // If the object is a smi, we return null.
3813 __ JumpIfSmi(r3, &null);
3815 // Check that the object is a JS object but take special care of JS
3816 // functions to make sure they have 'Function' as their class.
3817 // Assume that there are only two callable types, and one of them is at
3818 // either end of the type range for JS object types. Saves extra comparisons.
3819 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3820 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3821 // Map is now in r3.
3822 __ blt(&null);
3823 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3824 FIRST_SPEC_OBJECT_TYPE + 1);
3825 __ beq(&function);
3827 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3828 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3829 __ beq(&function);
3830 // Assume that there is no larger type.
3831 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3833 // Check if the constructor in the map is a JS function.
3834 Register instance_type = r5;
3835 __ GetMapConstructor(r3, r3, r4, instance_type);
3836 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3837 __ bne(&non_function_constructor);
3839 // r3 now contains the constructor function. Grab the
3840 // instance class name from there.
3841 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3842 __ LoadP(r3,
3843 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3844 __ b(&done);
3846 // Functions have class 'Function'.
3847 __ bind(&function);
3848 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3849 __ b(&done);
3851 // Objects with a non-function constructor have class 'Object'.
3852 __ bind(&non_function_constructor);
3853 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3854 __ b(&done);
3856 // Non-JS objects have class null.
3857 __ bind(&null);
3858 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3860 // All done.
3861 __ bind(&done);
3863 context()->Plug(r3);
3867 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3868 ZoneList<Expression*>* args = expr->arguments();
3869 DCHECK(args->length() == 1);
3870 VisitForAccumulatorValue(args->at(0)); // Load the object.
3872 Label done;
3873 // If the object is a smi return the object.
3874 __ JumpIfSmi(r3, &done);
3875 // If the object is not a value type, return the object.
3876 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3877 __ bne(&done);
3878 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3880 __ bind(&done);
3881 context()->Plug(r3);
3885 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3886 ZoneList<Expression*>* args = expr->arguments();
3887 DCHECK_EQ(1, args->length());
3889 VisitForAccumulatorValue(args->at(0));
3891 Label materialize_true, materialize_false;
3892 Label* if_true = nullptr;
3893 Label* if_false = nullptr;
3894 Label* fall_through = nullptr;
3895 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3896 &if_false, &fall_through);
3898 __ JumpIfSmi(r3, if_false);
3899 __ CompareObjectType(r3, r4, r4, JS_DATE_TYPE);
3900 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3901 Split(eq, if_true, if_false, fall_through);
3903 context()->Plug(if_true, if_false);
3907 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3908 ZoneList<Expression*>* args = expr->arguments();
3909 DCHECK(args->length() == 2);
3910 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3911 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3913 VisitForAccumulatorValue(args->at(0)); // Load the object.
3915 Register object = r3;
3916 Register result = r3;
3917 Register scratch0 = r11;
3918 Register scratch1 = r4;
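3919 // Fields below kFirstUncachedField can be read straight out of the JSDate as long as the isolate's date cache stamp still matches; otherwise the C function computes the field.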
3920 if (index->value() == 0) {
3921 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3922 } else {
3923 Label runtime, done;
3924 if (index->value() < JSDate::kFirstUncachedField) {
3925 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3926 __ mov(scratch1, Operand(stamp));
3927 __ LoadP(scratch1, MemOperand(scratch1));
3928 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3929 __ cmp(scratch1, scratch0);
3930 __ bne(&runtime);
3931 __ LoadP(result,
3932 FieldMemOperand(object, JSDate::kValueOffset +
3933 kPointerSize * index->value()),
3934 scratch0);
3935 __ b(&done);
3936 }
3937 __ bind(&runtime);
3938 __ PrepareCallCFunction(2, scratch1);
3939 __ LoadSmiLiteral(r4, index);
3940 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3941 __ bind(&done);
3942 }
3944 context()->Plug(result);
3948 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3949 ZoneList<Expression*>* args = expr->arguments();
3950 DCHECK_EQ(3, args->length());
3952 Register string = r3;
3953 Register index = r4;
3954 Register value = r5;
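3955 // The index and value arrive as smis; the string is the accumulator value.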
3956 VisitForStackValue(args->at(0)); // index
3957 VisitForStackValue(args->at(1)); // value
3958 VisitForAccumulatorValue(args->at(2)); // string
3959 __ Pop(index, value);
3961 if (FLAG_debug_code) {
3962 __ TestIfSmi(value, r0);
3963 __ Check(eq, kNonSmiValue, cr0);
3964 __ TestIfSmi(index, r0);
3965 __ Check(eq, kNonSmiIndex, cr0);
3966 __ SmiUntag(index, index);
3967 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3968 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3969 __ SmiTag(index, index);
3970 }
3972 __ SmiUntag(value);
3973 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3974 __ SmiToByteArrayOffset(r0, index);
3975 __ stbx(value, MemOperand(ip, r0));
3976 context()->Plug(string);
3980 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3981 ZoneList<Expression*>* args = expr->arguments();
3982 DCHECK_EQ(3, args->length());
3984 Register string = r3;
3985 Register index = r4;
3986 Register value = r5;
3988 VisitForStackValue(args->at(0)); // index
3989 VisitForStackValue(args->at(1)); // value
3990 VisitForAccumulatorValue(args->at(2)); // string
3991 __ Pop(index, value);
3993 if (FLAG_debug_code) {
3994 __ TestIfSmi(value, r0);
3995 __ Check(eq, kNonSmiValue, cr0);
3996 __ TestIfSmi(index, r0);
3997 __ Check(eq, kNonSmiIndex, cr0);
3998 __ SmiUntag(index, index);
3999 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4000 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
4001 __ SmiTag(index, index);
4002 }
4004 __ SmiUntag(value);
4005 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4006 __ SmiToShortArrayOffset(r0, index);
4007 __ sthx(value, MemOperand(ip, r0));
4008 context()->Plug(string);
4012 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4013 ZoneList<Expression*>* args = expr->arguments();
4014 DCHECK(args->length() == 2);
4015 VisitForStackValue(args->at(0)); // Load the object.
4016 VisitForAccumulatorValue(args->at(1)); // Load the value.
4017 __ pop(r4); // r3 = value. r4 = object.
4019 Label done;
4020 // If the object is a smi, return the value.
4021 __ JumpIfSmi(r4, &done);
4023 // If the object is not a value type, return the value.
4024 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
4025 __ bne(&done);
4027 // Store the value.
4028 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
4029 // Update the write barrier. Save the value as it will be
4030 // overwritten by the write barrier code and is needed afterward.
4031 __ mr(r5, r3);
4032 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
4033 kDontSaveFPRegs);
4035 __ bind(&done);
4036 context()->Plug(r3);
4040 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4041 ZoneList<Expression*>* args = expr->arguments();
4042 DCHECK_EQ(args->length(), 1);
4043 // Load the argument into r3 and call the stub.
4044 VisitForAccumulatorValue(args->at(0));
4046 NumberToStringStub stub(isolate());
4047 __ CallStub(&stub);
4048 context()->Plug(r3);
4052 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4053 ZoneList<Expression*>* args = expr->arguments();
4054 DCHECK(args->length() == 1);
4055 VisitForAccumulatorValue(args->at(0));
4057 Label done;
4058 StringCharFromCodeGenerator generator(r3, r4);
4059 generator.GenerateFast(masm_);
4060 __ b(&done);
4062 NopRuntimeCallHelper call_helper;
4063 generator.GenerateSlow(masm_, call_helper);
4065 __ bind(&done);
4066 context()->Plug(r4);
4070 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4071 ZoneList<Expression*>* args = expr->arguments();
4072 DCHECK(args->length() == 2);
4073 VisitForStackValue(args->at(0));
4074 VisitForAccumulatorValue(args->at(1));
4076 Register object = r4;
4077 Register index = r3;
4078 Register result = r6;
4080 __ pop(object);
4082 Label need_conversion;
4083 Label index_out_of_range;
4084 Label done;
4085 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
4086 &need_conversion, &index_out_of_range,
4087 STRING_INDEX_IS_NUMBER);
4088 generator.GenerateFast(masm_);
4089 __ b(&done);
4091 __ bind(&index_out_of_range);
4092 // When the index is out of range, the spec requires us to return
4093 // NaN.
4094 __ LoadRoot(result, Heap::kNanValueRootIndex);
4095 __ b(&done);
4097 __ bind(&need_conversion);
4098 // Load the undefined value into the result register, which will
4099 // trigger conversion.
4100 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4101 __ b(&done);
4103 NopRuntimeCallHelper call_helper;
4104 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4106 __ bind(&done);
4107 context()->Plug(result);
4111 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4112 ZoneList<Expression*>* args = expr->arguments();
4113 DCHECK(args->length() == 2);
4114 VisitForStackValue(args->at(0));
4115 VisitForAccumulatorValue(args->at(1));
4117 Register object = r4;
4118 Register index = r3;
4119 Register scratch = r6;
4120 Register result = r3;
4122 __ pop(object);
4124 Label need_conversion;
4125 Label index_out_of_range;
4126 Label done;
4127 StringCharAtGenerator generator(object, index, scratch, result,
4128 &need_conversion, &need_conversion,
4129 &index_out_of_range, STRING_INDEX_IS_NUMBER);
4130 generator.GenerateFast(masm_);
4131 __ b(&done);
4133 __ bind(&index_out_of_range);
4134 // When the index is out of range, the spec requires us to return
4135 // the empty string.
4136 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4137 __ b(&done);
4139 __ bind(&need_conversion);
4140 // Move smi zero into the result register, which will trigger
4141 // conversion.
4142 __ LoadSmiLiteral(result, Smi::FromInt(0));
4143 __ b(&done);
4145 NopRuntimeCallHelper call_helper;
4146 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4148 __ bind(&done);
4149 context()->Plug(result);
4153 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4154 ZoneList<Expression*>* args = expr->arguments();
4155 DCHECK_EQ(2, args->length());
4156 VisitForStackValue(args->at(0));
4157 VisitForAccumulatorValue(args->at(1));
4159 __ pop(r4);
4160 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4161 __ CallStub(&stub);
4162 context()->Plug(r3);
4166 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4167 ZoneList<Expression*>* args = expr->arguments();
4168 DCHECK(args->length() >= 2);
4170 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4171 for (int i = 0; i < arg_count + 1; i++) {
4172 VisitForStackValue(args->at(i));
4173 }
4174 VisitForAccumulatorValue(args->last()); // Function.
4176 Label runtime, done;
4177 // Check for non-function argument (including proxy).
4178 __ JumpIfSmi(r3, &runtime);
4179 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
4180 __ bne(&runtime);
4182 // InvokeFunction requires the function in r4. Move it in there.
4183 __ mr(r4, result_register());
4184 ParameterCount count(arg_count);
4185 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
4186 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4187 __ b(&done);
4189 __ bind(&runtime);
4190 __ push(r3);
4191 __ CallRuntime(Runtime::kCall, args->length());
4192 __ bind(&done);
4194 context()->Plug(r3);
4198 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4199 ZoneList<Expression*>* args = expr->arguments();
4200 DCHECK(args->length() == 2);
4202 // new.target
4203 VisitForStackValue(args->at(0));
4205 // .this_function
4206 VisitForStackValue(args->at(1));
4207 __ CallRuntime(Runtime::kGetPrototype, 1);
4208 __ mr(r4, result_register());
4209 __ Push(r4);
4211 // Load original constructor into r7.
4212 __ LoadP(r7, MemOperand(sp, 1 * kPointerSize));
4214 // Check if the calling frame is an arguments adaptor frame.
4215 Label adaptor_frame, args_set_up, runtime;
4216 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4217 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
4218 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
4219 __ beq(&adaptor_frame);
4221 // A default constructor has no arguments, so no adaptor frame means no args.
4222 __ li(r3, Operand::Zero());
4223 __ b(&args_set_up);
4225 // Copy arguments from adaptor frame.
4226 {
4227 __ bind(&adaptor_frame);
4228 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
4229 __ SmiUntag(r3);
4231 // Get arguments pointer in r5.
4232 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
4233 __ add(r5, r5, r0);
4234 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
4236 Label loop;
4237 __ mtctr(r3);
4238 __ bind(&loop);
4239 // Pre-decrement in order to skip receiver.
4240 __ LoadPU(r6, MemOperand(r5, -kPointerSize));
4241 __ Push(r6);
4242 __ bdnz(&loop);
4243 }
4245 __ bind(&args_set_up);
4246 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
4248 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4249 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4251 __ Drop(1);
4253 context()->Plug(result_register());
4257 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4258 RegExpConstructResultStub stub(isolate());
4259 ZoneList<Expression*>* args = expr->arguments();
4260 DCHECK(args->length() == 3);
4261 VisitForStackValue(args->at(0));
4262 VisitForStackValue(args->at(1));
4263 VisitForAccumulatorValue(args->at(2));
4264 __ Pop(r5, r4);
4265 __ CallStub(&stub);
4266 context()->Plug(r3);
4270 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4271 ZoneList<Expression*>* args = expr->arguments();
4272 DCHECK_EQ(2, args->length());
4273 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4274 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4276 Handle<FixedArray> jsfunction_result_caches(
4277 isolate()->native_context()->jsfunction_result_caches());
4278 if (jsfunction_result_caches->length() <= cache_id) {
4279 __ Abort(kAttemptToUseUndefinedCache);
4280 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4281 context()->Plug(r3);
4282 return;
4283 }
4285 VisitForAccumulatorValue(args->at(1));
4287 Register key = r3;
4288 Register cache = r4;
4289 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4290 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4291 __ LoadP(cache,
4292 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4293 __ LoadP(cache,
4294 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
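4295 // The cache keeps a "finger" at the most recently used key/value pair; probe that entry first before falling back to the runtime lookup.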
4296 Label done, not_found;
4297 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4298 // r5 now holds finger offset as a smi.
4299 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4300 // r6 now points to the start of fixed array elements.
4301 __ SmiToPtrArrayOffset(r5, r5);
4302 __ LoadPUX(r5, MemOperand(r6, r5));
4303 // r6 now points to the key of the pair.
4304 __ cmp(key, r5);
4305 __ bne(&not_found);
4307 __ LoadP(r3, MemOperand(r6, kPointerSize));
4308 __ b(&done);
4310 __ bind(&not_found);
4311 // Call runtime to perform the lookup.
4312 __ Push(cache, key);
4313 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4315 __ bind(&done);
4316 context()->Plug(r3);
4320 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4321 ZoneList<Expression*>* args = expr->arguments();
4322 VisitForAccumulatorValue(args->at(0));
4324 Label materialize_true, materialize_false;
4325 Label* if_true = NULL;
4326 Label* if_false = NULL;
4327 Label* fall_through = NULL;
4328 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4329 &if_false, &fall_through);
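4330 // The string hash field carries a bit recording whether a computed array index is cached in it; testing that mask answers the intrinsic directly.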
4331 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4332 // PPC - assume ip is free
4333 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4334 __ and_(r0, r3, ip, SetRC);
4335 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4336 Split(eq, if_true, if_false, fall_through, cr0);
4338 context()->Plug(if_true, if_false);
4342 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4343 ZoneList<Expression*>* args = expr->arguments();
4344 DCHECK(args->length() == 1);
4345 VisitForAccumulatorValue(args->at(0));
4347 __ AssertString(r3);
4349 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4350 __ IndexFromHash(r3, r3);
4352 context()->Plug(r3);
4356 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4357 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4358 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4359 one_char_separator_loop_entry, long_separator_loop;
4360 ZoneList<Expression*>* args = expr->arguments();
4361 DCHECK(args->length() == 2);
4362 VisitForStackValue(args->at(1));
4363 VisitForAccumulatorValue(args->at(0));
4365 // All aliases of the same register have disjoint lifetimes.
4366 Register array = r3;
4367 Register elements = no_reg; // Will be r3.
4368 Register result = no_reg; // Will be r3.
4369 Register separator = r4;
4370 Register array_length = r5;
4371 Register result_pos = no_reg; // Will be r5
4372 Register string_length = r6;
4373 Register string = r7;
4374 Register element = r8;
4375 Register elements_end = r9;
4376 Register scratch1 = r10;
4377 Register scratch2 = r11;
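4378 // Fast path for Array.prototype.join over sequential one-byte strings; anything else bails out and returns undefined so the caller takes the generic path.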
4379 // Separator operand is on the stack.
4380 __ pop(separator);
4382 // Check that the array is a JSArray.
4383 __ JumpIfSmi(array, &bailout);
4384 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
4385 __ bne(&bailout);
4387 // Check that the array has fast elements.
4388 __ CheckFastElements(scratch1, scratch2, &bailout);
4390 // If the array has length zero, return the empty string.
4391 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4392 __ SmiUntag(array_length);
4393 __ cmpi(array_length, Operand::Zero());
4394 __ bne(&non_trivial_array);
4395 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
4396 __ b(&done);
4398 __ bind(&non_trivial_array);
4400 // Get the FixedArray containing array's elements.
4401 elements = array;
4402 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4403 array = no_reg; // End of array's live range.
4405 // Check that all array elements are sequential one-byte strings, and
4406 // accumulate the sum of their lengths, as a smi-encoded value.
4407 __ li(string_length, Operand::Zero());
4408 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4409 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4410 __ add(elements_end, element, elements_end);
4411 // Loop condition: while (element < elements_end).
4412 // Live values in registers:
4413 // elements: Fixed array of strings.
4414 // array_length: Length of the fixed array of strings (not smi)
4415 // separator: Separator string
4416 // string_length: Accumulated sum of string lengths (smi).
4417 // element: Current array element.
4418 // elements_end: Array end.
4419 if (generate_debug_code_) {
4420 __ cmpi(array_length, Operand::Zero());
4421 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4422 }
4423 __ bind(&loop);
4424 __ LoadP(string, MemOperand(element));
4425 __ addi(element, element, Operand(kPointerSize));
4426 __ JumpIfSmi(string, &bailout);
4427 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4428 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4429 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4430 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4432 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4433 r0);
4434 __ BranchOnOverflow(&bailout);
4436 __ cmp(element, elements_end);
4437 __ blt(&loop);
4439 // If array_length is 1, return elements[0], a string.
4440 __ cmpi(array_length, Operand(1));
4441 __ bne(&not_size_one_array);
4442 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
4443 __ b(&done);
4445 __ bind(&not_size_one_array);
4447 // Live values in registers:
4448 // separator: Separator string
4449 // array_length: Length of the array.
4450 // string_length: Sum of string lengths (smi).
4451 // elements: FixedArray of strings.
4453 // Check that the separator is a flat one-byte string.
4454 __ JumpIfSmi(separator, &bailout);
4455 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4456 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4457 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4459 // Add (separator length times array_length) - separator length to the
4460 // string_length to get the length of the result string.
4461 __ LoadP(scratch1,
4462 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4463 __ sub(string_length, string_length, scratch1);
4464 #if V8_TARGET_ARCH_PPC64
4465 __ SmiUntag(scratch1, scratch1);
4466 __ Mul(scratch2, array_length, scratch1);
4467 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4468 // zero.
4469 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4470 __ bne(&bailout, cr0);
4471 __ SmiTag(scratch2, scratch2);
4472 #else
4473 // array_length is not smi but the other values are, so the result is a smi
4474 __ mullw(scratch2, array_length, scratch1);
4475 __ mulhw(ip, array_length, scratch1);
4476 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4477 // zero.
4478 __ cmpi(ip, Operand::Zero());
4479 __ bne(&bailout);
4480 __ cmpwi(scratch2, Operand::Zero());
4481 __ blt(&bailout);
4482 #endif
4484 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4485 r0);
4486 __ BranchOnOverflow(&bailout);
4487 __ SmiUntag(string_length);
4489 // Get first element in the array to free up the elements register to be used
4490 // for the result.
4491 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4492 result = elements; // End of live range for elements.
4494 // Live values in registers:
4495 // element: First array element
4496 // separator: Separator string
4497 // string_length: Length of result string (not smi)
4498 // array_length: Length of the array.
4499 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4500 elements_end, &bailout);
4501 // Prepare for looping. Set up elements_end to end of the array. Set
4502 // result_pos to the position of the result where to write the first
4503 // character.
4504 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4505 __ add(elements_end, element, elements_end);
4506 result_pos = array_length; // End of live range for array_length.
4507 array_length = no_reg;
4508 __ addi(result_pos, result,
4509 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4511 // Check the length of the separator.
4512 __ LoadP(scratch1,
4513 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4514 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4515 __ beq(&one_char_separator);
4516 __ bgt(&long_separator);
4518 // Empty separator case
4519 __ bind(&empty_separator_loop);
4520 // Live values in registers:
4521 // result_pos: the position to which we are currently copying characters.
4522 // element: Current array element.
4523 // elements_end: Array end.
4525 // Copy next array element to the result.
4526 __ LoadP(string, MemOperand(element));
4527 __ addi(element, element, Operand(kPointerSize));
4528 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4529 __ SmiUntag(string_length);
4530 __ addi(string, string,
4531 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4532 __ CopyBytes(string, result_pos, string_length, scratch1);
4533 __ cmp(element, elements_end);
4534 __ blt(&empty_separator_loop); // End while (element < elements_end).
4535 DCHECK(result.is(r3));
4536 __ b(&done);
4538 // One-character separator case
4539 __ bind(&one_char_separator);
4540 // Replace separator with its one-byte character value.
4541 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4542 // Jump into the loop after the code that copies the separator, so the first
4543 // element is not preceded by a separator
4544 __ b(&one_char_separator_loop_entry);
4546 __ bind(&one_char_separator_loop);
4547 // Live values in registers:
4548 // result_pos: the position to which we are currently copying characters.
4549 // element: Current array element.
4550 // elements_end: Array end.
4551 // separator: Single separator one-byte char (in lower byte).
4553 // Copy the separator character to the result.
4554 __ stb(separator, MemOperand(result_pos));
4555 __ addi(result_pos, result_pos, Operand(1));
4557 // Copy next array element to the result.
4558 __ bind(&one_char_separator_loop_entry);
4559 __ LoadP(string, MemOperand(element));
4560 __ addi(element, element, Operand(kPointerSize));
4561 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4562 __ SmiUntag(string_length);
4563 __ addi(string, string,
4564 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4565 __ CopyBytes(string, result_pos, string_length, scratch1);
4566 __ cmpl(element, elements_end);
4567 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4568 DCHECK(result.is(r3));
4569 __ b(&done);
4571 // Long separator case (separator is more than one character). Entry is at the
4572 // label long_separator below.
4573 __ bind(&long_separator_loop);
4574 // Live values in registers:
4575 // result_pos: the position to which we are currently copying characters.
4576 // element: Current array element.
4577 // elements_end: Array end.
4578 // separator: Separator string.
4580 // Copy the separator to the result.
4581 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4582 __ SmiUntag(string_length);
4583 __ addi(string, separator,
4584 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4585 __ CopyBytes(string, result_pos, string_length, scratch1);
4587 __ bind(&long_separator);
4588 __ LoadP(string, MemOperand(element));
4589 __ addi(element, element, Operand(kPointerSize));
4590 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4591 __ SmiUntag(string_length);
4592 __ addi(string, string,
4593 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4594 __ CopyBytes(string, result_pos, string_length, scratch1);
4595 __ cmpl(element, elements_end);
4596 __ blt(&long_separator_loop); // End while (element < elements_end).
4597 DCHECK(result.is(r3));
4598 __ b(&done);
4600 __ bind(&bailout);
4601 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4602 __ bind(&done);
4603 context()->Plug(r3);
4607 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4608 DCHECK(expr->arguments()->length() == 0);
4609 ExternalReference debug_is_active =
4610 ExternalReference::debug_is_active_address(isolate());
4611 __ mov(ip, Operand(debug_is_active));
4612 __ lbz(r3, MemOperand(ip));
4613 __ SmiTag(r3);
4614 context()->Plug(r3);
4618 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4619 // Push the builtins object as the receiver.
4620 Register receiver = LoadDescriptor::ReceiverRegister();
4621 __ LoadP(receiver, GlobalObjectOperand());
4622 __ LoadP(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4623 __ push(receiver);
4625 // Load the function from the receiver.
4626 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4627 __ mov(LoadDescriptor::SlotRegister(),
4628 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4629 CallLoadIC(NOT_INSIDE_TYPEOF);
4633 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4634 ZoneList<Expression*>* args = expr->arguments();
4635 int arg_count = args->length();
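4636 // The receiver and call target were already pushed by EmitLoadJSRuntimeFunction; fetch the target into r4 for the CallFunctionStub.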
4637 SetCallPosition(expr, arg_count);
4638 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4639 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4640 __ CallStub(&stub);
4644 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4645 ZoneList<Expression*>* args = expr->arguments();
4646 int arg_count = args->length();
4648 if (expr->is_jsruntime()) {
4649 Comment cmnt(masm_, "[ CallRuntime");
4650 EmitLoadJSRuntimeFunction(expr);
4652 // Push the target function under the receiver.
4653 __ LoadP(ip, MemOperand(sp, 0));
4654 __ push(ip);
4655 __ StoreP(r3, MemOperand(sp, kPointerSize));
4657 // Push the arguments ("left-to-right").
4658 for (int i = 0; i < arg_count; i++) {
4659 VisitForStackValue(args->at(i));
4660 }
4662 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4663 EmitCallJSRuntimeFunction(expr);
4665 // Restore context register.
4666 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4668 context()->DropAndPlug(1, r3);
4669 } else {
4671 const Runtime::Function* function = expr->function();
4672 switch (function->function_id) {
4673 #define CALL_INTRINSIC_GENERATOR(Name) \
4674 case Runtime::kInline##Name: { \
4675 Comment cmnt(masm_, "[ Inline" #Name); \
4676 return Emit##Name(expr); \
4677 }
4678 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4679 #undef CALL_INTRINSIC_GENERATOR
4680 default: {
4681 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4682 // Push the arguments ("left-to-right").
4683 for (int i = 0; i < arg_count; i++) {
4684 VisitForStackValue(args->at(i));
4685 }
4687 // Call the C runtime function.
4688 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4689 __ CallRuntime(expr->function(), arg_count);
4690 context()->Plug(r3);
4697 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4698 switch (expr->op()) {
4699 case Token::DELETE: {
4700 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4701 Property* property = expr->expression()->AsProperty();
4702 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4704 if (property != NULL) {
4705 VisitForStackValue(property->obj());
4706 VisitForStackValue(property->key());
4707 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
4708 __ push(r4);
4709 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4710 context()->Plug(r3);
4711 } else if (proxy != NULL) {
4712 Variable* var = proxy->var();
4713 // Delete of an unqualified identifier is disallowed in strict mode but
4714 // "delete this" is allowed.
4715 bool is_this = var->HasThisName(isolate());
4716 DCHECK(is_sloppy(language_mode()) || is_this);
4717 if (var->IsUnallocatedOrGlobalSlot()) {
4718 __ LoadP(r5, GlobalObjectOperand());
4719 __ mov(r4, Operand(var->name()));
4720 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4721 __ Push(r5, r4, r3);
4722 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4723 context()->Plug(r3);
4724 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4725 // Result of deleting non-global, non-dynamic variables is false.
4726 // The subexpression does not have side effects.
4727 context()->Plug(is_this);
4728 } else {
4729 // Non-global variable. Call the runtime to try to delete from the
4730 // context where the variable was introduced.
4731 DCHECK(!context_register().is(r5));
4732 __ mov(r5, Operand(var->name()));
4733 __ Push(context_register(), r5);
4734 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4735 context()->Plug(r3);
4736 }
4737 } else {
4738 // Result of deleting non-property, non-variable reference is true.
4739 // The subexpression may have side effects.
4740 VisitForEffect(expr->expression());
4741 context()->Plug(true);
4742 }
4743 break;
4744 }
4746 case Token::VOID: {
4747 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4748 VisitForEffect(expr->expression());
4749 context()->Plug(Heap::kUndefinedValueRootIndex);
4750 break;
4751 }
4753 case Token::NOT: {
4754 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4755 if (context()->IsEffect()) {
4756 // Unary NOT has no side effects so it's only necessary to visit the
4757 // subexpression. Match the optimizing compiler by not branching.
4758 VisitForEffect(expr->expression());
4759 } else if (context()->IsTest()) {
4760 const TestContext* test = TestContext::cast(context());
4761 // The labels are swapped for the recursive call.
4762 VisitForControl(expr->expression(), test->false_label(),
4763 test->true_label(), test->fall_through());
4764 context()->Plug(test->true_label(), test->false_label());
4765 } else {
4766 // We handle value contexts explicitly rather than simply visiting
4767 // for control and plugging the control flow into the context,
4768 // because we need to prepare a pair of extra administrative AST ids
4769 // for the optimizing compiler.
4770 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4771 Label materialize_true, materialize_false, done;
4772 VisitForControl(expr->expression(), &materialize_false,
4773 &materialize_true, &materialize_true);
4774 __ bind(&materialize_true);
4775 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4776 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4777 if (context()->IsStackValue()) __ push(r3);
4778 __ b(&done);
4779 __ bind(&materialize_false);
4780 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4781 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4782 if (context()->IsStackValue()) __ push(r3);
4783 __ bind(&done);
4784 }
4785 break;
4786 }
4788 case Token::TYPEOF: {
4789 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4790 {
4791 AccumulatorValueContext context(this);
4792 VisitForTypeofValue(expr->expression());
4793 }
4794 __ mr(r6, r3);
4795 TypeofStub typeof_stub(isolate());
4796 __ CallStub(&typeof_stub);
4797 context()->Plug(r3);
4798 break;
4799 }
4801 default:
4802 UNREACHABLE();
4803 }
4807 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4808 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4810 Comment cmnt(masm_, "[ CountOperation");
4812 Property* prop = expr->expression()->AsProperty();
4813 LhsKind assign_type = Property::GetAssignType(prop);
4815 // Evaluate expression and get value.
4816 if (assign_type == VARIABLE) {
4817 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4818 AccumulatorValueContext context(this);
4819 EmitVariableLoad(expr->expression()->AsVariableProxy());
4820 } else {
4821 // Reserve space for result of postfix operation.
4822 if (expr->is_postfix() && !context()->IsEffect()) {
4823 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4824 __ push(ip);
4825 }
4826 switch (assign_type) {
4827 case NAMED_PROPERTY: {
4828 // Put the object both on the stack and in the register.
4829 VisitForStackValue(prop->obj());
4830 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4831 EmitNamedPropertyLoad(prop);
4832 break;
4833 }
4835 case NAMED_SUPER_PROPERTY: {
4836 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4837 VisitForAccumulatorValue(
4838 prop->obj()->AsSuperPropertyReference()->home_object());
4839 __ Push(result_register());
4840 const Register scratch = r4;
4841 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4842 __ Push(scratch, result_register());
4843 EmitNamedSuperPropertyLoad(prop);
4844 break;
4845 }
4847 case KEYED_SUPER_PROPERTY: {
4848 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4849 VisitForAccumulatorValue(
4850 prop->obj()->AsSuperPropertyReference()->home_object());
4851 const Register scratch = r4;
4852 const Register scratch1 = r5;
4853 __ mr(scratch, result_register());
4854 VisitForAccumulatorValue(prop->key());
4855 __ Push(scratch, result_register());
4856 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4857 __ Push(scratch1, scratch, result_register());
4858 EmitKeyedSuperPropertyLoad(prop);
4859 break;
4860 }
4862 case KEYED_PROPERTY: {
4863 VisitForStackValue(prop->obj());
4864 VisitForStackValue(prop->key());
4865 __ LoadP(LoadDescriptor::ReceiverRegister(),
4866 MemOperand(sp, 1 * kPointerSize));
4867 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4868 EmitKeyedPropertyLoad(prop);
4869 break;
4870 }
4872 case VARIABLE:
4873 UNREACHABLE();
4874 }
4875 }
4877 // We need a second deoptimization point after loading the value
4878 // in case evaluating the property load may have a side effect.
4879 if (assign_type == VARIABLE) {
4880 PrepareForBailout(expr->expression(), TOS_REG);
4881 } else {
4882 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4883 }
4885 // Inline smi case if we are in a loop.
4886 Label stub_call, done;
4887 JumpPatchSite patch_site(masm_);
4889 int count_value = expr->op() == Token::INC ? 1 : -1;
4890 if (ShouldInlineSmiCase(expr->op())) {
4891 Label slow;
4892 patch_site.EmitJumpIfNotSmi(r3, &slow);
4894 // Save result for postfix expressions.
4895 if (expr->is_postfix()) {
4896 if (!context()->IsEffect()) {
4897 // Save the result on the stack. If we have a named or keyed property
4898 // we store the result under the receiver that is currently on top
4899 // of the stack.
4900 switch (assign_type) {
4901 case VARIABLE:
4902 __ push(r3);
4903 break;
4904 case NAMED_PROPERTY:
4905 __ StoreP(r3, MemOperand(sp, kPointerSize));
4906 break;
4907 case NAMED_SUPER_PROPERTY:
4908 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4909 break;
4910 case KEYED_PROPERTY:
4911 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4912 break;
4913 case KEYED_SUPER_PROPERTY:
4914 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4915 break;
4916 }
4917 }
4918 }
4920 Register scratch1 = r4;
4921 Register scratch2 = r5;
4922 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4923 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4924 __ BranchOnNoOverflow(&done);
4925 // Call stub. Undo operation first.
4926 __ sub(r3, r3, scratch1);
4927 __ b(&stub_call);
4928 __ bind(&slow);
4929 }
4930 if (!is_strong(language_mode())) {
4931 ToNumberStub convert_stub(isolate());
4932 __ CallStub(&convert_stub);
4933 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4934 }
4936 // Save result for postfix expressions.
4937 if (expr->is_postfix()) {
4938 if (!context()->IsEffect()) {
4939 // Save the result on the stack. If we have a named or keyed property
4940 // we store the result under the receiver that is currently on top
4941 // of the stack.
4942 switch (assign_type) {
4943 case VARIABLE:
4944 __ push(r3);
4945 break;
4946 case NAMED_PROPERTY:
4947 __ StoreP(r3, MemOperand(sp, kPointerSize));
4948 break;
4949 case NAMED_SUPER_PROPERTY:
4950 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4951 break;
4952 case KEYED_PROPERTY:
4953 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4954 break;
4955 case KEYED_SUPER_PROPERTY:
4956 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4957 break;
4958 }
4959 }
4960 }
4962 __ bind(&stub_call);
4963 __ mr(r4, r3);
4964 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4966 SetExpressionPosition(expr);
4968 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4969 strength(language_mode())).code();
4970 CallIC(code, expr->CountBinOpFeedbackId());
4971 patch_site.EmitPatchInfo();
4972 __ bind(&done);
4974 if (is_strong(language_mode())) {
4975 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4976 }
4977 // Store the value returned in r3.
4978 switch (assign_type) {
4979 case VARIABLE:
4980 if (expr->is_postfix()) {
4981 {
4982 EffectContext context(this);
4983 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4984 Token::ASSIGN, expr->CountSlot());
4985 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4986 context.Plug(r3);
4987 }
4988 // For all contexts except EffectContext we have the result on
4989 // top of the stack.
4990 if (!context()->IsEffect()) {
4991 context()->PlugTOS();
4992 }
4993 } else {
4994 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4995 Token::ASSIGN, expr->CountSlot());
4996 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4997 context()->Plug(r3);
4998 }
4999 break;
5000 case NAMED_PROPERTY: {
5001 __ mov(StoreDescriptor::NameRegister(),
5002 Operand(prop->key()->AsLiteral()->value()));
5003 __ pop(StoreDescriptor::ReceiverRegister());
5004 if (FLAG_vector_stores) {
5005 EmitLoadStoreICSlot(expr->CountSlot());
5006 CallStoreIC();
5007 } else {
5008 CallStoreIC(expr->CountStoreFeedbackId());
5009 }
5010 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5011 if (expr->is_postfix()) {
5012 if (!context()->IsEffect()) {
5013 context()->PlugTOS();
5014 }
5015 } else {
5016 context()->Plug(r3);
5017 }
5018 break;
5019 }
5020 case NAMED_SUPER_PROPERTY: {
5021 EmitNamedSuperPropertyStore(prop);
5022 if (expr->is_postfix()) {
5023 if (!context()->IsEffect()) {
5024 context()->PlugTOS();
5025 }
5026 } else {
5027 context()->Plug(r3);
5028 }
5029 break;
5030 }
5031 case KEYED_SUPER_PROPERTY: {
5032 EmitKeyedSuperPropertyStore(prop);
5033 if (expr->is_postfix()) {
5034 if (!context()->IsEffect()) {
5035 context()->PlugTOS();
5036 }
5037 } else {
5038 context()->Plug(r3);
5039 }
5040 break;
5041 }
5042 case KEYED_PROPERTY: {
5043 __ Pop(StoreDescriptor::ReceiverRegister(),
5044 StoreDescriptor::NameRegister());
5045 Handle<Code> ic =
5046 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5047 if (FLAG_vector_stores) {
5048 EmitLoadStoreICSlot(expr->CountSlot());
5049 CallIC(ic);
5050 } else {
5051 CallIC(ic, expr->CountStoreFeedbackId());
5052 }
5053 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5054 if (expr->is_postfix()) {
5055 if (!context()->IsEffect()) {
5056 context()->PlugTOS();
5057 }
5058 } else {
5059 context()->Plug(r3);
5060 }
5067 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5068 Expression* sub_expr,
5069 Handle<String> check) {
5070 Label materialize_true, materialize_false;
5071 Label* if_true = NULL;
5072 Label* if_false = NULL;
5073 Label* fall_through = NULL;
5074 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5075 &if_false, &fall_through);
5077 {
5078 AccumulatorValueContext context(this);
5079 VisitForTypeofValue(sub_expr);
5080 }
5081 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
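5082 // Dispatch on the literal string: each known typeof result gets an inlined check, and unrecognized literals simply fall through to false.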
5083 Factory* factory = isolate()->factory();
5084 if (String::Equals(check, factory->number_string())) {
5085 __ JumpIfSmi(r3, if_true);
5086 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5087 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5088 __ cmp(r3, ip);
5089 Split(eq, if_true, if_false, fall_through);
5090 } else if (String::Equals(check, factory->string_string())) {
5091 __ JumpIfSmi(r3, if_false);
5092 // Check for undetectable objects => false.
5093 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
5094 __ bge(if_false);
5095 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5096 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
5097 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5098 Split(eq, if_true, if_false, fall_through, cr0);
5099 } else if (String::Equals(check, factory->symbol_string())) {
5100 __ JumpIfSmi(r3, if_false);
5101 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
5102 Split(eq, if_true, if_false, fall_through);
5103 } else if (String::Equals(check, factory->float32x4_string())) {
5104 __ JumpIfSmi(r3, if_false);
5105 __ CompareObjectType(r3, r3, r4, FLOAT32X4_TYPE);
5106 Split(eq, if_true, if_false, fall_through);
5107 } else if (String::Equals(check, factory->boolean_string())) {
5108 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
5109 __ beq(if_true);
5110 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
5111 Split(eq, if_true, if_false, fall_through);
5112 } else if (String::Equals(check, factory->undefined_string())) {
5113 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
5114 __ beq(if_true);
5115 __ JumpIfSmi(r3, if_false);
5116 // Check for undetectable objects => true.
5117 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5118 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5119 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5120 Split(ne, if_true, if_false, fall_through, cr0);
5122 } else if (String::Equals(check, factory->function_string())) {
5123 __ JumpIfSmi(r3, if_false);
5124 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5125 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
5126 __ beq(if_true);
5127 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
5128 Split(eq, if_true, if_false, fall_through);
5129 } else if (String::Equals(check, factory->object_string())) {
5130 __ JumpIfSmi(r3, if_false);
5131 __ CompareRoot(r3, Heap::kNullValueRootIndex);
5132 __ beq(if_true);
5133 // Check for JS objects => true.
5134 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5135 __ blt(if_false);
5136 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5137 __ bgt(if_false);
5138 // Check for undetectable objects => false.
5139 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5140 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5141 Split(eq, if_true, if_false, fall_through, cr0);
5142 } else {
5143 if (if_false != fall_through) __ b(if_false);
5144 }
5145 context()->Plug(if_true, if_false);
5149 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5150 Comment cmnt(masm_, "[ CompareOperation");
5151 SetExpressionPosition(expr);
5153 // First we try a fast inlined version of the compare when one of
5154 // the operands is a literal.
5155 if (TryLiteralCompare(expr)) return;
5157 // Always perform the comparison for its control flow. Pack the result
5158 // into the expression's context after the comparison is performed.
5159 Label materialize_true, materialize_false;
5160 Label* if_true = NULL;
5161 Label* if_false = NULL;
5162 Label* fall_through = NULL;
5163 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5164 &if_false, &fall_through);
5166 Token::Value op = expr->op();
5167 VisitForStackValue(expr->left());
5168 switch (op) {
5169 case Token::IN:
5170 VisitForStackValue(expr->right());
5171 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5172 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5173 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5174 __ cmp(r3, ip);
5175 Split(eq, if_true, if_false, fall_through);
5176 break;
5178 case Token::INSTANCEOF: {
5179 VisitForStackValue(expr->right());
5180 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5181 __ CallStub(&stub);
5182 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5183 // The stub returns 0 for true.
5184 __ cmpi(r3, Operand::Zero());
5185 Split(eq, if_true, if_false, fall_through);
5186 break;
5187 }
5189 default: {
5190 VisitForAccumulatorValue(expr->right());
5191 Condition cond = CompareIC::ComputeCondition(op);
5192 __ pop(r4);
5194 bool inline_smi_code = ShouldInlineSmiCase(op);
5195 JumpPatchSite patch_site(masm_);
5196 if (inline_smi_code) {
5197 Label slow_case;
5198 __ orx(r5, r3, r4);
5199 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
5200 __ cmp(r4, r3);
5201 Split(cond, if_true, if_false, NULL);
5202 __ bind(&slow_case);
5203 }
5205 Handle<Code> ic = CodeFactory::CompareIC(
5206 isolate(), op, strength(language_mode())).code();
5207 CallIC(ic, expr->CompareOperationFeedbackId());
5208 patch_site.EmitPatchInfo();
5209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5210 __ cmpi(r3, Operand::Zero());
5211 Split(cond, if_true, if_false, fall_through);
5212 }
5213 }
5215 // Convert the result of the comparison into one expected for this
5216 // expression's context.
5217 context()->Plug(if_true, if_false);
5221 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5222 Expression* sub_expr,
5223 NilValue nil) {
5224 Label materialize_true, materialize_false;
5225 Label* if_true = NULL;
5226 Label* if_false = NULL;
5227 Label* fall_through = NULL;
5228 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5229 &if_false, &fall_through);
5231 VisitForAccumulatorValue(sub_expr);
5232 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5233 if (expr->op() == Token::EQ_STRICT) {
5234 Heap::RootListIndex nil_value = nil == kNullValue
5235 ? Heap::kNullValueRootIndex
5236 : Heap::kUndefinedValueRootIndex;
5237 __ LoadRoot(r4, nil_value);
5238 __ cmp(r3, r4);
5239 Split(eq, if_true, if_false, fall_through);
5240 } else {
5241 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5242 CallIC(ic, expr->CompareOperationFeedbackId());
5243 __ cmpi(r3, Operand::Zero());
5244 Split(ne, if_true, if_false, fall_through);
5245 }
5246 context()->Plug(if_true, if_false);
5250 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5251 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5252 context()->Plug(r3);
5256 Register FullCodeGenerator::result_register() { return r3; }
5259 Register FullCodeGenerator::context_register() { return cp; }
5262 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5263 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5264 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5268 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5269 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5273 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5274 Scope* closure_scope = scope()->ClosureScope();
5275 if (closure_scope->is_script_scope() ||
5276 closure_scope->is_module_scope()) {
5277 // Contexts nested in the native context have a canonical empty function
5278 // as their closure, not the anonymous closure containing the global
5279 // code. Pass a smi sentinel and let the runtime look up the empty
5280 // function.
5281 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5282 } else if (closure_scope->is_eval_scope()) {
5283 // Contexts created by a call to eval have the same closure as the
5284 // context calling eval, not the anonymous closure containing the eval
5285 // code. Fetch it from the context.
5286 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5287 } else {
5288 DCHECK(closure_scope->is_function_scope());
5289 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5290 }
5291 __ push(ip);
5295 // ----------------------------------------------------------------------------
5296 // Non-local control flow support.
5298 void FullCodeGenerator::EnterFinallyBlock() {
5299 DCHECK(!result_register().is(r4));
5300 // Store result register while executing finally block.
5301 __ push(result_register());
5302 // Cook return address in link register to stack (smi encoded Code* delta)
5303 __ mflr(r4);
5304 __ mov(ip, Operand(masm_->CodeObject()));
5305 __ sub(r4, r4, ip);
5306 __ SmiTag(r4);
5308 // Store the cooked return address while executing the finally block.
5309 __ push(r4);
5311 // Store pending message while executing finally block.
5312 ExternalReference pending_message_obj =
5313 ExternalReference::address_of_pending_message_obj(isolate());
5314 __ mov(ip, Operand(pending_message_obj));
5315 __ LoadP(r4, MemOperand(ip));
5316 __ push(r4);
5318 ClearPendingMessage();
5322 void FullCodeGenerator::ExitFinallyBlock() {
5323 DCHECK(!result_register().is(r4));
5324 // Restore pending message from stack.
5325 __ pop(r4);
5326 ExternalReference pending_message_obj =
5327 ExternalReference::address_of_pending_message_obj(isolate());
5328 __ mov(ip, Operand(pending_message_obj));
5329 __ StoreP(r4, MemOperand(ip));
5331 // Restore the cooked return address from the stack.
5332 __ pop(r4);
5334 // Uncook return address and return.
5335 __ pop(result_register());
5336 __ SmiUntag(r4);
5337 __ mov(ip, Operand(masm_->CodeObject()));
5338 __ add(ip, ip, r4);
5339 __ mtctr(ip);
5340 __ bctr();
5344 void FullCodeGenerator::ClearPendingMessage() {
5345 DCHECK(!result_register().is(r4));
5346 ExternalReference pending_message_obj =
5347 ExternalReference::address_of_pending_message_obj(isolate());
5348 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5349 __ mov(ip, Operand(pending_message_obj));
5350 __ StoreP(r4, MemOperand(ip));
5354 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5355 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5356 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5357 Operand(SmiFromSlot(slot)));
5364 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5365 BackEdgeState target_state,
5366 Code* replacement_code) {
5367 Address mov_address = Assembler::target_address_from_return_address(pc);
5368 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5369 CodePatcher patcher(cmp_address, 1);
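5370 // Only the first instruction of the back edge sequence (cmpi or crset) is patched here; the call target in the mov is rewritten further below.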
5371 switch (target_state) {
5372 case INTERRUPT: {
5373 // <decrement profiling counter>
5374 // cmpi r6, 0
5375 // bge <ok> ;; not changed
5376 // mov r12, <interrupt stub address>
5377 // mtlr r12
5378 // blrl
5379 // <reset profiling counter>
5380 // ok-label
5381 patcher.masm()->cmpi(r6, Operand::Zero());
5382 break;
5383 }
5384 case ON_STACK_REPLACEMENT:
5385 case OSR_AFTER_STACK_CHECK:
5386 // <decrement profiling counter>
5387 // crset
5388 // bge <ok> ;; not changed
5389 // mov r12, <on-stack replacement address>
5390 // mtlr r12
5391 // blrl
5392 // <reset profiling counter>
5393 // ok-label ----- pc_after points here
5395 // Set the LT bit such that bge is a NOP
5396 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
5397 break;
5398 }
5400 // Replace the stack check address in the mov sequence with the
5401 // entry address of the replacement code.
5402 Assembler::set_target_address_at(mov_address, unoptimized_code,
5403 replacement_code->entry());
5405 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5406 unoptimized_code, mov_address, replacement_code);
5410 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5411 Isolate* isolate, Code* unoptimized_code, Address pc) {
5412 Address mov_address = Assembler::target_address_from_return_address(pc);
5413 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5414 Address interrupt_address =
5415 Assembler::target_address_at(mov_address, unoptimized_code);
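5416 // A cmpi at the patch site means the interrupt check is still active; a crset means a replacement (possibly OSR) call has been patched in.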
5417 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
5418 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
5419 return INTERRUPT;
5420 }
5422 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5424 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5425 return ON_STACK_REPLACEMENT;
5426 }
5428 DCHECK(interrupt_address ==
5429 isolate->builtins()->OsrAfterStackCheck()->entry());
5430 return OSR_AFTER_STACK_CHECK;
5431 }
5432 } // namespace internal
5433 } // namespace v8
5434 #endif // V8_TARGET_ARCH_PPC