1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
19 #include "src/ppc/code-stubs-ppc.h"
20 #include "src/ppc/macro-assembler-ppc.h"
25 #define __ ACCESS_MASM(masm_)
27 // A patch site is a location in the code which it is possible to patch. This
28 // class has a number of methods to emit the code which is patchable and the
29 // method EmitPatchInfo to record a marker back to the patchable code. This
30 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
31 // immediate value is used) is the delta from the pc to the first instruction of
32 // the patchable code.
33 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it
34 class JumpPatchSite BASE_EMBEDDED {
36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
38 info_emitted_ = false;
42 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
44 // When initially emitting this ensure that a jump is always generated to skip
45 // the inlined smi code.
46 void EmitJumpIfNotSmi(Register reg, Label* target) {
47 DCHECK(!patch_site_.is_bound() && !info_emitted_);
48 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
49 __ bind(&patch_site_);
50 __ cmp(reg, reg, cr0);
51 __ beq(target, cr0); // Always taken before patched.
54 // When initially emitting this ensure that a jump is never generated to skip
55 // the inlined smi code.
56 void EmitJumpIfSmi(Register reg, Label* target) {
57 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
58 DCHECK(!patch_site_.is_bound() && !info_emitted_);
59 __ bind(&patch_site_);
60 __ cmp(reg, reg, cr0);
61 __ bne(target, cr0); // Never taken before patched.
64 void EmitPatchInfo() {
65 if (patch_site_.is_bound()) {
66 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
68 // I believe this is using reg as the high bits of of the offset
69 reg.set_code(delta_to_patch_site / kOff16Mask);
70 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
75 __ nop(); // Signals no inlined code.
80 MacroAssembler* masm_;
88 // Generate code for a JS function. On entry to the function the receiver
89 // and arguments have been pushed on the stack left to right. The actual
90 // argument count matches the formal parameter count expected by the
93 // The live registers are:
94 // o r4: the JS function object being called (i.e., ourselves)
96 // o fp: our caller's frame pointer (aka r31)
97 // o sp: stack pointer
98 // o lr: return address
99 // o ip: our own function entry (required by the prologue)
101 // The function builds a JS frame. Please see JavaScriptFrameConstants in
102 // frames-ppc.h for its layout.
103 void FullCodeGenerator::Generate() {
104 CompilationInfo* info = info_;
105 profiling_counter_ = isolate()->factory()->NewCell(
106 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
107 SetFunctionPosition(function());
108 Comment cmnt(masm_, "[ function compiled by full code generator");
110 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
113 if (strlen(FLAG_stop_at) > 0 &&
114 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
119 // Sloppy mode functions and builtins need to replace the receiver with the
120 // global proxy when called as functions (without an explicit receiver
122 if (is_sloppy(info->language_mode()) && !info->is_native() &&
123 info->MayUseThis() && info->scope()->has_this_declaration()) {
125 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
126 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
127 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
130 __ LoadP(r5, GlobalObjectOperand());
131 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
133 __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
138 // Open a frame scope to indicate that there is a frame on the stack. The
139 // MANUAL indicates that the scope shouldn't actually generate code to set up
140 // the frame (that is done below).
141 FrameScope frame_scope(masm_, StackFrame::MANUAL);
142 int prologue_offset = masm_->pc_offset();
144 if (prologue_offset) {
145 // Prologue logic requires it's starting address in ip and the
146 // corresponding offset from the function entry.
147 prologue_offset += Instruction::kInstrSize;
148 __ addi(ip, ip, Operand(prologue_offset));
150 info->set_prologue_offset(prologue_offset);
151 __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
152 info->AddNoFrameRange(0, masm_->pc_offset());
155 Comment cmnt(masm_, "[ Allocate locals");
156 int locals_count = info->scope()->num_stack_slots();
157 // Generators allocate locals, if any, in context slots.
158 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
159 if (locals_count > 0) {
160 if (locals_count >= 128) {
162 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
163 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
165 __ bc_short(ge, &ok);
166 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
169 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
170 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
171 if (locals_count >= kMaxPushes) {
172 int loop_iterations = locals_count / kMaxPushes;
173 __ mov(r5, Operand(loop_iterations));
176 __ bind(&loop_header);
178 for (int i = 0; i < kMaxPushes; i++) {
181 // Continue loop if not done.
182 __ bdnz(&loop_header);
184 int remaining = locals_count % kMaxPushes;
185 // Emit the remaining pushes.
186 for (int i = 0; i < remaining; i++) {
192 bool function_in_register = true;
194 // Possibly allocate a local context.
195 if (info->scope()->num_heap_slots() > 0) {
196 // Argument to NewContext is the function, which is still in r4.
197 Comment cmnt(masm_, "[ Allocate context");
198 bool need_write_barrier = true;
199 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
200 if (info->scope()->is_script_scope()) {
202 __ Push(info->scope()->GetScopeInfo(info->isolate()));
203 __ CallRuntime(Runtime::kNewScriptContext, 2);
204 } else if (slots <= FastNewContextStub::kMaximumSlots) {
205 FastNewContextStub stub(isolate(), slots);
207 // Result of FastNewContextStub is always in new space.
208 need_write_barrier = false;
211 __ CallRuntime(Runtime::kNewFunctionContext, 1);
213 function_in_register = false;
214 // Context is returned in r3. It replaces the context passed to us.
215 // It's saved in the stack and kept live in cp.
217 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
218 // Copy any necessary parameters into the context.
219 int num_parameters = info->scope()->num_parameters();
220 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
221 for (int i = first_parameter; i < num_parameters; i++) {
222 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
223 if (var->IsContextSlot()) {
224 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
225 (num_parameters - 1 - i) * kPointerSize;
226 // Load parameter from stack.
227 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
228 // Store it in the context.
229 MemOperand target = ContextOperand(cp, var->index());
230 __ StoreP(r3, target, r0);
232 // Update the write barrier.
233 if (need_write_barrier) {
234 __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
235 kLRHasBeenSaved, kDontSaveFPRegs);
236 } else if (FLAG_debug_code) {
238 __ JumpIfInNewSpace(cp, r3, &done);
239 __ Abort(kExpectedNewSpaceObject);
246 // Possibly set up a local binding to the this function which is used in
247 // derived constructors with super calls.
248 Variable* this_function_var = scope()->this_function_var();
249 if (this_function_var != nullptr) {
250 Comment cmnt(masm_, "[ This function");
251 if (!function_in_register) {
252 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
253 // The write barrier clobbers register again, keep is marked as such.
255 SetVar(this_function_var, r4, r3, r5);
258 Variable* new_target_var = scope()->new_target_var();
259 if (new_target_var != nullptr) {
260 Comment cmnt(masm_, "[ new.target");
262 // Get the frame pointer for the calling frame.
263 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
265 // Skip the arguments adaptor frame if it exists.
266 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
267 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
270 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
273 // Check the marker in the calling frame.
274 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
275 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
276 Label non_construct_frame, done;
278 __ bne(&non_construct_frame);
279 __ LoadP(r3, MemOperand(
280 r5, ConstructFrameConstants::kOriginalConstructorOffset));
283 __ bind(&non_construct_frame);
284 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
287 SetVar(new_target_var, r3, r5, r6);
290 // Possibly allocate RestParameters
292 Variable* rest_param = scope()->rest_parameter(&rest_index);
294 Comment cmnt(masm_, "[ Allocate rest parameter array");
296 int num_parameters = info->scope()->num_parameters();
297 int offset = num_parameters * kPointerSize;
299 __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
300 __ LoadSmiLiteral(r5, Smi::FromInt(num_parameters));
301 __ LoadSmiLiteral(r4, Smi::FromInt(rest_index));
302 __ LoadSmiLiteral(r3, Smi::FromInt(language_mode()));
303 __ Push(r6, r5, r4, r3);
305 RestParamAccessStub stub(isolate());
308 SetVar(rest_param, r3, r4, r5);
311 Variable* arguments = scope()->arguments();
312 if (arguments != NULL) {
313 // Function uses arguments object.
314 Comment cmnt(masm_, "[ Allocate arguments object");
315 if (!function_in_register) {
316 // Load this again, if it's used by the local context below.
317 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
321 // Receiver is just before the parameters on the caller's stack.
322 int num_parameters = info->scope()->num_parameters();
323 int offset = num_parameters * kPointerSize;
324 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
325 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
328 // Arguments to ArgumentsAccessStub:
329 // function, receiver address, parameter count.
330 // The stub will rewrite receiever and parameter count if the previous
331 // stack frame was an arguments adapter frame.
332 ArgumentsAccessStub::Type type;
333 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
334 type = ArgumentsAccessStub::NEW_STRICT;
335 } else if (function()->has_duplicate_parameters()) {
336 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
338 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
340 ArgumentsAccessStub stub(isolate(), type);
343 SetVar(arguments, r3, r4, r5);
347 __ CallRuntime(Runtime::kTraceEnter, 0);
350 // Visit the declarations and body unless there is an illegal
352 if (scope()->HasIllegalRedeclaration()) {
353 Comment cmnt(masm_, "[ Declarations");
354 scope()->VisitIllegalRedeclaration(this);
357 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
359 Comment cmnt(masm_, "[ Declarations");
360 // For named function expressions, declare the function name as a
362 if (scope()->is_function_scope() && scope()->function() != NULL) {
363 VariableDeclaration* function = scope()->function();
364 DCHECK(function->proxy()->var()->mode() == CONST ||
365 function->proxy()->var()->mode() == CONST_LEGACY);
366 DCHECK(!function->proxy()->var()->IsUnallocatedOrGlobalSlot());
367 VisitVariableDeclaration(function);
369 VisitDeclarations(scope()->declarations());
373 Comment cmnt(masm_, "[ Stack check");
374 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
376 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
378 __ bc_short(ge, &ok);
379 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
384 Comment cmnt(masm_, "[ Body");
385 DCHECK(loop_depth() == 0);
386 VisitStatements(function()->body());
387 DCHECK(loop_depth() == 0);
391 // Always emit a 'return undefined' in case control fell off the end of
394 Comment cmnt(masm_, "[ return <undefined>;");
395 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
397 EmitReturnSequence();
399 if (HasStackOverflow()) {
400 masm_->AbortConstantPoolBuilding();
405 void FullCodeGenerator::ClearAccumulator() {
406 __ LoadSmiLiteral(r3, Smi::FromInt(0));
410 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
411 __ mov(r5, Operand(profiling_counter_));
412 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
413 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
414 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
418 void FullCodeGenerator::EmitProfilingCounterReset() {
419 int reset_value = FLAG_interrupt_budget;
420 if (info_->is_debug()) {
421 // Detect debug break requests as soon as possible.
422 reset_value = FLAG_interrupt_budget >> 4;
424 __ mov(r5, Operand(profiling_counter_));
425 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
426 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
430 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
431 Label* back_edge_target) {
432 Comment cmnt(masm_, "[ Back edge bookkeeping");
435 DCHECK(back_edge_target->is_bound());
436 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
437 kCodeSizeMultiplier / 2;
438 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
439 EmitProfilingCounterDecrement(weight);
441 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
442 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
443 // BackEdgeTable::PatchAt manipulates this sequence.
444 __ cmpi(r6, Operand::Zero());
445 __ bc_short(ge, &ok);
446 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
448 // Record a mapping of this PC offset to the OSR id. This is used to find
449 // the AST id from the unoptimized code in order to use it as a key into
450 // the deoptimization input data found in the optimized code.
451 RecordBackEdge(stmt->OsrEntryId());
453 EmitProfilingCounterReset();
456 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
457 // Record a mapping of the OSR id to this PC. This is used if the OSR
458 // entry becomes the target of a bailout. We don't expect it to be, but
459 // we want it to work if it is.
460 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
464 void FullCodeGenerator::EmitReturnSequence() {
465 Comment cmnt(masm_, "[ Return sequence");
466 if (return_label_.is_bound()) {
467 __ b(&return_label_);
469 __ bind(&return_label_);
471 // Push the return value on the stack as the parameter.
472 // Runtime::TraceExit returns its parameter in r3
474 __ CallRuntime(Runtime::kTraceExit, 1);
476 // Pretend that the exit is a backwards jump to the entry.
478 if (info_->ShouldSelfOptimize()) {
479 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
481 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
482 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
484 EmitProfilingCounterDecrement(weight);
486 __ cmpi(r6, Operand::Zero());
489 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
491 EmitProfilingCounterReset();
495 // Add a label for checking the size of the code used for returning.
496 Label check_exit_codesize;
497 __ bind(&check_exit_codesize);
499 // Make sure that the constant pool is not emitted inside of the return
502 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
503 int32_t arg_count = info_->scope()->num_parameters() + 1;
504 int32_t sp_delta = arg_count * kPointerSize;
505 SetReturnPosition(function());
507 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
508 #if V8_TARGET_ARCH_PPC64
509 // With 64bit we may need nop() instructions to ensure we have
510 // enough space to SetDebugBreakAtReturn()
511 if (is_int16(sp_delta)) {
512 if (!FLAG_enable_embedded_constant_pool) masm_->nop();
517 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
521 // Check that the size of the code used for returning is large enough
522 // for the debugger's requirements.
523 DCHECK(Assembler::kJSReturnSequenceInstructions <=
524 masm_->InstructionsGeneratedSince(&check_exit_codesize));
530 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
531 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
535 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
536 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
537 codegen()->GetVar(result_register(), var);
541 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
542 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
543 codegen()->GetVar(result_register(), var);
544 __ push(result_register());
548 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
549 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
550 // For simplicity we always test the accumulator register.
551 codegen()->GetVar(result_register(), var);
552 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
553 codegen()->DoTest(this);
557 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
560 void FullCodeGenerator::AccumulatorValueContext::Plug(
561 Heap::RootListIndex index) const {
562 __ LoadRoot(result_register(), index);
566 void FullCodeGenerator::StackValueContext::Plug(
567 Heap::RootListIndex index) const {
568 __ LoadRoot(result_register(), index);
569 __ push(result_register());
573 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
574 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
576 if (index == Heap::kUndefinedValueRootIndex ||
577 index == Heap::kNullValueRootIndex ||
578 index == Heap::kFalseValueRootIndex) {
579 if (false_label_ != fall_through_) __ b(false_label_);
580 } else if (index == Heap::kTrueValueRootIndex) {
581 if (true_label_ != fall_through_) __ b(true_label_);
583 __ LoadRoot(result_register(), index);
584 codegen()->DoTest(this);
589 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593 Handle<Object> lit) const {
594 __ mov(result_register(), Operand(lit));
598 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
599 // Immediates cannot be pushed directly.
600 __ mov(result_register(), Operand(lit));
601 __ push(result_register());
605 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
606 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
608 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
609 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
610 if (false_label_ != fall_through_) __ b(false_label_);
611 } else if (lit->IsTrue() || lit->IsJSObject()) {
612 if (true_label_ != fall_through_) __ b(true_label_);
613 } else if (lit->IsString()) {
614 if (String::cast(*lit)->length() == 0) {
615 if (false_label_ != fall_through_) __ b(false_label_);
617 if (true_label_ != fall_through_) __ b(true_label_);
619 } else if (lit->IsSmi()) {
620 if (Smi::cast(*lit)->value() == 0) {
621 if (false_label_ != fall_through_) __ b(false_label_);
623 if (true_label_ != fall_through_) __ b(true_label_);
626 // For simplicity we always test the accumulator register.
627 __ mov(result_register(), Operand(lit));
628 codegen()->DoTest(this);
633 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
634 Register reg) const {
640 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
641 int count, Register reg) const {
644 __ Move(result_register(), reg);
648 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
649 Register reg) const {
651 if (count > 1) __ Drop(count - 1);
652 __ StoreP(reg, MemOperand(sp, 0));
656 void FullCodeGenerator::TestContext::DropAndPlug(int count,
657 Register reg) const {
659 // For simplicity we always test the accumulator register.
661 __ Move(result_register(), reg);
662 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
663 codegen()->DoTest(this);
667 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
668 Label* materialize_false) const {
669 DCHECK(materialize_true == materialize_false);
670 __ bind(materialize_true);
674 void FullCodeGenerator::AccumulatorValueContext::Plug(
675 Label* materialize_true, Label* materialize_false) const {
677 __ bind(materialize_true);
678 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
680 __ bind(materialize_false);
681 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
686 void FullCodeGenerator::StackValueContext::Plug(
687 Label* materialize_true, Label* materialize_false) const {
689 __ bind(materialize_true);
690 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
692 __ bind(materialize_false);
693 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
699 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
700 Label* materialize_false) const {
701 DCHECK(materialize_true == true_label_);
702 DCHECK(materialize_false == false_label_);
706 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
709 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
710 Heap::RootListIndex value_root_index =
711 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
712 __ LoadRoot(result_register(), value_root_index);
716 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
717 Heap::RootListIndex value_root_index =
718 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
719 __ LoadRoot(ip, value_root_index);
724 void FullCodeGenerator::TestContext::Plug(bool flag) const {
725 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
728 if (true_label_ != fall_through_) __ b(true_label_);
730 if (false_label_ != fall_through_) __ b(false_label_);
735 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
736 Label* if_false, Label* fall_through) {
737 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
738 CallIC(ic, condition->test_id());
739 __ cmpi(result_register(), Operand::Zero());
740 Split(ne, if_true, if_false, fall_through);
744 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
745 Label* fall_through, CRegister cr) {
746 if (if_false == fall_through) {
747 __ b(cond, if_true, cr);
748 } else if (if_true == fall_through) {
749 __ b(NegateCondition(cond), if_false, cr);
751 __ b(cond, if_true, cr);
757 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
758 DCHECK(var->IsStackAllocated());
759 // Offset is negative because higher indexes are at lower addresses.
760 int offset = -var->index() * kPointerSize;
761 // Adjust by a (parameter or local) base offset.
762 if (var->IsParameter()) {
763 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
765 offset += JavaScriptFrameConstants::kLocal0Offset;
767 return MemOperand(fp, offset);
771 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
772 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
773 if (var->IsContextSlot()) {
774 int context_chain_length = scope()->ContextChainLength(var->scope());
775 __ LoadContext(scratch, context_chain_length);
776 return ContextOperand(scratch, var->index());
778 return StackOperand(var);
783 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
784 // Use destination as scratch.
785 MemOperand location = VarOperand(var, dest);
786 __ LoadP(dest, location, r0);
790 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
792 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
793 DCHECK(!scratch0.is(src));
794 DCHECK(!scratch0.is(scratch1));
795 DCHECK(!scratch1.is(src));
796 MemOperand location = VarOperand(var, scratch0);
797 __ StoreP(src, location, r0);
799 // Emit the write barrier code if the location is in the heap.
800 if (var->IsContextSlot()) {
801 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
802 kLRHasBeenSaved, kDontSaveFPRegs);
807 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
808 bool should_normalize,
811 // Only prepare for bailouts before splits if we're in a test
812 // context. Otherwise, we let the Visit function deal with the
813 // preparation to avoid preparing with the same AST id twice.
814 if (!context()->IsTest() || !info_->IsOptimizable()) return;
817 if (should_normalize) __ b(&skip);
818 PrepareForBailout(expr, TOS_REG);
819 if (should_normalize) {
820 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
822 Split(eq, if_true, if_false, NULL);
828 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
829 // The variable in the declaration always resides in the current function
831 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
832 if (generate_debug_code_) {
833 // Check that we're not inside a with or catch context.
834 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
835 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
836 __ Check(ne, kDeclarationInWithContext);
837 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
838 __ Check(ne, kDeclarationInCatchContext);
843 void FullCodeGenerator::VisitVariableDeclaration(
844 VariableDeclaration* declaration) {
845 // If it was not possible to allocate the variable at compile time, we
846 // need to "declare" it at runtime to make sure it actually exists in the
848 VariableProxy* proxy = declaration->proxy();
849 VariableMode mode = declaration->mode();
850 Variable* variable = proxy->var();
851 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
852 switch (variable->location()) {
853 case VariableLocation::GLOBAL:
854 case VariableLocation::UNALLOCATED:
855 globals_->Add(variable->name(), zone());
856 globals_->Add(variable->binding_needs_init()
857 ? isolate()->factory()->the_hole_value()
858 : isolate()->factory()->undefined_value(),
862 case VariableLocation::PARAMETER:
863 case VariableLocation::LOCAL:
865 Comment cmnt(masm_, "[ VariableDeclaration");
866 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
867 __ StoreP(ip, StackOperand(variable));
871 case VariableLocation::CONTEXT:
873 Comment cmnt(masm_, "[ VariableDeclaration");
874 EmitDebugCheckDeclarationContext(variable);
875 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
876 __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
877 // No write barrier since the_hole_value is in old space.
878 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
882 case VariableLocation::LOOKUP: {
883 Comment cmnt(masm_, "[ VariableDeclaration");
884 __ mov(r5, Operand(variable->name()));
885 // Declaration nodes are always introduced in one of four modes.
886 DCHECK(IsDeclaredVariableMode(mode));
887 PropertyAttributes attr =
888 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
889 __ LoadSmiLiteral(r4, Smi::FromInt(attr));
890 // Push initial value, if any.
891 // Note: For variables we must not push an initial value (such as
892 // 'undefined') because we may have a (legal) redeclaration and we
893 // must not destroy the current value.
895 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
896 __ Push(cp, r5, r4, r3);
898 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
899 __ Push(cp, r5, r4, r3);
901 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
908 void FullCodeGenerator::VisitFunctionDeclaration(
909 FunctionDeclaration* declaration) {
910 VariableProxy* proxy = declaration->proxy();
911 Variable* variable = proxy->var();
912 switch (variable->location()) {
913 case VariableLocation::GLOBAL:
914 case VariableLocation::UNALLOCATED: {
915 globals_->Add(variable->name(), zone());
916 Handle<SharedFunctionInfo> function =
917 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
918 // Check for stack-overflow exception.
919 if (function.is_null()) return SetStackOverflow();
920 globals_->Add(function, zone());
924 case VariableLocation::PARAMETER:
925 case VariableLocation::LOCAL: {
926 Comment cmnt(masm_, "[ FunctionDeclaration");
927 VisitForAccumulatorValue(declaration->fun());
928 __ StoreP(result_register(), StackOperand(variable));
932 case VariableLocation::CONTEXT: {
933 Comment cmnt(masm_, "[ FunctionDeclaration");
934 EmitDebugCheckDeclarationContext(variable);
935 VisitForAccumulatorValue(declaration->fun());
936 __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
937 int offset = Context::SlotOffset(variable->index());
938 // We know that we have written a function, which is not a smi.
939 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
940 kLRHasBeenSaved, kDontSaveFPRegs,
941 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
942 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
946 case VariableLocation::LOOKUP: {
947 Comment cmnt(masm_, "[ FunctionDeclaration");
948 __ mov(r5, Operand(variable->name()));
949 __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
951 // Push initial value for function declaration.
952 VisitForStackValue(declaration->fun());
953 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
960 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
961 VariableProxy* proxy = declaration->proxy();
962 Variable* variable = proxy->var();
963 switch (variable->location()) {
964 case VariableLocation::GLOBAL:
965 case VariableLocation::UNALLOCATED:
969 case VariableLocation::CONTEXT: {
970 Comment cmnt(masm_, "[ ImportDeclaration");
971 EmitDebugCheckDeclarationContext(variable);
976 case VariableLocation::PARAMETER:
977 case VariableLocation::LOCAL:
978 case VariableLocation::LOOKUP:
984 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
989 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
990 // Call the runtime to declare the globals.
991 // The context is the first argument.
992 __ mov(r4, Operand(pairs));
993 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
995 __ CallRuntime(Runtime::kDeclareGlobals, 3);
996 // Return value is ignored.
1000 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1001 // Call the runtime to declare the modules.
1002 __ Push(descriptions);
1003 __ CallRuntime(Runtime::kDeclareModules, 1);
1004 // Return value is ignored.
1008 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1009 Comment cmnt(masm_, "[ SwitchStatement");
1010 Breakable nested_statement(this, stmt);
1011 SetStatementPosition(stmt);
1013 // Keep the switch value on the stack until a case matches.
1014 VisitForStackValue(stmt->tag());
1015 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1017 ZoneList<CaseClause*>* clauses = stmt->cases();
1018 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1020 Label next_test; // Recycled for each test.
1021 // Compile all the tests with branches to their bodies.
1022 for (int i = 0; i < clauses->length(); i++) {
1023 CaseClause* clause = clauses->at(i);
1024 clause->body_target()->Unuse();
1026 // The default is not a test, but remember it as final fall through.
1027 if (clause->is_default()) {
1028 default_clause = clause;
1032 Comment cmnt(masm_, "[ Case comparison");
1033 __ bind(&next_test);
1036 // Compile the label expression.
1037 VisitForAccumulatorValue(clause->label());
1039 // Perform the comparison as if via '==='.
1040 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
1041 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1042 JumpPatchSite patch_site(masm_);
1043 if (inline_smi_code) {
1046 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
1050 __ Drop(1); // Switch value is no longer needed.
1051 __ b(clause->body_target());
1052 __ bind(&slow_case);
1055 // Record position before stub call for type feedback.
1056 SetExpressionPosition(clause);
1057 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1058 strength(language_mode())).code();
1059 CallIC(ic, clause->CompareId());
1060 patch_site.EmitPatchInfo();
1064 PrepareForBailout(clause, TOS_REG);
1065 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1069 __ b(clause->body_target());
1072 __ cmpi(r3, Operand::Zero());
1074 __ Drop(1); // Switch value is no longer needed.
1075 __ b(clause->body_target());
1078 // Discard the test value and jump to the default if present, otherwise to
1079 // the end of the statement.
1080 __ bind(&next_test);
1081 __ Drop(1); // Switch value is no longer needed.
1082 if (default_clause == NULL) {
1083 __ b(nested_statement.break_label());
1085 __ b(default_clause->body_target());
1088 // Compile all the case bodies.
1089 for (int i = 0; i < clauses->length(); i++) {
1090 Comment cmnt(masm_, "[ Case body");
1091 CaseClause* clause = clauses->at(i);
1092 __ bind(clause->body_target());
1093 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1094 VisitStatements(clause->statements());
1097 __ bind(nested_statement.break_label());
1098 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1102 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1103 Comment cmnt(masm_, "[ ForInStatement");
1104 SetStatementPosition(stmt, SKIP_BREAK);
1106 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1109 ForIn loop_statement(this, stmt);
1110 increment_loop_depth();
1112 // Get the object to enumerate over. If the object is null or undefined, skip
1113 // over the loop. See ECMA-262 version 5, section 12.6.4.
1114 SetExpressionAsStatementPosition(stmt->enumerable());
1115 VisitForAccumulatorValue(stmt->enumerable());
1116 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1119 Register null_value = r7;
1120 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1121 __ cmp(r3, null_value);
1124 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1126 // Convert the object to a JS object.
1127 Label convert, done_convert;
1128 __ JumpIfSmi(r3, &convert);
1129 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1130 __ bge(&done_convert);
1133 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1134 __ bind(&done_convert);
1135 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1138 // Check for proxies.
1140 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1141 __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
1142 __ ble(&call_runtime);
1144 // Check cache validity in generated code. This is a fast case for
1145 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1146 // guarantee cache validity, call the runtime system to check cache
1147 // validity or get the property names in a fixed array.
1148 __ CheckEnumCache(null_value, &call_runtime);
1150 // The enum cache is valid. Load the map of the object being
1151 // iterated over and use the cache for the iteration.
1153 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1156 // Get the set of properties to enumerate.
1157 __ bind(&call_runtime);
1158 __ push(r3); // Duplicate the enumerable object on the stack.
1159 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1160 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1162 // If we got a map from the runtime call, we can do a fast
1163 // modification check. Otherwise, we got a fixed array, and we have
1164 // to do a slow check.
1166 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1167 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1169 __ bne(&fixed_array);
1171 // We got a map in register r3. Get the enumeration cache from it.
1172 Label no_descriptors;
1173 __ bind(&use_cache);
1175 __ EnumLength(r4, r3);
1176 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1177 __ beq(&no_descriptors);
1179 __ LoadInstanceDescriptors(r3, r5);
1180 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1182 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1184 // Set up the four remaining stack slots.
1185 __ push(r3); // Map.
1186 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1187 // Push enumeration cache, enumeration cache length (as smi) and zero.
1188 __ Push(r5, r4, r3);
1191 __ bind(&no_descriptors);
1195 // We got a fixed array in register r3. Iterate through that.
1197 __ bind(&fixed_array);
1199 __ Move(r4, FeedbackVector());
1200 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1201 int vector_index = FeedbackVector()->GetIndex(slot);
1203 r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);
1205 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
1206 __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1207 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1208 __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
1210 __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
1211 __ bind(&non_proxy);
1212 __ Push(r4, r3); // Smi and array
1213 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1214 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1215 __ Push(r4, r3); // Fixed array length (as smi) and initial index.
1217 // Generate code for doing the condition check.
1218 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1220 SetExpressionAsStatementPosition(stmt->each());
1222 // Load the current count to r3, load the length to r4.
1223 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1224 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1225 __ cmpl(r3, r4); // Compare to the array length.
1226 __ bge(loop_statement.break_label());
1228 // Get the current entry of the array into register r6.
1229 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1230 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1231 __ SmiToPtrArrayOffset(r6, r3);
1232 __ LoadPX(r6, MemOperand(r6, r5));
1234 // Get the expected map from the stack or a smi in the
1235 // permanent slow case into register r5.
1236 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1238 // Check if the expected map still matches that of the enumerable.
1239 // If not, we may have to filter the key.
1241 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1242 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1244 __ beq(&update_each);
1246 // For proxies, no filtering is done.
1247 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1248 __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
1249 __ beq(&update_each);
1251 // Convert the entry to a string or (smi) 0 if it isn't a property
1252 // any more. If the property has been removed while iterating, we
1254 __ Push(r4, r6); // Enumerable and current entry.
1255 __ CallRuntime(Runtime::kForInFilter, 2);
1256 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1258 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1260 __ beq(loop_statement.continue_label());
1262 // Update the 'each' property or variable from the possibly filtered
1263 // entry in register r6.
1264 __ bind(&update_each);
1265 __ mr(result_register(), r6);
1266 // Perform the assignment as if via '='.
1268 EffectContext context(this);
1269 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1270 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1273 // Generate code for the body of the loop.
1274 Visit(stmt->body());
1276 // Generate code for the going to the next element by incrementing
1277 // the index (smi) stored on top of the stack.
1278 __ bind(loop_statement.continue_label());
1280 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1283 EmitBackEdgeBookkeeping(stmt, &loop);
1286 // Remove the pointers stored on the stack.
1287 __ bind(loop_statement.break_label());
1290 // Exit and decrement the loop depth.
1291 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1293 decrement_loop_depth();
1297 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1299 // Use the fast case closure allocation code that allocates in new
1300 // space for nested functions that don't need literals cloning. If
1301 // we're running with the --always-opt or the --prepare-always-opt
1302 // flag, we need to use the runtime function so that the new function
1303 // we are creating here gets a chance to have its code optimized and
1304 // doesn't just get a copy of the existing unoptimized code.
1305 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1306 scope()->is_function_scope() && info->num_literals() == 0) {
1307 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1308 __ mov(r5, Operand(info));
1311 __ mov(r3, Operand(info));
1313 r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1314 __ Push(cp, r3, r4);
1315 __ CallRuntime(Runtime::kNewClosure, 3);
1317 context()->Plug(r3);
1321 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1322 Comment cmnt(masm_, "[ VariableProxy");
1323 EmitVariableLoad(expr);
1327 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1329 FeedbackVectorICSlot slot) {
1330 if (NeedsHomeObject(initializer)) {
1331 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1332 __ mov(StoreDescriptor::NameRegister(),
1333 Operand(isolate()->factory()->home_object_symbol()));
1334 __ LoadP(StoreDescriptor::ValueRegister(),
1335 MemOperand(sp, offset * kPointerSize));
1336 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1342 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1343 TypeofMode typeof_mode,
1345 Register current = cp;
1351 if (s->num_heap_slots() > 0) {
1352 if (s->calls_sloppy_eval()) {
1353 // Check that extension is NULL.
1354 __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1355 __ cmpi(temp, Operand::Zero());
1358 // Load next context in chain.
1359 __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1360 // Walk the rest of the chain without clobbering cp.
1363 // If no outer scope calls eval, we do not need to check more
1364 // context extensions.
1365 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1366 s = s->outer_scope();
1369 if (s->is_eval_scope()) {
1371 if (!current.is(next)) {
1372 __ Move(next, current);
1375 // Terminate at native context.
1376 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1377 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1380 // Check that extension is NULL.
1381 __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1382 __ cmpi(temp, Operand::Zero());
1384 // Load next context in chain.
1385 __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1390 // All extension objects were empty and it is safe to use a normal global
1392 EmitGlobalVariableLoad(proxy, typeof_mode);
1396 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1398 DCHECK(var->IsContextSlot());
1399 Register context = cp;
1403 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1404 if (s->num_heap_slots() > 0) {
1405 if (s->calls_sloppy_eval()) {
1406 // Check that extension is NULL.
1407 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1408 __ cmpi(temp, Operand::Zero());
1411 __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1412 // Walk the rest of the chain without clobbering cp.
1416 // Check that last extension is NULL.
1417 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1418 __ cmpi(temp, Operand::Zero());
1421 // This function is used only for loads, not stores, so it's safe to
1422 // return an cp-based operand (the write barrier cannot be allowed to
1423 // destroy the cp register).
1424 return ContextOperand(context, var->index());
1428 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1429 TypeofMode typeof_mode,
1430 Label* slow, Label* done) {
1431 // Generate fast-case code for variables that might be shadowed by
1432 // eval-introduced variables. Eval is used a lot without
1433 // introducing variables. In those cases, we do not want to
1434 // perform a runtime call for all variables in the scope
1435 // containing the eval.
1436 Variable* var = proxy->var();
1437 if (var->mode() == DYNAMIC_GLOBAL) {
1438 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1440 } else if (var->mode() == DYNAMIC_LOCAL) {
1441 Variable* local = var->local_if_not_shadowed();
1442 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1443 if (local->mode() == LET || local->mode() == CONST ||
1444 local->mode() == CONST_LEGACY) {
1445 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1447 if (local->mode() == CONST_LEGACY) {
1448 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1449 } else { // LET || CONST
1450 __ mov(r3, Operand(var->name()));
1452 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1460 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1461 TypeofMode typeof_mode) {
1462 Variable* var = proxy->var();
1463 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1464 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1465 if (var->IsGlobalSlot()) {
1466 DCHECK(var->index() > 0);
1467 DCHECK(var->IsStaticGlobalObjectProperty());
1468 // Each var occupies two slots in the context: for reads and writes.
1469 int slot_index = var->index();
1470 int depth = scope()->ContextChainLength(var->scope());
1471 __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
1472 Operand(Smi::FromInt(depth)));
1473 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
1474 Operand(Smi::FromInt(slot_index)));
1475 __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
1476 Operand(var->name()));
1477 LoadGlobalViaContextStub stub(isolate(), depth);
1481 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1482 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1483 __ mov(LoadDescriptor::SlotRegister(),
1484 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1485 CallLoadIC(typeof_mode);
1490 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1491 TypeofMode typeof_mode) {
1492 // Record position before possible IC call.
1493 SetExpressionPosition(proxy);
1494 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1495 Variable* var = proxy->var();
1497 // Three cases: global variables, lookup variables, and all other types of
1499 switch (var->location()) {
1500 case VariableLocation::GLOBAL:
1501 case VariableLocation::UNALLOCATED: {
1502 Comment cmnt(masm_, "[ Global variable");
1503 EmitGlobalVariableLoad(proxy, typeof_mode);
1504 context()->Plug(r3);
1508 case VariableLocation::PARAMETER:
1509 case VariableLocation::LOCAL:
1510 case VariableLocation::CONTEXT: {
1511 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1512 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1513 : "[ Stack variable");
1514 if (var->binding_needs_init()) {
1515 // var->scope() may be NULL when the proxy is located in eval code and
1516 // refers to a potential outside binding. Currently those bindings are
1517 // always looked up dynamically, i.e. in that case
1518 // var->location() == LOOKUP.
1520 DCHECK(var->scope() != NULL);
1522 // Check if the binding really needs an initialization check. The check
1523 // can be skipped in the following situation: we have a LET or CONST
1524 // binding in harmony mode, both the Variable and the VariableProxy have
1525 // the same declaration scope (i.e. they are both in global code, in the
1526 // same function or in the same eval code) and the VariableProxy is in
1527 // the source physically located after the initializer of the variable.
1529 // We cannot skip any initialization checks for CONST in non-harmony
1530 // mode because const variables may be declared but never initialized:
1531 // if (false) { const x; }; var y = x;
1533 // The condition on the declaration scopes is a conservative check for
1534 // nested functions that access a binding and are called before the
1535 // binding is initialized:
1536 // function() { f(); let x = 1; function f() { x = 2; } }
1538 bool skip_init_check;
1539 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1540 skip_init_check = false;
1541 } else if (var->is_this()) {
1542 CHECK(info_->function() != nullptr &&
1543 (info_->function()->kind() & kSubclassConstructor) != 0);
1544 // TODO(dslomov): implement 'this' hole check elimination.
1545 skip_init_check = false;
1547 // Check that we always have valid source position.
1548 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1549 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1550 skip_init_check = var->mode() != CONST_LEGACY &&
1551 var->initializer_position() < proxy->position();
1554 if (!skip_init_check) {
1556 // Let and const need a read barrier.
1558 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1560 if (var->mode() == LET || var->mode() == CONST) {
1561 // Throw a reference error when using an uninitialized let/const
1562 // binding in harmony mode.
1563 __ mov(r3, Operand(var->name()));
1565 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1567 // Uninitalized const bindings outside of harmony mode are unholed.
1568 DCHECK(var->mode() == CONST_LEGACY);
1569 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1572 context()->Plug(r3);
1576 context()->Plug(var);
1580 case VariableLocation::LOOKUP: {
1581 Comment cmnt(masm_, "[ Lookup variable");
1583 // Generate code for loading from variables potentially shadowed
1584 // by eval-introduced variables.
1585 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1587 __ mov(r4, Operand(var->name()));
1588 __ Push(cp, r4); // Context and name.
1589 Runtime::FunctionId function_id =
1590 typeof_mode == NOT_INSIDE_TYPEOF
1591 ? Runtime::kLoadLookupSlot
1592 : Runtime::kLoadLookupSlotNoReferenceError;
1593 __ CallRuntime(function_id, 2);
1595 context()->Plug(r3);
1601 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1602 Comment cmnt(masm_, "[ RegExpLiteral");
1604 // Registers will be used as follows:
1605 // r8 = materialized value (RegExp literal)
1606 // r7 = JS function, literals array
1607 // r6 = literal index
1608 // r5 = RegExp pattern
1609 // r4 = RegExp flags
1610 // r3 = RegExp literal clone
1611 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1612 __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1613 int literal_offset =
1614 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1615 __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
1616 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1618 __ bne(&materialized);
1620 // Create regexp literal using runtime function.
1621 // Result will be in r3.
1622 __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
1623 __ mov(r5, Operand(expr->pattern()));
1624 __ mov(r4, Operand(expr->flags()));
1625 __ Push(r7, r6, r5, r4);
1626 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1629 __ bind(&materialized);
1630 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1631 Label allocated, runtime_allocate;
1632 __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
1635 __ bind(&runtime_allocate);
1636 __ LoadSmiLiteral(r3, Smi::FromInt(size));
1638 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1641 __ bind(&allocated);
1642 // After this, registers are used as follows:
1643 // r3: Newly allocated regexp.
1644 // r8: Materialized regexp.
1646 __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
1647 context()->Plug(r3);
1651 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1652 if (expression == NULL) {
1653 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1656 VisitForStackValue(expression);
1661 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1662 Comment cmnt(masm_, "[ ObjectLiteral");
1664 Handle<FixedArray> constant_properties = expr->constant_properties();
1665 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1666 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1667 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1668 __ mov(r4, Operand(constant_properties));
1669 int flags = expr->ComputeFlags();
1670 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1671 if (MustCreateObjectLiteralWithRuntime(expr)) {
1672 __ Push(r6, r5, r4, r3);
1673 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1675 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1678 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1680 // If result_saved is true the result is on top of the stack. If
1681 // result_saved is false the result is in r3.
1682 bool result_saved = false;
1684 AccessorTable accessor_table(zone());
1685 int property_index = 0;
1686 // store_slot_index points to the vector IC slot for the next store IC used.
1687 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1688 // and must be updated if the number of store ICs emitted here changes.
1689 int store_slot_index = 0;
1690 for (; property_index < expr->properties()->length(); property_index++) {
1691 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1692 if (property->is_computed_name()) break;
1693 if (property->IsCompileTimeValue()) continue;
1695 Literal* key = property->key()->AsLiteral();
1696 Expression* value = property->value();
1697 if (!result_saved) {
1698 __ push(r3); // Save result on stack
1699 result_saved = true;
1701 switch (property->kind()) {
1702 case ObjectLiteral::Property::CONSTANT:
1704 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1705 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1707 case ObjectLiteral::Property::COMPUTED:
1708 // It is safe to use [[Put]] here because the boilerplate already
1709 // contains computed properties with an uninitialized value.
1710 if (key->value()->IsInternalizedString()) {
1711 if (property->emit_store()) {
1712 VisitForAccumulatorValue(value);
1713 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1714 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1715 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1716 if (FLAG_vector_stores) {
1717 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1720 CallStoreIC(key->LiteralFeedbackId());
1722 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1724 if (NeedsHomeObject(value)) {
1725 __ Move(StoreDescriptor::ReceiverRegister(), r3);
1726 __ mov(StoreDescriptor::NameRegister(),
1727 Operand(isolate()->factory()->home_object_symbol()));
1728 __ LoadP(StoreDescriptor::ValueRegister(), MemOperand(sp));
1729 if (FLAG_vector_stores) {
1730 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1735 VisitForEffect(value);
1739 // Duplicate receiver on stack.
1740 __ LoadP(r3, MemOperand(sp));
1742 VisitForStackValue(key);
1743 VisitForStackValue(value);
1744 if (property->emit_store()) {
1745 EmitSetHomeObjectIfNeeded(
1746 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1747 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1749 __ CallRuntime(Runtime::kSetProperty, 4);
1754 case ObjectLiteral::Property::PROTOTYPE:
1755 // Duplicate receiver on stack.
1756 __ LoadP(r3, MemOperand(sp));
1758 VisitForStackValue(value);
1759 DCHECK(property->emit_store());
1760 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1762 case ObjectLiteral::Property::GETTER:
1763 if (property->emit_store()) {
1764 accessor_table.lookup(key)->second->getter = value;
1767 case ObjectLiteral::Property::SETTER:
1768 if (property->emit_store()) {
1769 accessor_table.lookup(key)->second->setter = value;
1775 // Emit code to define accessors, using only a single call to the runtime for
1776 // each pair of corresponding getters and setters.
1777 for (AccessorTable::Iterator it = accessor_table.begin();
1778 it != accessor_table.end(); ++it) {
1779 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1781 VisitForStackValue(it->first);
1782 EmitAccessor(it->second->getter);
1783 EmitSetHomeObjectIfNeeded(
1784 it->second->getter, 2,
1785 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1786 EmitAccessor(it->second->setter);
1787 EmitSetHomeObjectIfNeeded(
1788 it->second->setter, 3,
1789 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1790 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1792 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1795 // Object literals have two parts. The "static" part on the left contains no
1796 // computed property names, and so we can compute its map ahead of time; see
1797 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1798 // starts with the first computed property name, and continues with all
1799 // properties to its right. All the code from above initializes the static
1800 // component of the object literal, and arranges for the map of the result to
1801 // reflect the static order in which the keys appear. For the dynamic
1802 // properties, we compile them into a series of "SetOwnProperty" runtime
1803 // calls. This will preserve insertion order.
1804 for (; property_index < expr->properties()->length(); property_index++) {
1805 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1807 Expression* value = property->value();
1808 if (!result_saved) {
1809 __ push(r3); // Save result on the stack
1810 result_saved = true;
1813 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1816 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1817 DCHECK(!property->is_computed_name());
1818 VisitForStackValue(value);
1819 DCHECK(property->emit_store());
1820 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1822 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1823 VisitForStackValue(value);
1824 EmitSetHomeObjectIfNeeded(
1825 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1827 switch (property->kind()) {
1828 case ObjectLiteral::Property::CONSTANT:
1829 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1830 case ObjectLiteral::Property::COMPUTED:
1831 if (property->emit_store()) {
1832 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1834 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1840 case ObjectLiteral::Property::PROTOTYPE:
1844 case ObjectLiteral::Property::GETTER:
1845 __ mov(r3, Operand(Smi::FromInt(NONE)));
1847 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1850 case ObjectLiteral::Property::SETTER:
1851 __ mov(r3, Operand(Smi::FromInt(NONE)));
1853 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1859 if (expr->has_function()) {
1860 DCHECK(result_saved);
1861 __ LoadP(r3, MemOperand(sp));
1863 __ CallRuntime(Runtime::kToFastProperties, 1);
1867 context()->PlugTOS();
1869 context()->Plug(r3);
1872 // Verify that compilation exactly consumed the number of store ic slots that
1873 // the ObjectLiteral node had to offer.
1874 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1878 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1879 Comment cmnt(masm_, "[ ArrayLiteral");
1881 expr->BuildConstantElements(isolate());
1882 Handle<FixedArray> constant_elements = expr->constant_elements();
1883 bool has_fast_elements =
1884 IsFastObjectElementsKind(expr->constant_elements_kind());
1885 Handle<FixedArrayBase> constant_elements_values(
1886 FixedArrayBase::cast(constant_elements->get(1)));
1888 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1889 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1890 // If the only customer of allocation sites is transitioning, then
1891 // we can turn it off if we don't have anywhere else to transition to.
1892 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1895 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1896 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1897 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1898 __ mov(r4, Operand(constant_elements));
1899 if (MustCreateArrayLiteralWithRuntime(expr)) {
1900 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1901 __ Push(r6, r5, r4, r3);
1902 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1904 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1907 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1909 bool result_saved = false; // Is the result saved to the stack?
1910 ZoneList<Expression*>* subexprs = expr->values();
1911 int length = subexprs->length();
1913 // Emit code to evaluate all the non-constant subexpressions and to store
1914 // them into the newly cloned array.
1915 int array_index = 0;
1916 for (; array_index < length; array_index++) {
1917 Expression* subexpr = subexprs->at(array_index);
1918 if (subexpr->IsSpread()) break;
1919 // If the subexpression is a literal or a simple materialized literal it
1920 // is already set in the cloned array.
1921 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1923 if (!result_saved) {
1925 __ Push(Smi::FromInt(expr->literal_index()));
1926 result_saved = true;
1928 VisitForAccumulatorValue(subexpr);
1930 if (has_fast_elements) {
1931 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1932 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1933 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1934 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1935 // Update the write barrier for the array store.
1936 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1937 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1940 __ LoadSmiLiteral(r6, Smi::FromInt(array_index));
1941 StoreArrayLiteralElementStub stub(isolate());
1945 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1948 // In case the array literal contains spread expressions it has two parts. The
1949 // first part is the "static" array which has a literal index is handled
1950 // above. The second part is the part after the first spread expression
1951 // (inclusive) and these elements gets appended to the array. Note that the
1952 // number elements an iterable produces is unknown ahead of time.
1953 if (array_index < length && result_saved) {
1954 __ Drop(1); // literal index
1956 result_saved = false;
1958 for (; array_index < length; array_index++) {
1959 Expression* subexpr = subexprs->at(array_index);
1962 if (subexpr->IsSpread()) {
1963 VisitForStackValue(subexpr->AsSpread()->expression());
1964 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1966 VisitForStackValue(subexpr);
1967 __ CallRuntime(Runtime::kAppendElement, 2);
1970 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1974 __ Drop(1); // literal index
1975 context()->PlugTOS();
1977 context()->Plug(r3);
1982 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1983 DCHECK(expr->target()->IsValidReferenceExpression());
1985 Comment cmnt(masm_, "[ Assignment");
1986 SetExpressionPosition(expr, INSERT_BREAK);
1988 Property* property = expr->target()->AsProperty();
1989 LhsKind assign_type = Property::GetAssignType(property);
1991 // Evaluate LHS expression.
1992 switch (assign_type) {
1994 // Nothing to do here.
1996 case NAMED_PROPERTY:
1997 if (expr->is_compound()) {
1998 // We need the receiver both on the stack and in the register.
1999 VisitForStackValue(property->obj());
2000 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2002 VisitForStackValue(property->obj());
2005 case NAMED_SUPER_PROPERTY:
2007 property->obj()->AsSuperPropertyReference()->this_var());
2008 VisitForAccumulatorValue(
2009 property->obj()->AsSuperPropertyReference()->home_object());
2010 __ Push(result_register());
2011 if (expr->is_compound()) {
2012 const Register scratch = r4;
2013 __ LoadP(scratch, MemOperand(sp, kPointerSize));
2014 __ Push(scratch, result_register());
2017 case KEYED_SUPER_PROPERTY: {
2018 const Register scratch = r4;
2020 property->obj()->AsSuperPropertyReference()->this_var());
2021 VisitForAccumulatorValue(
2022 property->obj()->AsSuperPropertyReference()->home_object());
2023 __ mr(scratch, result_register());
2024 VisitForAccumulatorValue(property->key());
2025 __ Push(scratch, result_register());
2026 if (expr->is_compound()) {
2027 const Register scratch1 = r5;
2028 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
2029 __ Push(scratch1, scratch, result_register());
2033 case KEYED_PROPERTY:
2034 if (expr->is_compound()) {
2035 VisitForStackValue(property->obj());
2036 VisitForStackValue(property->key());
2037 __ LoadP(LoadDescriptor::ReceiverRegister(),
2038 MemOperand(sp, 1 * kPointerSize));
2039 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2041 VisitForStackValue(property->obj());
2042 VisitForStackValue(property->key());
2047 // For compound assignments we need another deoptimization point after the
2048 // variable/property load.
2049 if (expr->is_compound()) {
2051 AccumulatorValueContext context(this);
2052 switch (assign_type) {
2054 EmitVariableLoad(expr->target()->AsVariableProxy());
2055 PrepareForBailout(expr->target(), TOS_REG);
2057 case NAMED_PROPERTY:
2058 EmitNamedPropertyLoad(property);
2059 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2061 case NAMED_SUPER_PROPERTY:
2062 EmitNamedSuperPropertyLoad(property);
2063 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2065 case KEYED_SUPER_PROPERTY:
2066 EmitKeyedSuperPropertyLoad(property);
2067 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2069 case KEYED_PROPERTY:
2070 EmitKeyedPropertyLoad(property);
2071 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2076 Token::Value op = expr->binary_op();
2077 __ push(r3); // Left operand goes on the stack.
2078 VisitForAccumulatorValue(expr->value());
2080 AccumulatorValueContext context(this);
2081 if (ShouldInlineSmiCase(op)) {
2082 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
2085 EmitBinaryOp(expr->binary_operation(), op);
2088 // Deoptimization point in case the binary operation may have side effects.
2089 PrepareForBailout(expr->binary_operation(), TOS_REG);
2091 VisitForAccumulatorValue(expr->value());
2094 SetExpressionPosition(expr);
2097 switch (assign_type) {
2099 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2100 expr->op(), expr->AssignmentSlot());
2101 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2102 context()->Plug(r3);
2104 case NAMED_PROPERTY:
2105 EmitNamedPropertyAssignment(expr);
2107 case NAMED_SUPER_PROPERTY:
2108 EmitNamedSuperPropertyStore(property);
2109 context()->Plug(r3);
2111 case KEYED_SUPER_PROPERTY:
2112 EmitKeyedSuperPropertyStore(property);
2113 context()->Plug(r3);
2115 case KEYED_PROPERTY:
2116 EmitKeyedPropertyAssignment(expr);
2122 void FullCodeGenerator::VisitYield(Yield* expr) {
2123 Comment cmnt(masm_, "[ Yield");
2124 SetExpressionPosition(expr);
2126 // Evaluate yielded value first; the initial iterator definition depends on
2127 // this. It stays on the stack while we update the iterator.
2128 VisitForStackValue(expr->expression());
2130 switch (expr->yield_kind()) {
2131 case Yield::kSuspend:
2132 // Pop value from top-of-stack slot; box result into result register.
2133 EmitCreateIteratorResult(false);
2134 __ push(result_register());
2136 case Yield::kInitial: {
2137 Label suspend, continuation, post_runtime, resume;
2140 __ bind(&continuation);
2141 __ RecordGeneratorContinuation();
2145 VisitForAccumulatorValue(expr->generator_object());
2146 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2147 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2148 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2150 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2152 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2153 kLRHasBeenSaved, kDontSaveFPRegs);
2154 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2156 __ beq(&post_runtime);
2157 __ push(r3); // generator object
2158 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2159 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2160 __ bind(&post_runtime);
2161 __ pop(result_register());
2162 EmitReturnSequence();
2165 context()->Plug(result_register());
2169 case Yield::kFinal: {
2170 VisitForAccumulatorValue(expr->generator_object());
2171 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2172 __ StoreP(r4, FieldMemOperand(result_register(),
2173 JSGeneratorObject::kContinuationOffset),
2175 // Pop value from top-of-stack slot, box result into result register.
2176 EmitCreateIteratorResult(true);
2177 EmitUnwindBeforeReturn();
2178 EmitReturnSequence();
2182 case Yield::kDelegating: {
2183 VisitForStackValue(expr->generator_object());
2185 // Initial stack layout is as follows:
2186 // [sp + 1 * kPointerSize] iter
2187 // [sp + 0 * kPointerSize] g
2189 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2190 Label l_next, l_call;
2191 Register load_receiver = LoadDescriptor::ReceiverRegister();
2192 Register load_name = LoadDescriptor::NameRegister();
2194 // Initial send value is undefined.
2195 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2198 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2200 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2201 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2202 __ Push(load_name, r6, r3); // "throw", iter, except
2205 // try { received = %yield result }
2206 // Shuffle the received result above a try handler and yield it without
2209 __ pop(r3); // result
2210 int handler_index = NewHandlerTableEntry();
2211 EnterTryBlock(handler_index, &l_catch);
2212 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2213 __ push(r3); // result
2216 __ bind(&l_continuation);
2217 __ RecordGeneratorContinuation();
2220 __ bind(&l_suspend);
2221 const int generator_object_depth = kPointerSize + try_block_size;
2222 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2224 __ Push(Smi::FromInt(handler_index)); // handler-index
2225 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2226 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2227 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2229 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2231 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2232 kLRHasBeenSaved, kDontSaveFPRegs);
2233 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2234 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2235 __ pop(r3); // result
2236 EmitReturnSequence();
2237 __ bind(&l_resume); // received in r3
2238 ExitTryBlock(handler_index);
2240 // receiver = iter; f = 'next'; arg = received;
2243 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2244 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2245 __ Push(load_name, r6, r3); // "next", iter, received
2247 // result = receiver[f](arg);
2249 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2250 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2251 __ mov(LoadDescriptor::SlotRegister(),
2252 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2253 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2254 CallIC(ic, TypeFeedbackId::None());
2256 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2257 SetCallPosition(expr, 1);
2258 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2261 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2262 __ Drop(1); // The function is still on the stack; drop it.
2264 // if (!result.done) goto l_try;
2265 __ Move(load_receiver, r3);
2267 __ push(load_receiver); // save result
2268 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2269 __ mov(LoadDescriptor::SlotRegister(),
2270 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2271 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done
2272 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2274 __ cmpi(r3, Operand::Zero());
2278 __ pop(load_receiver); // result
2279 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2280 __ mov(LoadDescriptor::SlotRegister(),
2281 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2282 CallLoadIC(NOT_INSIDE_TYPEOF); // r3=result.value
2283 context()->DropAndPlug(2, r3); // drop iter and g
2290 void FullCodeGenerator::EmitGeneratorResume(
2291 Expression* generator, Expression* value,
2292 JSGeneratorObject::ResumeMode resume_mode) {
2293 // The value stays in r3, and is ultimately read by the resumed generator, as
2294 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2295 // is read to throw the value when the resumed generator is already closed.
2296 // r4 will hold the generator object until the activation has been resumed.
2297 VisitForStackValue(generator);
2298 VisitForAccumulatorValue(value);
2301 // Load suspended function and context.
2302 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2303 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2305 // Load receiver and store as the first argument.
2306 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2309 // Push holes for the rest of the arguments to the generator function.
2310 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2312 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2313 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2314 Label argument_loop, push_frame;
2315 #if V8_TARGET_ARCH_PPC64
2316 __ cmpi(r6, Operand::Zero());
2317 __ beq(&push_frame);
2319 __ SmiUntag(r6, SetRC);
2320 __ beq(&push_frame, cr0);
2323 __ bind(&argument_loop);
2325 __ bdnz(&argument_loop);
2327 // Enter a new JavaScript frame, and initialize its slots as they were when
2328 // the generator was suspended.
2329 Label resume_frame, done;
2330 __ bind(&push_frame);
2331 __ b(&resume_frame, SetLK);
2333 __ bind(&resume_frame);
2334 // lr = return address.
2335 // fp = caller's frame pointer.
2336 // cp = callee's context,
2337 // r7 = callee's JS function.
2338 __ PushFixedFrame(r7);
2339 // Adjust FP to point to saved FP.
2340 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2342 // Load the operand stack size.
2343 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2344 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2345 __ SmiUntag(r6, SetRC);
2347 // If we are sending a value and there is no operand stack, we can jump back
2350 if (resume_mode == JSGeneratorObject::NEXT) {
2352 __ bne(&slow_resume, cr0);
2353 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2355 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2356 if (FLAG_enable_embedded_constant_pool) {
2357 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
2359 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2362 __ LoadSmiLiteral(r5,
2363 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2364 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2367 __ bind(&slow_resume);
2370 __ beq(&call_resume, cr0);
2373 // Otherwise, we push holes for the operand stack and call the runtime to fix
2374 // up the stack and the handlers.
2377 __ bind(&operand_loop);
2379 __ bdnz(&operand_loop);
2381 __ bind(&call_resume);
2382 DCHECK(!result_register().is(r4));
2383 __ Push(r4, result_register());
2384 __ Push(Smi::FromInt(resume_mode));
2385 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2386 // Not reached: the runtime call returns elsewhere.
2387 __ stop("not-reached");
2390 context()->Plug(result_register());
2394 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2398 const int instance_size = 5 * kPointerSize;
2399 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2402 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2405 __ bind(&gc_required);
2406 __ Push(Smi::FromInt(instance_size));
2407 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2408 __ LoadP(context_register(),
2409 MemOperand(fp, StandardFrameConstants::kContextOffset));
2411 __ bind(&allocated);
2412 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2413 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2414 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2416 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2417 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2418 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2419 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2420 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2422 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2425 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2428 // Only the value field needs a write barrier, as the other values are in the
2430 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2431 kLRHasBeenSaved, kDontSaveFPRegs);
2435 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2436 SetExpressionPosition(prop);
2437 Literal* key = prop->key()->AsLiteral();
2438 DCHECK(!prop->IsSuperAccess());
2440 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2441 __ mov(LoadDescriptor::SlotRegister(),
2442 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2443 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2447 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2448 // Stack: receiver, home_object.
2449 SetExpressionPosition(prop);
2450 Literal* key = prop->key()->AsLiteral();
2451 DCHECK(!key->value()->IsSmi());
2452 DCHECK(prop->IsSuperAccess());
2454 __ Push(key->value());
2455 __ Push(Smi::FromInt(language_mode()));
2456 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2460 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2461 SetExpressionPosition(prop);
2462 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2463 __ mov(LoadDescriptor::SlotRegister(),
2464 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2469 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2470 // Stack: receiver, home_object, key.
2471 SetExpressionPosition(prop);
2472 __ Push(Smi::FromInt(language_mode()));
2473 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2477 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2479 Expression* left_expr,
2480 Expression* right_expr) {
2481 Label done, smi_case, stub_call;
2483 Register scratch1 = r5;
2484 Register scratch2 = r6;
2486 // Get the arguments.
2488 Register right = r3;
2491 // Perform combined smi check on both operands.
2492 __ orx(scratch1, left, right);
2493 STATIC_ASSERT(kSmiTag == 0);
2494 JumpPatchSite patch_site(masm_);
2495 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2497 __ bind(&stub_call);
2499 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2500 CallIC(code, expr->BinaryOperationFeedbackId());
2501 patch_site.EmitPatchInfo();
2505 // Smi case. This code works the same way as the smi-smi case in the type
2506 // recording binary operation stub.
2509 __ GetLeastBitsFromSmi(scratch1, right, 5);
2510 __ ShiftRightArith(right, left, scratch1);
2511 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2514 __ GetLeastBitsFromSmi(scratch2, right, 5);
2515 #if V8_TARGET_ARCH_PPC64
2516 __ ShiftLeft_(right, left, scratch2);
2518 __ SmiUntag(scratch1, left);
2519 __ ShiftLeft_(scratch1, scratch1, scratch2);
2520 // Check that the *signed* result fits in a smi
2521 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2522 __ SmiTag(right, scratch1);
2527 __ SmiUntag(scratch1, left);
2528 __ GetLeastBitsFromSmi(scratch2, right, 5);
2529 __ srw(scratch1, scratch1, scratch2);
2530 // Unsigned shift is not allowed to produce a negative number.
2531 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2532 __ SmiTag(right, scratch1);
2536 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2537 __ BranchOnOverflow(&stub_call);
2538 __ mr(right, scratch1);
2542 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2543 __ BranchOnOverflow(&stub_call);
2544 __ mr(right, scratch1);
2549 #if V8_TARGET_ARCH_PPC64
2550 // Remove tag from both operands.
2551 __ SmiUntag(ip, right);
2552 __ SmiUntag(r0, left);
2553 __ Mul(scratch1, r0, ip);
2554 // Check for overflowing the smi range - no overflow if higher 33 bits of
2555 // the result are identical.
2556 __ TestIfInt32(scratch1, r0);
2559 __ SmiUntag(ip, right);
2560 __ mullw(scratch1, left, ip);
2561 __ mulhw(scratch2, left, ip);
2562 // Check for overflowing the smi range - no overflow if higher 33 bits of
2563 // the result are identical.
2564 __ TestIfInt32(scratch2, scratch1, ip);
2567 // Go slow on zero result to handle -0.
2568 __ cmpi(scratch1, Operand::Zero());
2570 #if V8_TARGET_ARCH_PPC64
2571 __ SmiTag(right, scratch1);
2573 __ mr(right, scratch1);
2576 // We need -0 if we were multiplying a negative number with 0 to get 0.
2577 // We know one of them was zero.
2579 __ add(scratch2, right, left);
2580 __ cmpi(scratch2, Operand::Zero());
2582 __ LoadSmiLiteral(right, Smi::FromInt(0));
2586 __ orx(right, left, right);
2588 case Token::BIT_AND:
2589 __ and_(right, left, right);
2591 case Token::BIT_XOR:
2592 __ xor_(right, left, right);
2599 context()->Plug(r3);
2603 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2604 int* used_store_slots) {
2605 // Constructor is in r3.
2606 DCHECK(lit != NULL);
2609 // No access check is needed here since the constructor is created by the
2611 Register scratch = r4;
2613 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2616 for (int i = 0; i < lit->properties()->length(); i++) {
2617 ObjectLiteral::Property* property = lit->properties()->at(i);
2618 Expression* value = property->value();
2620 if (property->is_static()) {
2621 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2623 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2626 EmitPropertyKey(property, lit->GetIdForProperty(i));
2628 // The static prototype property is read only. We handle the non computed
2629 // property name case in the parser. Since this is the only case where we
2630 // need to check for an own read only property we special case this so we do
2631 // not need to do this for every property.
2632 if (property->is_static() && property->is_computed_name()) {
2633 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2637 VisitForStackValue(value);
2638 EmitSetHomeObjectIfNeeded(value, 2,
2639 lit->SlotForHomeObject(value, used_store_slots));
2641 switch (property->kind()) {
2642 case ObjectLiteral::Property::CONSTANT:
2643 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2644 case ObjectLiteral::Property::PROTOTYPE:
2646 case ObjectLiteral::Property::COMPUTED:
2647 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2650 case ObjectLiteral::Property::GETTER:
2651 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2653 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2656 case ObjectLiteral::Property::SETTER:
2657 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2659 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2668 __ CallRuntime(Runtime::kToFastProperties, 1);
2671 __ CallRuntime(Runtime::kToFastProperties, 1);
2673 if (is_strong(language_mode())) {
2675 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2676 __ Push(r3, scratch);
2677 // TODO(conradw): It would be more efficient to define the properties with
2678 // the right attributes the first time round.
2679 // Freeze the prototype.
2680 __ CallRuntime(Runtime::kObjectFreeze, 1);
2681 // Freeze the constructor.
2682 __ CallRuntime(Runtime::kObjectFreeze, 1);
2687 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2690 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2691 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2692 CallIC(code, expr->BinaryOperationFeedbackId());
2693 patch_site.EmitPatchInfo();
2694 context()->Plug(r3);
2698 void FullCodeGenerator::EmitAssignment(Expression* expr,
2699 FeedbackVectorICSlot slot) {
2700 DCHECK(expr->IsValidReferenceExpression());
2702 Property* prop = expr->AsProperty();
2703 LhsKind assign_type = Property::GetAssignType(prop);
2705 switch (assign_type) {
2707 Variable* var = expr->AsVariableProxy()->var();
2708 EffectContext context(this);
2709 EmitVariableAssignment(var, Token::ASSIGN, slot);
2712 case NAMED_PROPERTY: {
2713 __ push(r3); // Preserve value.
2714 VisitForAccumulatorValue(prop->obj());
2715 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2716 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2717 __ mov(StoreDescriptor::NameRegister(),
2718 Operand(prop->key()->AsLiteral()->value()));
2719 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2723 case NAMED_SUPER_PROPERTY: {
2725 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2726 VisitForAccumulatorValue(
2727 prop->obj()->AsSuperPropertyReference()->home_object());
2728 // stack: value, this; r3: home_object
2729 Register scratch = r5;
2730 Register scratch2 = r6;
2731 __ mr(scratch, result_register()); // home_object
2732 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2733 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2734 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2735 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2736 // stack: this, home_object; r3: value
2737 EmitNamedSuperPropertyStore(prop);
2740 case KEYED_SUPER_PROPERTY: {
2742 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2744 prop->obj()->AsSuperPropertyReference()->home_object());
2745 VisitForAccumulatorValue(prop->key());
2746 Register scratch = r5;
2747 Register scratch2 = r6;
2748 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2749 // stack: value, this, home_object; r3: key, r6: value
2750 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2751 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2752 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2753 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2754 __ StoreP(r3, MemOperand(sp, 0));
2755 __ Move(r3, scratch2);
2756 // stack: this, home_object, key; r3: value.
2757 EmitKeyedSuperPropertyStore(prop);
2760 case KEYED_PROPERTY: {
2761 __ push(r3); // Preserve value.
2762 VisitForStackValue(prop->obj());
2763 VisitForAccumulatorValue(prop->key());
2764 __ Move(StoreDescriptor::NameRegister(), r3);
2765 __ Pop(StoreDescriptor::ValueRegister(),
2766 StoreDescriptor::ReceiverRegister());
2767 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2769 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2774 context()->Plug(r3);
2778 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2779 Variable* var, MemOperand location) {
2780 __ StoreP(result_register(), location, r0);
2781 if (var->IsContextSlot()) {
2782 // RecordWrite may destroy all its register arguments.
2783 __ mr(r6, result_register());
2784 int offset = Context::SlotOffset(var->index());
2785 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2791 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2792 FeedbackVectorICSlot slot) {
2793 if (var->IsUnallocated()) {
2794 // Global var, const, or let.
2795 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2796 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2797 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2800 } else if (var->IsGlobalSlot()) {
2801 // Global var, const, or let.
2802 DCHECK(var->index() > 0);
2803 DCHECK(var->IsStaticGlobalObjectProperty());
2804 // Each var occupies two slots in the context: for reads and writes.
2805 int slot_index = var->index() + 1;
2806 int depth = scope()->ContextChainLength(var->scope());
2807 __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
2808 Operand(Smi::FromInt(depth)));
2809 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
2810 Operand(Smi::FromInt(slot_index)));
2811 __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
2812 Operand(var->name()));
2813 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r3));
2814 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2817 } else if (var->mode() == LET && op != Token::INIT_LET) {
2818 // Non-initializing assignment to let variable needs a write barrier.
2819 DCHECK(!var->IsLookupSlot());
2820 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2822 MemOperand location = VarOperand(var, r4);
2823 __ LoadP(r6, location);
2824 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2826 __ mov(r6, Operand(var->name()));
2828 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2829 // Perform the assignment.
2831 EmitStoreToStackLocalOrContextSlot(var, location);
2833 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2834 // Assignment to const variable needs a write barrier.
2835 DCHECK(!var->IsLookupSlot());
2836 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2838 MemOperand location = VarOperand(var, r4);
2839 __ LoadP(r6, location);
2840 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2841 __ bne(&const_error);
2842 __ mov(r6, Operand(var->name()));
2844 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2845 __ bind(&const_error);
2846 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2848 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2849 if (var->IsLookupSlot()) {
2850 // Assignment to var.
2851 __ push(r3); // Value.
2852 __ mov(r4, Operand(var->name()));
2853 __ mov(r3, Operand(Smi::FromInt(language_mode())));
2854 __ Push(cp, r4, r3); // Context, name, language mode.
2855 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2857 // Assignment to var or initializing assignment to let/const in harmony
2859 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2860 MemOperand location = VarOperand(var, r4);
2861 if (generate_debug_code_ && op == Token::INIT_LET) {
2862 // Check for an uninitialized let binding.
2863 __ LoadP(r5, location);
2864 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2865 __ Check(eq, kLetBindingReInitialization);
2867 EmitStoreToStackLocalOrContextSlot(var, location);
2869 } else if (op == Token::INIT_CONST_LEGACY) {
2870 // Const initializers need a write barrier.
2871 DCHECK(var->mode() == CONST_LEGACY);
2872 DCHECK(!var->IsParameter()); // No const parameters.
2873 if (var->IsLookupSlot()) {
2875 __ mov(r3, Operand(var->name()));
2876 __ Push(cp, r3); // Context and name.
2877 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2879 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2881 MemOperand location = VarOperand(var, r4);
2882 __ LoadP(r5, location);
2883 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2885 EmitStoreToStackLocalOrContextSlot(var, location);
2890 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2891 if (is_strict(language_mode())) {
2892 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2894 // Silently ignore store in sloppy mode.
2899 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2900 // Assignment to a property, using a named store IC.
2901 Property* prop = expr->target()->AsProperty();
2902 DCHECK(prop != NULL);
2903 DCHECK(prop->key()->IsLiteral());
2905 __ mov(StoreDescriptor::NameRegister(),
2906 Operand(prop->key()->AsLiteral()->value()));
2907 __ pop(StoreDescriptor::ReceiverRegister());
2908 if (FLAG_vector_stores) {
2909 EmitLoadStoreICSlot(expr->AssignmentSlot());
2912 CallStoreIC(expr->AssignmentFeedbackId());
2915 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2916 context()->Plug(r3);
2920 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2921 // Assignment to named property of super.
2923 // stack : receiver ('this'), home_object
2924 DCHECK(prop != NULL);
2925 Literal* key = prop->key()->AsLiteral();
2926 DCHECK(key != NULL);
2928 __ Push(key->value());
2930 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2931 : Runtime::kStoreToSuper_Sloppy),
2936 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2937 // Assignment to named property of super.
2939 // stack : receiver ('this'), home_object, key
2940 DCHECK(prop != NULL);
2944 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2945 : Runtime::kStoreKeyedToSuper_Sloppy),
2950 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2951 // Assignment to a property, using a keyed store IC.
2952 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2953 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2956 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2957 if (FLAG_vector_stores) {
2958 EmitLoadStoreICSlot(expr->AssignmentSlot());
2961 CallIC(ic, expr->AssignmentFeedbackId());
2964 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2965 context()->Plug(r3);
2969 void FullCodeGenerator::VisitProperty(Property* expr) {
2970 Comment cmnt(masm_, "[ Property");
2971 SetExpressionPosition(expr);
2973 Expression* key = expr->key();
2975 if (key->IsPropertyName()) {
2976 if (!expr->IsSuperAccess()) {
2977 VisitForAccumulatorValue(expr->obj());
2978 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2979 EmitNamedPropertyLoad(expr);
2981 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2983 expr->obj()->AsSuperPropertyReference()->home_object());
2984 EmitNamedSuperPropertyLoad(expr);
2987 if (!expr->IsSuperAccess()) {
2988 VisitForStackValue(expr->obj());
2989 VisitForAccumulatorValue(expr->key());
2990 __ Move(LoadDescriptor::NameRegister(), r3);
2991 __ pop(LoadDescriptor::ReceiverRegister());
2992 EmitKeyedPropertyLoad(expr);
2994 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2996 expr->obj()->AsSuperPropertyReference()->home_object());
2997 VisitForStackValue(expr->key());
2998 EmitKeyedSuperPropertyLoad(expr);
3001 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
3002 context()->Plug(r3);
3006 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
3008 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
3012 // Code common for calls using the IC.
3013 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
3014 Expression* callee = expr->expression();
3016 CallICState::CallType call_type =
3017 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
3019 // Get the target function.
3020 if (call_type == CallICState::FUNCTION) {
3022 StackValueContext context(this);
3023 EmitVariableLoad(callee->AsVariableProxy());
3024 PrepareForBailout(callee, NO_REGISTERS);
3026 // Push undefined as receiver. This is patched in the method prologue if it
3027 // is a sloppy mode method.
3028 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3031 // Load the function from the receiver.
3032 DCHECK(callee->IsProperty());
3033 DCHECK(!callee->AsProperty()->IsSuperAccess());
3034 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3035 EmitNamedPropertyLoad(callee->AsProperty());
3036 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3037 // Push the target function under the receiver.
3038 __ LoadP(r0, MemOperand(sp, 0));
3040 __ StoreP(r3, MemOperand(sp, kPointerSize));
3043 EmitCall(expr, call_type);
3047 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
3048 Expression* callee = expr->expression();
3049 DCHECK(callee->IsProperty());
3050 Property* prop = callee->AsProperty();
3051 DCHECK(prop->IsSuperAccess());
3052 SetExpressionPosition(prop);
3054 Literal* key = prop->key()->AsLiteral();
3055 DCHECK(!key->value()->IsSmi());
3056 // Load the function from the receiver.
3057 const Register scratch = r4;
3058 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3059 VisitForAccumulatorValue(super_ref->home_object());
3061 VisitForAccumulatorValue(super_ref->this_var());
3062 __ Push(scratch, r3, r3, scratch);
3063 __ Push(key->value());
3064 __ Push(Smi::FromInt(language_mode()));
3068 // - this (receiver)
3069 // - this (receiver) <-- LoadFromSuper will pop here and below.
3073 __ CallRuntime(Runtime::kLoadFromSuper, 4);
3075 // Replace home_object with target function.
3076 __ StoreP(r3, MemOperand(sp, kPointerSize));
3079 // - target function
3080 // - this (receiver)
3081 EmitCall(expr, CallICState::METHOD);
3085 // Code common for calls using the IC.
3086 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
3088 VisitForAccumulatorValue(key);
3090 Expression* callee = expr->expression();
3092 // Load the function from the receiver.
3093 DCHECK(callee->IsProperty());
3094 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3095 __ Move(LoadDescriptor::NameRegister(), r3);
3096 EmitKeyedPropertyLoad(callee->AsProperty());
3097 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3099 // Push the target function under the receiver.
3100 __ LoadP(ip, MemOperand(sp, 0));
3102 __ StoreP(r3, MemOperand(sp, kPointerSize));
3104 EmitCall(expr, CallICState::METHOD);
3108 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3109 Expression* callee = expr->expression();
3110 DCHECK(callee->IsProperty());
3111 Property* prop = callee->AsProperty();
3112 DCHECK(prop->IsSuperAccess());
3114 SetExpressionPosition(prop);
3115 // Load the function from the receiver.
3116 const Register scratch = r4;
3117 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3118 VisitForAccumulatorValue(super_ref->home_object());
3120 VisitForAccumulatorValue(super_ref->this_var());
3121 __ Push(scratch, r3, r3, scratch);
3122 VisitForStackValue(prop->key());
3123 __ Push(Smi::FromInt(language_mode()));
3127 // - this (receiver)
3128 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3132 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3134 // Replace home_object with target function.
3135 __ StoreP(r3, MemOperand(sp, kPointerSize));
3138 // - target function
3139 // - this (receiver)
3140 EmitCall(expr, CallICState::METHOD);
3144 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3145 // Load the arguments.
3146 ZoneList<Expression*>* args = expr->arguments();
3147 int arg_count = args->length();
3148 for (int i = 0; i < arg_count; i++) {
3149 VisitForStackValue(args->at(i));
3152 SetCallPosition(expr, arg_count);
3153 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3154 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
3155 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3156 // Don't assign a type feedback id to the IC, since type feedback is provided
3157 // by the vector above.
3160 RecordJSReturnSite(expr);
3161 // Restore context register.
3162 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3163 context()->DropAndPlug(1, r3);
3167 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3168 // r7: copy of the first argument or undefined if it doesn't exist.
3169 if (arg_count > 0) {
3170 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
3172 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3175 // r6: the receiver of the enclosing function.
3176 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3178 // r5: language mode.
3179 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
3181 // r4: the start position of the scope the calls resides in.
3182 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3184 // Do the runtime call.
3185 __ Push(r7, r6, r5, r4);
3186 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3190 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3191 SuperCallReference* super_ref, FeedbackVectorICSlot slot) {
3192 Variable* this_var = super_ref->this_var()->var();
3193 GetVar(r4, this_var);
3194 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
3195 Label uninitialized_this;
3196 __ beq(&uninitialized_this);
3197 __ mov(r4, Operand(this_var->name()));
3199 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3200 __ bind(&uninitialized_this);
3202 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3206 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3207 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3208 VariableProxy* callee = expr->expression()->AsVariableProxy();
3209 if (callee->var()->IsLookupSlot()) {
3211 SetExpressionPosition(callee);
3212 // Generate code for loading from variables potentially shadowed by
3213 // eval-introduced variables.
3214 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3217 // Call the runtime to find the function to call (returned in r3) and
3218 // the object holding it (returned in r4).
3219 DCHECK(!context_register().is(r5));
3220 __ mov(r5, Operand(callee->name()));
3221 __ Push(context_register(), r5);
3222 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3223 __ Push(r3, r4); // Function, receiver.
3224 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3226 // If fast case code has been generated, emit code to push the function
3227 // and receiver and have the slow path jump around this code.
3228 if (done.is_linked()) {
3234 // Pass undefined as the receiver, which is the WithBaseObject of a
3235 // non-object environment record. If the callee is sloppy, it will patch
3236 // it up to be the global receiver.
3237 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3242 VisitForStackValue(callee);
3243 // refEnv.WithBaseObject()
3244 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3245 __ push(r5); // Reserved receiver slot.
3250 void FullCodeGenerator::VisitCall(Call* expr) {
3252 // We want to verify that RecordJSReturnSite gets called on all paths
3253 // through this function. Avoid early returns.
3254 expr->return_is_recorded_ = false;
3257 Comment cmnt(masm_, "[ Call");
3258 Expression* callee = expr->expression();
3259 Call::CallType call_type = expr->GetCallType(isolate());
3261 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3262 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3263 // to resolve the function we need to call. Then we call the resolved
3264 // function using the given arguments.
3265 ZoneList<Expression*>* args = expr->arguments();
3266 int arg_count = args->length();
3268 PushCalleeAndWithBaseObject(expr);
3270 // Push the arguments.
3271 for (int i = 0; i < arg_count; i++) {
3272 VisitForStackValue(args->at(i));
3275 // Push a copy of the function (found below the arguments) and
3277 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3279 EmitResolvePossiblyDirectEval(arg_count);
3281 // Touch up the stack with the resolved function.
3282 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3284 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3286 // Record source position for debugger.
3287 SetCallPosition(expr, arg_count);
3288 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3289 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3291 RecordJSReturnSite(expr);
3292 // Restore context register.
3293 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3294 context()->DropAndPlug(1, r3);
3295 } else if (call_type == Call::GLOBAL_CALL) {
3296 EmitCallWithLoadIC(expr);
3298 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3299 // Call to a lookup slot (dynamically introduced variable).
3300 PushCalleeAndWithBaseObject(expr);
3302 } else if (call_type == Call::PROPERTY_CALL) {
3303 Property* property = callee->AsProperty();
3304 bool is_named_call = property->key()->IsPropertyName();
3305 if (property->IsSuperAccess()) {
3306 if (is_named_call) {
3307 EmitSuperCallWithLoadIC(expr);
3309 EmitKeyedSuperCallWithLoadIC(expr);
3312 VisitForStackValue(property->obj());
3313 if (is_named_call) {
3314 EmitCallWithLoadIC(expr);
3316 EmitKeyedCallWithLoadIC(expr, property->key());
3319 } else if (call_type == Call::SUPER_CALL) {
3320 EmitSuperConstructorCall(expr);
3322 DCHECK(call_type == Call::OTHER_CALL);
3323 // Call to an arbitrary expression not handled specially above.
3324 VisitForStackValue(callee);
3325 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3327 // Emit function call.
3332 // RecordJSReturnSite should have been called.
3333 DCHECK(expr->return_is_recorded_);
3338 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3339 Comment cmnt(masm_, "[ CallNew");
3340 // According to ECMA-262, section 11.2.2, page 44, the function
3341 // expression in new calls must be evaluated before the
3344 // Push constructor on the stack. If it's not a function it's used as
3345 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3347 DCHECK(!expr->expression()->IsSuperPropertyReference());
3348 VisitForStackValue(expr->expression());
3350 // Push the arguments ("left-to-right") on the stack.
3351 ZoneList<Expression*>* args = expr->arguments();
3352 int arg_count = args->length();
3353 for (int i = 0; i < arg_count; i++) {
3354 VisitForStackValue(args->at(i));
3357 // Call the construct call builtin that handles allocation and
3358 // constructor invocation.
3359 SetConstructCallPosition(expr);
3361 // Load function and argument count into r4 and r3.
3362 __ mov(r3, Operand(arg_count));
3363 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3365 // Record call targets in unoptimized code.
3366 if (FLAG_pretenuring_call_new) {
3367 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3368 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3369 expr->CallNewFeedbackSlot().ToInt() + 1);
3372 __ Move(r5, FeedbackVector());
3373 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3375 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3376 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3377 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3378 context()->Plug(r3);
3382 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3383 SuperCallReference* super_call_ref =
3384 expr->expression()->AsSuperCallReference();
3385 DCHECK_NOT_NULL(super_call_ref);
3387 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3388 VisitForStackValue(new_target_proxy);
3390 EmitLoadSuperConstructor(super_call_ref);
3391 __ push(result_register());
3393 // Push the arguments ("left-to-right") on the stack.
3394 ZoneList<Expression*>* args = expr->arguments();
3395 int arg_count = args->length();
3396 for (int i = 0; i < arg_count; i++) {
3397 VisitForStackValue(args->at(i));
3400 // Call the construct call builtin that handles allocation and
3401 // constructor invocation.
3402 SetConstructCallPosition(expr);
3404 // Load function and argument count into r1 and r0.
3405 __ mov(r3, Operand(arg_count));
3406 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3408 // Record call targets in unoptimized code.
3409 if (FLAG_pretenuring_call_new) {
3411 /* TODO(dslomov): support pretenuring.
3412 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3413 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3414 expr->CallNewFeedbackSlot().ToInt() + 1);
3418 __ Move(r5, FeedbackVector());
3419 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
3421 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3422 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3426 RecordJSReturnSite(expr);
3428 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3429 context()->Plug(r3);
3433 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3434 ZoneList<Expression*>* args = expr->arguments();
3435 DCHECK(args->length() == 1);
3437 VisitForAccumulatorValue(args->at(0));
3439 Label materialize_true, materialize_false;
3440 Label* if_true = NULL;
3441 Label* if_false = NULL;
3442 Label* fall_through = NULL;
3443 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3444 &if_false, &fall_through);
3446 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3447 __ TestIfSmi(r3, r0);
3448 Split(eq, if_true, if_false, fall_through, cr0);
3450 context()->Plug(if_true, if_false);
3454 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3455 ZoneList<Expression*>* args = expr->arguments();
3456 DCHECK(args->length() == 1);
3458 VisitForAccumulatorValue(args->at(0));
3460 Label materialize_true, materialize_false;
3461 Label* if_true = NULL;
3462 Label* if_false = NULL;
3463 Label* fall_through = NULL;
3464 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3465 &if_false, &fall_through);
3467 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3468 __ TestIfPositiveSmi(r3, r0);
3469 Split(eq, if_true, if_false, fall_through, cr0);
3471 context()->Plug(if_true, if_false);
3475 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3476 ZoneList<Expression*>* args = expr->arguments();
3477 DCHECK(args->length() == 1);
3479 VisitForAccumulatorValue(args->at(0));
3481 Label materialize_true, materialize_false;
3482 Label* if_true = NULL;
3483 Label* if_false = NULL;
3484 Label* fall_through = NULL;
3485 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3486 &if_false, &fall_through);
3488 __ JumpIfSmi(r3, if_false);
3489 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3492 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3493 // Undetectable objects behave like undefined when tested with typeof.
3494 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3495 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3496 __ bne(if_false, cr0);
3497 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3498 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3500 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3501 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3502 Split(le, if_true, if_false, fall_through);
3504 context()->Plug(if_true, if_false);
3508 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3509 ZoneList<Expression*>* args = expr->arguments();
3510 DCHECK(args->length() == 1);
3512 VisitForAccumulatorValue(args->at(0));
3514 Label materialize_true, materialize_false;
3515 Label* if_true = NULL;
3516 Label* if_false = NULL;
3517 Label* fall_through = NULL;
3518 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3519 &if_false, &fall_through);
3521 __ JumpIfSmi(r3, if_false);
3522 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3523 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3524 Split(ge, if_true, if_false, fall_through);
3526 context()->Plug(if_true, if_false);
3530 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3531 ZoneList<Expression*>* args = expr->arguments();
3532 DCHECK(args->length() == 1);
3534 VisitForAccumulatorValue(args->at(0));
3536 Label materialize_true, materialize_false;
3537 Label* if_true = NULL;
3538 Label* if_false = NULL;
3539 Label* fall_through = NULL;
3540 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3541 &if_false, &fall_through);
3543 __ JumpIfSmi(r3, if_false);
3544 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3545 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3546 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3547 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3548 Split(ne, if_true, if_false, fall_through, cr0);
3550 context()->Plug(if_true, if_false);
3554 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3555 CallRuntime* expr) {
3556 ZoneList<Expression*>* args = expr->arguments();
3557 DCHECK(args->length() == 1);
3559 VisitForAccumulatorValue(args->at(0));
3561 Label materialize_true, materialize_false, skip_lookup;
3562 Label* if_true = NULL;
3563 Label* if_false = NULL;
3564 Label* fall_through = NULL;
3565 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3566 &if_false, &fall_through);
3568 __ AssertNotSmi(r3);
3570 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3571 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3572 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3573 __ bne(&skip_lookup, cr0);
3575 // Check for fast case object. Generate false result for slow case object.
3576 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3577 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3578 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3582 // Look for valueOf name in the descriptor array, and indicate false if
3583 // found. Since we omit an enumeration index check, if it is added via a
3584 // transition that shares its descriptor array, this is a false positive.
3585 Label entry, loop, done;
3587 // Skip loop if no descriptors are valid.
3588 __ NumberOfOwnDescriptors(r6, r4);
3589 __ cmpi(r6, Operand::Zero());
3592 __ LoadInstanceDescriptors(r4, r7);
3593 // r7: descriptor array.
3594 // r6: valid entries in the descriptor array.
3595 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3597 // Calculate location of the first key name.
3598 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3599 // Calculate the end of the descriptor array.
3601 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3604 // Loop through all the keys in the descriptor array. If one of these is the
3605 // string "valueOf" the result is false.
3606 // The use of ip to store the valueOf string assumes that it is not otherwise
3607 // used in the loop below.
3608 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3611 __ LoadP(r6, MemOperand(r7, 0));
3614 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3621 // Set the bit in the map to indicate that there is no local valueOf field.
3622 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3623 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3624 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3626 __ bind(&skip_lookup);
3628 // If a valueOf property is not found on the object check that its
3629 // prototype is the un-modified String prototype. If not result is false.
3630 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3631 __ JumpIfSmi(r5, if_false);
3632 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3633 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3634 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3636 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3638 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3639 Split(eq, if_true, if_false, fall_through);
3641 context()->Plug(if_true, if_false);
3645 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 DCHECK(args->length() == 1);
3649 VisitForAccumulatorValue(args->at(0));
3651 Label materialize_true, materialize_false;
3652 Label* if_true = NULL;
3653 Label* if_false = NULL;
3654 Label* fall_through = NULL;
3655 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3656 &if_false, &fall_through);
3658 __ JumpIfSmi(r3, if_false);
3659 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3660 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3661 Split(eq, if_true, if_false, fall_through);
3663 context()->Plug(if_true, if_false);
3667 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3668 ZoneList<Expression*>* args = expr->arguments();
3669 DCHECK(args->length() == 1);
3671 VisitForAccumulatorValue(args->at(0));
3673 Label materialize_true, materialize_false;
3674 Label* if_true = NULL;
3675 Label* if_false = NULL;
3676 Label* fall_through = NULL;
3677 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3678 &if_false, &fall_through);
3680 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3681 #if V8_TARGET_ARCH_PPC64
3682 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3683 __ li(r5, Operand(1));
3684 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3687 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3688 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3690 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3693 __ cmpi(r4, Operand::Zero());
3697 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3698 Split(eq, if_true, if_false, fall_through);
3700 context()->Plug(if_true, if_false);
3704 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3705 ZoneList<Expression*>* args = expr->arguments();
3706 DCHECK(args->length() == 1);
3708 VisitForAccumulatorValue(args->at(0));
3710 Label materialize_true, materialize_false;
3711 Label* if_true = NULL;
3712 Label* if_false = NULL;
3713 Label* fall_through = NULL;
3714 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3715 &if_false, &fall_through);
3717 __ JumpIfSmi(r3, if_false);
3718 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3719 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3720 Split(eq, if_true, if_false, fall_through);
3722 context()->Plug(if_true, if_false);
3726 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3727 ZoneList<Expression*>* args = expr->arguments();
3728 DCHECK(args->length() == 1);
3730 VisitForAccumulatorValue(args->at(0));
3732 Label materialize_true, materialize_false;
3733 Label* if_true = NULL;
3734 Label* if_false = NULL;
3735 Label* fall_through = NULL;
3736 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3737 &if_false, &fall_through);
3739 __ JumpIfSmi(r3, if_false);
3740 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
3741 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3742 Split(eq, if_true, if_false, fall_through);
3744 context()->Plug(if_true, if_false);
3748 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3749 ZoneList<Expression*>* args = expr->arguments();
3750 DCHECK(args->length() == 1);
3752 VisitForAccumulatorValue(args->at(0));
3754 Label materialize_true, materialize_false;
3755 Label* if_true = NULL;
3756 Label* if_false = NULL;
3757 Label* fall_through = NULL;
3758 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3759 &if_false, &fall_through);
3761 __ JumpIfSmi(r3, if_false);
3762 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3763 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3764 Split(eq, if_true, if_false, fall_through);
3766 context()->Plug(if_true, if_false);
3770 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3771 ZoneList<Expression*>* args = expr->arguments();
3772 DCHECK(args->length() == 1);
3774 VisitForAccumulatorValue(args->at(0));
3776 Label materialize_true, materialize_false;
3777 Label* if_true = NULL;
3778 Label* if_false = NULL;
3779 Label* fall_through = NULL;
3780 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3781 &if_false, &fall_through);
3783 __ JumpIfSmi(r3, if_false);
3785 Register type_reg = r5;
3786 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3787 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3788 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3789 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3790 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3791 Split(le, if_true, if_false, fall_through);
3793 context()->Plug(if_true, if_false);
3797 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3798 DCHECK(expr->arguments()->length() == 0);
3800 Label materialize_true, materialize_false;
3801 Label* if_true = NULL;
3802 Label* if_false = NULL;
3803 Label* fall_through = NULL;
3804 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3805 &if_false, &fall_through);
3807 // Get the frame pointer for the calling frame.
3808 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3810 // Skip the arguments adaptor frame if it exists.
3811 Label check_frame_marker;
3812 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3813 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3814 __ bne(&check_frame_marker);
3815 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3817 // Check the marker in the calling frame.
3818 __ bind(&check_frame_marker);
3819 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3820 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3821 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3822 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3823 Split(eq, if_true, if_false, fall_through);
3825 context()->Plug(if_true, if_false);
3829 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3830 ZoneList<Expression*>* args = expr->arguments();
3831 DCHECK(args->length() == 2);
3833 // Load the two objects into registers and perform the comparison.
3834 VisitForStackValue(args->at(0));
3835 VisitForAccumulatorValue(args->at(1));
3837 Label materialize_true, materialize_false;
3838 Label* if_true = NULL;
3839 Label* if_false = NULL;
3840 Label* fall_through = NULL;
3841 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3842 &if_false, &fall_through);
3846 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3847 Split(eq, if_true, if_false, fall_through);
3849 context()->Plug(if_true, if_false);
3853 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3854 ZoneList<Expression*>* args = expr->arguments();
3855 DCHECK(args->length() == 1);
3857 // ArgumentsAccessStub expects the key in r4 and the formal
3858 // parameter count in r3.
3859 VisitForAccumulatorValue(args->at(0));
3861 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3862 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3864 context()->Plug(r3);
3868 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3869 DCHECK(expr->arguments()->length() == 0);
3871 // Get the number of formal parameters.
3872 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3874 // Check if the calling frame is an arguments adaptor frame.
3875 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3876 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3877 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3880 // Arguments adaptor case: Read the arguments length from the
3882 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3885 context()->Plug(r3);
3889 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3890 ZoneList<Expression*>* args = expr->arguments();
3891 DCHECK(args->length() == 1);
3892 Label done, null, function, non_function_constructor;
3894 VisitForAccumulatorValue(args->at(0));
3896 // If the object is a smi, we return null.
3897 __ JumpIfSmi(r3, &null);
3899 // Check that the object is a JS object but take special care of JS
3900 // functions to make sure they have 'Function' as their class.
3901 // Assume that there are only two callable types, and one of them is at
3902 // either end of the type range for JS object types. Saves extra comparisons.
3903 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3904 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3905 // Map is now in r3.
3907 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3908 FIRST_SPEC_OBJECT_TYPE + 1);
3911 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3912 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3914 // Assume that there is no larger type.
3915 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3917 // Check if the constructor in the map is a JS function.
3918 Register instance_type = r5;
3919 __ GetMapConstructor(r3, r3, r4, instance_type);
3920 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3921 __ bne(&non_function_constructor);
3923 // r3 now contains the constructor function. Grab the
3924 // instance class name from there.
3925 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3927 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3930 // Functions have class 'Function'.
3932 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3935 // Objects with a non-function constructor have class 'Object'.
3936 __ bind(&non_function_constructor);
3937 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3940 // Non-JS objects have class null.
3942 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3947 context()->Plug(r3);
3951 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3952 // Load the arguments on the stack and call the stub.
3953 SubStringStub stub(isolate());
3954 ZoneList<Expression*>* args = expr->arguments();
3955 DCHECK(args->length() == 3);
3956 VisitForStackValue(args->at(0));
3957 VisitForStackValue(args->at(1));
3958 VisitForStackValue(args->at(2));
3960 context()->Plug(r3);
3964 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3965 // Load the arguments on the stack and call the stub.
3966 RegExpExecStub stub(isolate());
3967 ZoneList<Expression*>* args = expr->arguments();
3968 DCHECK(args->length() == 4);
3969 VisitForStackValue(args->at(0));
3970 VisitForStackValue(args->at(1));
3971 VisitForStackValue(args->at(2));
3972 VisitForStackValue(args->at(3));
3974 context()->Plug(r3);
3978 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3979 ZoneList<Expression*>* args = expr->arguments();
3980 DCHECK(args->length() == 1);
3981 VisitForAccumulatorValue(args->at(0)); // Load the object.
3984 // If the object is a smi return the object.
3985 __ JumpIfSmi(r3, &done);
3986 // If the object is not a value type, return the object.
3987 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3989 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3992 context()->Plug(r3);
3996 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3997 ZoneList<Expression*>* args = expr->arguments();
3998 DCHECK_EQ(1, args->length());
4000 VisitForAccumulatorValue(args->at(0));
4002 Label materialize_true, materialize_false;
4003 Label* if_true = nullptr;
4004 Label* if_false = nullptr;
4005 Label* fall_through = nullptr;
4006 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4007 &if_false, &fall_through);
4009 __ JumpIfSmi(r3, if_false);
4010 __ CompareObjectType(r3, r4, r4, JS_DATE_TYPE);
4011 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4012 Split(eq, if_true, if_false, fall_through);
4014 context()->Plug(if_true, if_false);
4018 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
4019 ZoneList<Expression*>* args = expr->arguments();
4020 DCHECK(args->length() == 2);
4021 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
4022 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
4024 VisitForAccumulatorValue(args->at(0)); // Load the object.
4026 Register object = r3;
4027 Register result = r3;
4028 Register scratch0 = r11;
4029 Register scratch1 = r4;
4031 if (index->value() == 0) {
4032 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
4034 Label runtime, done;
4035 if (index->value() < JSDate::kFirstUncachedField) {
4036 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
4037 __ mov(scratch1, Operand(stamp));
4038 __ LoadP(scratch1, MemOperand(scratch1));
4039 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
4040 __ cmp(scratch1, scratch0);
4043 FieldMemOperand(object, JSDate::kValueOffset +
4044 kPointerSize * index->value()),
4049 __ PrepareCallCFunction(2, scratch1);
4050 __ LoadSmiLiteral(r4, index);
4051 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
4055 context()->Plug(result);
4059 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
4060 ZoneList<Expression*>* args = expr->arguments();
4061 DCHECK_EQ(3, args->length());
4063 Register string = r3;
4064 Register index = r4;
4065 Register value = r5;
4067 VisitForStackValue(args->at(0)); // index
4068 VisitForStackValue(args->at(1)); // value
4069 VisitForAccumulatorValue(args->at(2)); // string
4070 __ Pop(index, value);
4072 if (FLAG_debug_code) {
4073 __ TestIfSmi(value, r0);
4074 __ Check(eq, kNonSmiValue, cr0);
4075 __ TestIfSmi(index, r0);
4076 __ Check(eq, kNonSmiIndex, cr0);
4077 __ SmiUntag(index, index);
4078 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
4079 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
4080 __ SmiTag(index, index);
4084 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4085 __ SmiToByteArrayOffset(r0, index);
4086 __ stbx(value, MemOperand(ip, r0));
4087 context()->Plug(string);
4091 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
4092 ZoneList<Expression*>* args = expr->arguments();
4093 DCHECK_EQ(3, args->length());
4095 Register string = r3;
4096 Register index = r4;
4097 Register value = r5;
4099 VisitForStackValue(args->at(0)); // index
4100 VisitForStackValue(args->at(1)); // value
4101 VisitForAccumulatorValue(args->at(2)); // string
4102 __ Pop(index, value);
4104 if (FLAG_debug_code) {
4105 __ TestIfSmi(value, r0);
4106 __ Check(eq, kNonSmiValue, cr0);
4107 __ TestIfSmi(index, r0);
4108 __ Check(eq, kNonSmiIndex, cr0);
4109 __ SmiUntag(index, index);
4110 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
4111 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
4112 __ SmiTag(index, index);
4116 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4117 __ SmiToShortArrayOffset(r0, index);
4118 __ sthx(value, MemOperand(ip, r0));
4119 context()->Plug(string);
4123 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
4124 // Load the arguments on the stack and call the runtime function.
4125 ZoneList<Expression*>* args = expr->arguments();
4126 DCHECK(args->length() == 2);
4127 VisitForStackValue(args->at(0));
4128 VisitForStackValue(args->at(1));
4129 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
4131 context()->Plug(r3);
4135 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4136 ZoneList<Expression*>* args = expr->arguments();
4137 DCHECK(args->length() == 2);
4138 VisitForStackValue(args->at(0)); // Load the object.
4139 VisitForAccumulatorValue(args->at(1)); // Load the value.
4140 __ pop(r4); // r3 = value. r4 = object.
4143 // If the object is a smi, return the value.
4144 __ JumpIfSmi(r4, &done);
4146 // If the object is not a value type, return the value.
4147 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
4151 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
4152 // Update the write barrier. Save the value as it will be
4153 // overwritten by the write barrier code and is needed afterward.
4155 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
4159 context()->Plug(r3);
4163 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4164 ZoneList<Expression*>* args = expr->arguments();
4165 DCHECK_EQ(args->length(), 1);
4166 // Load the argument into r3 and call the stub.
4167 VisitForAccumulatorValue(args->at(0));
4169 NumberToStringStub stub(isolate());
4171 context()->Plug(r3);
4175 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4176 ZoneList<Expression*>* args = expr->arguments();
4177 DCHECK(args->length() == 1);
4178 VisitForAccumulatorValue(args->at(0));
4181 StringCharFromCodeGenerator generator(r3, r4);
4182 generator.GenerateFast(masm_);
4185 NopRuntimeCallHelper call_helper;
4186 generator.GenerateSlow(masm_, call_helper);
4189 context()->Plug(r4);
4193 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4194 ZoneList<Expression*>* args = expr->arguments();
4195 DCHECK(args->length() == 2);
4196 VisitForStackValue(args->at(0));
4197 VisitForAccumulatorValue(args->at(1));
4199 Register object = r4;
4200 Register index = r3;
4201 Register result = r6;
4205 Label need_conversion;
4206 Label index_out_of_range;
4208 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
4209 &need_conversion, &index_out_of_range,
4210 STRING_INDEX_IS_NUMBER);
4211 generator.GenerateFast(masm_);
4214 __ bind(&index_out_of_range);
4215 // When the index is out of range, the spec requires us to return
4217 __ LoadRoot(result, Heap::kNanValueRootIndex);
4220 __ bind(&need_conversion);
4221 // Load the undefined value into the result register, which will
4222 // trigger conversion.
4223 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4226 NopRuntimeCallHelper call_helper;
4227 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4230 context()->Plug(result);
4234 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4235 ZoneList<Expression*>* args = expr->arguments();
4236 DCHECK(args->length() == 2);
4237 VisitForStackValue(args->at(0));
4238 VisitForAccumulatorValue(args->at(1));
4240 Register object = r4;
4241 Register index = r3;
4242 Register scratch = r6;
4243 Register result = r3;
4247 Label need_conversion;
4248 Label index_out_of_range;
4250 StringCharAtGenerator generator(object, index, scratch, result,
4251 &need_conversion, &need_conversion,
4252 &index_out_of_range, STRING_INDEX_IS_NUMBER);
4253 generator.GenerateFast(masm_);
4256 __ bind(&index_out_of_range);
4257 // When the index is out of range, the spec requires us to return
4258 // the empty string.
4259 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4262 __ bind(&need_conversion);
4263 // Move smi zero into the result register, which will trigger
4265 __ LoadSmiLiteral(result, Smi::FromInt(0));
4268 NopRuntimeCallHelper call_helper;
4269 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4272 context()->Plug(result);
4276 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4277 ZoneList<Expression*>* args = expr->arguments();
4278 DCHECK_EQ(2, args->length());
4279 VisitForStackValue(args->at(0));
4280 VisitForAccumulatorValue(args->at(1));
4283 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4285 context()->Plug(r3);
4289 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4290 ZoneList<Expression*>* args = expr->arguments();
4291 DCHECK_EQ(2, args->length());
4292 VisitForStackValue(args->at(0));
4293 VisitForStackValue(args->at(1));
4295 StringCompareStub stub(isolate());
4297 context()->Plug(r3);
4301 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4302 ZoneList<Expression*>* args = expr->arguments();
4303 DCHECK(args->length() >= 2);
4305 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4306 for (int i = 0; i < arg_count + 1; i++) {
4307 VisitForStackValue(args->at(i));
4309 VisitForAccumulatorValue(args->last()); // Function.
4311 Label runtime, done;
4312 // Check for non-function argument (including proxy).
4313 __ JumpIfSmi(r3, &runtime);
4314 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
4317 // InvokeFunction requires the function in r4. Move it in there.
4318 __ mr(r4, result_register());
4319 ParameterCount count(arg_count);
4320 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
4321 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4326 __ CallRuntime(Runtime::kCall, args->length());
4329 context()->Plug(r3);
4333 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4334 ZoneList<Expression*>* args = expr->arguments();
4335 DCHECK(args->length() == 2);
4338 VisitForStackValue(args->at(0));
4341 VisitForStackValue(args->at(1));
4342 __ CallRuntime(Runtime::kGetPrototype, 1);
4343 __ mr(r4, result_register());
4346 // Check if the calling frame is an arguments adaptor frame.
4347 Label adaptor_frame, args_set_up, runtime;
4348 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4349 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
4350 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
4351 __ beq(&adaptor_frame);
4353 // default constructor has no arguments, so no adaptor frame means no args.
4354 __ li(r3, Operand::Zero());
4357 // Copy arguments from adaptor frame.
4359 __ bind(&adaptor_frame);
4360 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
4363 // Get arguments pointer in r5.
4364 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
4366 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
4371 // Pre-decrement in order to skip receiver.
4372 __ LoadPU(r6, MemOperand(r5, -kPointerSize));
4377 __ bind(&args_set_up);
4378 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
4380 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4381 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4385 context()->Plug(result_register());
4389 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4390 RegExpConstructResultStub stub(isolate());
4391 ZoneList<Expression*>* args = expr->arguments();
4392 DCHECK(args->length() == 3);
4393 VisitForStackValue(args->at(0));
4394 VisitForStackValue(args->at(1));
4395 VisitForAccumulatorValue(args->at(2));
4398 context()->Plug(r3);
4402 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4403 ZoneList<Expression*>* args = expr->arguments();
4404 DCHECK_EQ(2, args->length());
4405 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4406 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4408 Handle<FixedArray> jsfunction_result_caches(
4409 isolate()->native_context()->jsfunction_result_caches());
4410 if (jsfunction_result_caches->length() <= cache_id) {
4411 __ Abort(kAttemptToUseUndefinedCache);
4412 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4413 context()->Plug(r3);
4417 VisitForAccumulatorValue(args->at(1));
4420 Register cache = r4;
4421 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4422 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4424 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4426 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
4428 Label done, not_found;
4429 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4430 // r5 now holds finger offset as a smi.
4431 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4432 // r6 now points to the start of fixed array elements.
4433 __ SmiToPtrArrayOffset(r5, r5);
4434 __ LoadPUX(r5, MemOperand(r6, r5));
4435 // r6 now points to the key of the pair.
4439 __ LoadP(r3, MemOperand(r6, kPointerSize));
4442 __ bind(¬_found);
4443 // Call runtime to perform the lookup.
4444 __ Push(cache, key);
4445 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4448 context()->Plug(r3);
4452 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4453 ZoneList<Expression*>* args = expr->arguments();
4454 VisitForAccumulatorValue(args->at(0));
4456 Label materialize_true, materialize_false;
4457 Label* if_true = NULL;
4458 Label* if_false = NULL;
4459 Label* fall_through = NULL;
4460 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4461 &if_false, &fall_through);
4463 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4464 // PPC - assume ip is free
4465 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4466 __ and_(r0, r3, ip);
4467 __ cmpi(r0, Operand::Zero());
4468 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4469 Split(eq, if_true, if_false, fall_through);
4471 context()->Plug(if_true, if_false);
4475 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4476 ZoneList<Expression*>* args = expr->arguments();
4477 DCHECK(args->length() == 1);
4478 VisitForAccumulatorValue(args->at(0));
4480 __ AssertString(r3);
4482 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4483 __ IndexFromHash(r3, r3);
4485 context()->Plug(r3);
4489 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4490 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4491 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4492 one_char_separator_loop_entry, long_separator_loop;
4493 ZoneList<Expression*>* args = expr->arguments();
4494 DCHECK(args->length() == 2);
4495 VisitForStackValue(args->at(1));
4496 VisitForAccumulatorValue(args->at(0));
4498 // All aliases of the same register have disjoint lifetimes.
4499 Register array = r3;
4500 Register elements = no_reg; // Will be r3.
4501 Register result = no_reg; // Will be r3.
4502 Register separator = r4;
4503 Register array_length = r5;
4504 Register result_pos = no_reg; // Will be r5
4505 Register string_length = r6;
4506 Register string = r7;
4507 Register element = r8;
4508 Register elements_end = r9;
4509 Register scratch1 = r10;
4510 Register scratch2 = r11;
4512 // Separator operand is on the stack.
4515 // Check that the array is a JSArray.
4516 __ JumpIfSmi(array, &bailout);
4517 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
4520 // Check that the array has fast elements.
4521 __ CheckFastElements(scratch1, scratch2, &bailout);
4523 // If the array has length zero, return the empty string.
4524 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4525 __ SmiUntag(array_length);
4526 __ cmpi(array_length, Operand::Zero());
4527 __ bne(&non_trivial_array);
4528 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
4531 __ bind(&non_trivial_array);
4533 // Get the FixedArray containing array's elements.
4535 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4536 array = no_reg; // End of array's live range.
4538 // Check that all array elements are sequential one-byte strings, and
4539 // accumulate the sum of their lengths, as a smi-encoded value.
4540 __ li(string_length, Operand::Zero());
4541 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4542 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4543 __ add(elements_end, element, elements_end);
4544 // Loop condition: while (element < elements_end).
4545 // Live values in registers:
4546 // elements: Fixed array of strings.
4547 // array_length: Length of the fixed array of strings (not smi)
4548 // separator: Separator string
4549 // string_length: Accumulated sum of string lengths (smi).
4550 // element: Current array element.
4551 // elements_end: Array end.
4552 if (generate_debug_code_) {
4553 __ cmpi(array_length, Operand::Zero());
4554 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4557 __ LoadP(string, MemOperand(element));
4558 __ addi(element, element, Operand(kPointerSize));
4559 __ JumpIfSmi(string, &bailout);
4560 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4561 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4562 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4563 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4565 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4567 __ BranchOnOverflow(&bailout);
4569 __ cmp(element, elements_end);
4572 // If array_length is 1, return elements[0], a string.
4573 __ cmpi(array_length, Operand(1));
4574 __ bne(¬_size_one_array);
4575 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
4578 __ bind(¬_size_one_array);
4580 // Live values in registers:
4581 // separator: Separator string
4582 // array_length: Length of the array.
4583 // string_length: Sum of string lengths (smi).
4584 // elements: FixedArray of strings.
4586 // Check that the separator is a flat one-byte string.
4587 __ JumpIfSmi(separator, &bailout);
4588 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4589 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4590 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4592 // Add (separator length times array_length) - separator length to the
4593 // string_length to get the length of the result string.
4595 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4596 __ sub(string_length, string_length, scratch1);
4597 #if V8_TARGET_ARCH_PPC64
4598 __ SmiUntag(scratch1, scratch1);
4599 __ Mul(scratch2, array_length, scratch1);
4600 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4602 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4603 __ bne(&bailout, cr0);
4604 __ SmiTag(scratch2, scratch2);
4606 // array_length is not smi but the other values are, so the result is a smi
4607 __ mullw(scratch2, array_length, scratch1);
4608 __ mulhw(ip, array_length, scratch1);
4609 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4611 __ cmpi(ip, Operand::Zero());
4613 __ cmpwi(scratch2, Operand::Zero());
4617 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4619 __ BranchOnOverflow(&bailout);
4620 __ SmiUntag(string_length);
4622 // Get first element in the array to free up the elements register to be used
4624 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4625 result = elements; // End of live range for elements.
4627 // Live values in registers:
4628 // element: First array element
4629 // separator: Separator string
4630 // string_length: Length of result string (not smi)
4631 // array_length: Length of the array.
4632 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4633 elements_end, &bailout);
4634 // Prepare for looping. Set up elements_end to end of the array. Set
4635 // result_pos to the position of the result where to write the first
4637 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4638 __ add(elements_end, element, elements_end);
4639 result_pos = array_length; // End of live range for array_length.
4640 array_length = no_reg;
4641 __ addi(result_pos, result,
4642 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4644 // Check the length of the separator.
4646 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4647 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4648 __ beq(&one_char_separator);
4649 __ bgt(&long_separator);
4651 // Empty separator case
4652 __ bind(&empty_separator_loop);
4653 // Live values in registers:
4654 // result_pos: the position to which we are currently copying characters.
4655 // element: Current array element.
4656 // elements_end: Array end.
4658 // Copy next array element to the result.
4659 __ LoadP(string, MemOperand(element));
4660 __ addi(element, element, Operand(kPointerSize));
4661 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4662 __ SmiUntag(string_length);
4663 __ addi(string, string,
4664 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4665 __ CopyBytes(string, result_pos, string_length, scratch1);
4666 __ cmp(element, elements_end);
4667 __ blt(&empty_separator_loop); // End while (element < elements_end).
4668 DCHECK(result.is(r3));
4671 // One-character separator case
4672 __ bind(&one_char_separator);
4673 // Replace separator with its one-byte character value.
4674 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4675 // Jump into the loop after the code that copies the separator, so the first
4676 // element is not preceded by a separator
4677 __ b(&one_char_separator_loop_entry);
4679 __ bind(&one_char_separator_loop);
4680 // Live values in registers:
4681 // result_pos: the position to which we are currently copying characters.
4682 // element: Current array element.
4683 // elements_end: Array end.
4684 // separator: Single separator one-byte char (in lower byte).
4686 // Copy the separator character to the result.
4687 __ stb(separator, MemOperand(result_pos));
4688 __ addi(result_pos, result_pos, Operand(1));
4690 // Copy next array element to the result.
4691 __ bind(&one_char_separator_loop_entry);
4692 __ LoadP(string, MemOperand(element));
4693 __ addi(element, element, Operand(kPointerSize));
4694 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4695 __ SmiUntag(string_length);
4696 __ addi(string, string,
4697 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4698 __ CopyBytes(string, result_pos, string_length, scratch1);
4699 __ cmpl(element, elements_end);
4700 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4701 DCHECK(result.is(r3));
4704 // Long separator case (separator is more than one character). Entry is at the
4705 // label long_separator below.
4706 __ bind(&long_separator_loop);
4707 // Live values in registers:
4708 // result_pos: the position to which we are currently copying characters.
4709 // element: Current array element.
4710 // elements_end: Array end.
4711 // separator: Separator string.
4713 // Copy the separator to the result.
4714 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4715 __ SmiUntag(string_length);
4716 __ addi(string, separator,
4717 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4718 __ CopyBytes(string, result_pos, string_length, scratch1);
4720 __ bind(&long_separator);
4721 __ LoadP(string, MemOperand(element));
4722 __ addi(element, element, Operand(kPointerSize));
4723 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4724 __ SmiUntag(string_length);
4725 __ addi(string, string,
4726 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4727 __ CopyBytes(string, result_pos, string_length, scratch1);
4728 __ cmpl(element, elements_end);
4729 __ blt(&long_separator_loop); // End while (element < elements_end).
4730 DCHECK(result.is(r3));
4734 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4736 context()->Plug(r3);
4740 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4741 DCHECK(expr->arguments()->length() == 0);
4742 ExternalReference debug_is_active =
4743 ExternalReference::debug_is_active_address(isolate());
4744 __ mov(ip, Operand(debug_is_active));
4745 __ lbz(r3, MemOperand(ip));
4747 context()->Plug(r3);
4751 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4752 // Assert: expr === CallRuntime("ReflectConstruct")
4753 DCHECK_EQ(1, expr->arguments()->length());
4754 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4756 ZoneList<Expression*>* args = call->arguments();
4757 DCHECK_EQ(3, args->length());
4759 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4760 DCHECK_NOT_NULL(super_call_ref);
4762 // Load ReflectConstruct function
4763 EmitLoadJSRuntimeFunction(call);
4765 // Push the target function under the receiver.
4766 __ LoadP(r0, MemOperand(sp, 0));
4768 __ StoreP(r3, MemOperand(sp, kPointerSize));
4770 // Push super constructor
4771 EmitLoadSuperConstructor(super_call_ref);
4772 __ Push(result_register());
4774 // Push arguments array
4775 VisitForStackValue(args->at(1));
4778 DCHECK(args->at(2)->IsVariableProxy());
4779 VisitForStackValue(args->at(2));
4781 EmitCallJSRuntimeFunction(call);
4783 // Restore context register.
4784 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4785 context()->DropAndPlug(1, r3);
4787 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4788 EmitInitializeThisAfterSuper(super_call_ref);
4792 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4793 // Push the builtins object as the receiver.
4794 Register receiver = LoadDescriptor::ReceiverRegister();
4795 __ LoadP(receiver, GlobalObjectOperand());
4796 __ LoadP(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4799 // Load the function from the receiver.
4800 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4801 __ mov(LoadDescriptor::SlotRegister(),
4802 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4803 CallLoadIC(NOT_INSIDE_TYPEOF);
4807 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4808 ZoneList<Expression*>* args = expr->arguments();
4809 int arg_count = args->length();
4811 SetCallPosition(expr, arg_count);
4812 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4813 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4818 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4819 ZoneList<Expression*>* args = expr->arguments();
4820 int arg_count = args->length();
4822 if (expr->is_jsruntime()) {
4823 Comment cmnt(masm_, "[ CallRuntime");
4824 EmitLoadJSRuntimeFunction(expr);
4826 // Push the target function under the receiver.
4827 __ LoadP(ip, MemOperand(sp, 0));
4829 __ StoreP(r3, MemOperand(sp, kPointerSize));
4831 // Push the arguments ("left-to-right").
4832 for (int i = 0; i < arg_count; i++) {
4833 VisitForStackValue(args->at(i));
4836 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4837 EmitCallJSRuntimeFunction(expr);
4839 // Restore context register.
4840 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4842 context()->DropAndPlug(1, r3);
4845 const Runtime::Function* function = expr->function();
4846 switch (function->function_id) {
4847 #define CALL_INTRINSIC_GENERATOR(Name) \
4848 case Runtime::kInline##Name: { \
4849 Comment cmnt(masm_, "[ Inline" #Name); \
4850 return Emit##Name(expr); \
4852 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4853 #undef CALL_INTRINSIC_GENERATOR
4855 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4856 // Push the arguments ("left-to-right").
4857 for (int i = 0; i < arg_count; i++) {
4858 VisitForStackValue(args->at(i));
4861 // Call the C runtime function.
4862 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4863 __ CallRuntime(expr->function(), arg_count);
4864 context()->Plug(r3);
4871 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4872 switch (expr->op()) {
4873 case Token::DELETE: {
4874 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4875 Property* property = expr->expression()->AsProperty();
4876 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4878 if (property != NULL) {
4879 VisitForStackValue(property->obj());
4880 VisitForStackValue(property->key());
4881 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
4883 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4884 context()->Plug(r3);
4885 } else if (proxy != NULL) {
4886 Variable* var = proxy->var();
4887 // Delete of an unqualified identifier is disallowed in strict mode but
4888 // "delete this" is allowed.
4889 bool is_this = var->HasThisName(isolate());
4890 DCHECK(is_sloppy(language_mode()) || is_this);
4891 if (var->IsUnallocatedOrGlobalSlot()) {
4892 __ LoadP(r5, GlobalObjectOperand());
4893 __ mov(r4, Operand(var->name()));
4894 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4895 __ Push(r5, r4, r3);
4896 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4897 context()->Plug(r3);
4898 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4899 // Result of deleting non-global, non-dynamic variables is false.
4900 // The subexpression does not have side effects.
4901 context()->Plug(is_this);
4903 // Non-global variable. Call the runtime to try to delete from the
4904 // context where the variable was introduced.
4905 DCHECK(!context_register().is(r5));
4906 __ mov(r5, Operand(var->name()));
4907 __ Push(context_register(), r5);
4908 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4909 context()->Plug(r3);
4912 // Result of deleting non-property, non-variable reference is true.
4913 // The subexpression may have side effects.
4914 VisitForEffect(expr->expression());
4915 context()->Plug(true);
4921 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4922 VisitForEffect(expr->expression());
4923 context()->Plug(Heap::kUndefinedValueRootIndex);
4928 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4929 if (context()->IsEffect()) {
4930 // Unary NOT has no side effects so it's only necessary to visit the
4931 // subexpression. Match the optimizing compiler by not branching.
4932 VisitForEffect(expr->expression());
4933 } else if (context()->IsTest()) {
4934 const TestContext* test = TestContext::cast(context());
4935 // The labels are swapped for the recursive call.
4936 VisitForControl(expr->expression(), test->false_label(),
4937 test->true_label(), test->fall_through());
4938 context()->Plug(test->true_label(), test->false_label());
4940 // We handle value contexts explicitly rather than simply visiting
4941 // for control and plugging the control flow into the context,
4942 // because we need to prepare a pair of extra administrative AST ids
4943 // for the optimizing compiler.
4944 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4945 Label materialize_true, materialize_false, done;
4946 VisitForControl(expr->expression(), &materialize_false,
4947 &materialize_true, &materialize_true);
4948 __ bind(&materialize_true);
4949 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4950 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4951 if (context()->IsStackValue()) __ push(r3);
4953 __ bind(&materialize_false);
4954 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4955 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4956 if (context()->IsStackValue()) __ push(r3);
4962 case Token::TYPEOF: {
4963 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4965 AccumulatorValueContext context(this);
4966 VisitForTypeofValue(expr->expression());
4969 TypeofStub typeof_stub(isolate());
4970 __ CallStub(&typeof_stub);
4971 context()->Plug(r3);
4981 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4982 DCHECK(expr->expression()->IsValidReferenceExpression());
4984 Comment cmnt(masm_, "[ CountOperation");
4986 Property* prop = expr->expression()->AsProperty();
4987 LhsKind assign_type = Property::GetAssignType(prop);
4989 // Evaluate expression and get value.
4990 if (assign_type == VARIABLE) {
4991 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4992 AccumulatorValueContext context(this);
4993 EmitVariableLoad(expr->expression()->AsVariableProxy());
4995 // Reserve space for result of postfix operation.
4996 if (expr->is_postfix() && !context()->IsEffect()) {
4997 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5000 switch (assign_type) {
5001 case NAMED_PROPERTY: {
5002 // Put the object both on the stack and in the register.
5003 VisitForStackValue(prop->obj());
5004 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
5005 EmitNamedPropertyLoad(prop);
5009 case NAMED_SUPER_PROPERTY: {
5010 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
5011 VisitForAccumulatorValue(
5012 prop->obj()->AsSuperPropertyReference()->home_object());
5013 __ Push(result_register());
5014 const Register scratch = r4;
5015 __ LoadP(scratch, MemOperand(sp, kPointerSize));
5016 __ Push(scratch, result_register());
5017 EmitNamedSuperPropertyLoad(prop);
5021 case KEYED_SUPER_PROPERTY: {
5022 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
5023 VisitForAccumulatorValue(
5024 prop->obj()->AsSuperPropertyReference()->home_object());
5025 const Register scratch = r4;
5026 const Register scratch1 = r5;
5027 __ mr(scratch, result_register());
5028 VisitForAccumulatorValue(prop->key());
5029 __ Push(scratch, result_register());
5030 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
5031 __ Push(scratch1, scratch, result_register());
5032 EmitKeyedSuperPropertyLoad(prop);
5036 case KEYED_PROPERTY: {
5037 VisitForStackValue(prop->obj());
5038 VisitForStackValue(prop->key());
5039 __ LoadP(LoadDescriptor::ReceiverRegister(),
5040 MemOperand(sp, 1 * kPointerSize));
5041 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
5042 EmitKeyedPropertyLoad(prop);
5051 // We need a second deoptimization point after loading the value
5052 // in case evaluating the property load my have a side effect.
5053 if (assign_type == VARIABLE) {
5054 PrepareForBailout(expr->expression(), TOS_REG);
5056 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
5059 // Inline smi case if we are in a loop.
5060 Label stub_call, done;
5061 JumpPatchSite patch_site(masm_);
5063 int count_value = expr->op() == Token::INC ? 1 : -1;
5064 if (ShouldInlineSmiCase(expr->op())) {
5066 patch_site.EmitJumpIfNotSmi(r3, &slow);
5068 // Save result for postfix expressions.
5069 if (expr->is_postfix()) {
5070 if (!context()->IsEffect()) {
5071 // Save the result on the stack. If we have a named or keyed property
5072 // we store the result under the receiver that is currently on top
5074 switch (assign_type) {
5078 case NAMED_PROPERTY:
5079 __ StoreP(r3, MemOperand(sp, kPointerSize));
5081 case NAMED_SUPER_PROPERTY:
5082 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5084 case KEYED_PROPERTY:
5085 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5087 case KEYED_SUPER_PROPERTY:
5088 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
5094 Register scratch1 = r4;
5095 Register scratch2 = r5;
5096 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
5097 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
5098 __ BranchOnNoOverflow(&done);
5099 // Call stub. Undo operation first.
5100 __ sub(r3, r3, scratch1);
5104 if (!is_strong(language_mode())) {
5105 ToNumberStub convert_stub(isolate());
5106 __ CallStub(&convert_stub);
5107 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5110 // Save result for postfix expressions.
5111 if (expr->is_postfix()) {
5112 if (!context()->IsEffect()) {
5113 // Save the result on the stack. If we have a named or keyed property
5114 // we store the result under the receiver that is currently on top
5116 switch (assign_type) {
5120 case NAMED_PROPERTY:
5121 __ StoreP(r3, MemOperand(sp, kPointerSize));
5123 case NAMED_SUPER_PROPERTY:
5124 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5126 case KEYED_PROPERTY:
5127 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
5129 case KEYED_SUPER_PROPERTY:
5130 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
5136 __ bind(&stub_call);
5138 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
5140 SetExpressionPosition(expr);
5142 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
5143 strength(language_mode())).code();
5144 CallIC(code, expr->CountBinOpFeedbackId());
5145 patch_site.EmitPatchInfo();
5148 if (is_strong(language_mode())) {
5149 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5151 // Store the value returned in r3.
5152 switch (assign_type) {
5154 if (expr->is_postfix()) {
5156 EffectContext context(this);
5157 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5158 Token::ASSIGN, expr->CountSlot());
5159 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5162 // For all contexts except EffectConstant We have the result on
5163 // top of the stack.
5164 if (!context()->IsEffect()) {
5165 context()->PlugTOS();
5168 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5169 Token::ASSIGN, expr->CountSlot());
5170 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5171 context()->Plug(r3);
5174 case NAMED_PROPERTY: {
5175 __ mov(StoreDescriptor::NameRegister(),
5176 Operand(prop->key()->AsLiteral()->value()));
5177 __ pop(StoreDescriptor::ReceiverRegister());
5178 if (FLAG_vector_stores) {
5179 EmitLoadStoreICSlot(expr->CountSlot());
5182 CallStoreIC(expr->CountStoreFeedbackId());
5184 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5185 if (expr->is_postfix()) {
5186 if (!context()->IsEffect()) {
5187 context()->PlugTOS();
5190 context()->Plug(r3);
5194 case NAMED_SUPER_PROPERTY: {
5195 EmitNamedSuperPropertyStore(prop);
5196 if (expr->is_postfix()) {
5197 if (!context()->IsEffect()) {
5198 context()->PlugTOS();
5201 context()->Plug(r3);
5205 case KEYED_SUPER_PROPERTY: {
5206 EmitKeyedSuperPropertyStore(prop);
5207 if (expr->is_postfix()) {
5208 if (!context()->IsEffect()) {
5209 context()->PlugTOS();
5212 context()->Plug(r3);
5216 case KEYED_PROPERTY: {
5217 __ Pop(StoreDescriptor::ReceiverRegister(),
5218 StoreDescriptor::NameRegister());
5220 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5221 if (FLAG_vector_stores) {
5222 EmitLoadStoreICSlot(expr->CountSlot());
5225 CallIC(ic, expr->CountStoreFeedbackId());
5227 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5228 if (expr->is_postfix()) {
5229 if (!context()->IsEffect()) {
5230 context()->PlugTOS();
5233 context()->Plug(r3);
5241 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5242 Expression* sub_expr,
5243 Handle<String> check) {
5244 Label materialize_true, materialize_false;
5245 Label* if_true = NULL;
5246 Label* if_false = NULL;
5247 Label* fall_through = NULL;
5248 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5249 &if_false, &fall_through);
5252 AccumulatorValueContext context(this);
5253 VisitForTypeofValue(sub_expr);
5255 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5257 Factory* factory = isolate()->factory();
5258 if (String::Equals(check, factory->number_string())) {
5259 __ JumpIfSmi(r3, if_true);
5260 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5261 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
5263 Split(eq, if_true, if_false, fall_through);
5264 } else if (String::Equals(check, factory->string_string())) {
5265 __ JumpIfSmi(r3, if_false);
5266 // Check for undetectable objects => false.
5267 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
5269 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5270 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
5271 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5272 Split(eq, if_true, if_false, fall_through, cr0);
5273 } else if (String::Equals(check, factory->symbol_string())) {
5274 __ JumpIfSmi(r3, if_false);
5275 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
5276 Split(eq, if_true, if_false, fall_through);
5277 } else if (String::Equals(check, factory->boolean_string())) {
5278 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
5280 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
5281 Split(eq, if_true, if_false, fall_through);
5282 } else if (String::Equals(check, factory->undefined_string())) {
5283 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
5285 __ JumpIfSmi(r3, if_false);
5286 // Check for undetectable objects => true.
5287 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5288 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5289 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5290 Split(ne, if_true, if_false, fall_through, cr0);
5292 } else if (String::Equals(check, factory->function_string())) {
5293 __ JumpIfSmi(r3, if_false);
5294 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5295 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
5297 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
5298 Split(eq, if_true, if_false, fall_through);
5299 } else if (String::Equals(check, factory->object_string())) {
5300 __ JumpIfSmi(r3, if_false);
5301 __ CompareRoot(r3, Heap::kNullValueRootIndex);
5303 // Check for JS objects => true.
5304 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
5306 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5308 // Check for undetectable objects => false.
5309 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5310 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5311 Split(eq, if_true, if_false, fall_through, cr0);
5313 if (if_false != fall_through) __ b(if_false);
5315 context()->Plug(if_true, if_false);
5319 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5320 Comment cmnt(masm_, "[ CompareOperation");
5321 SetExpressionPosition(expr);
5323 // First we try a fast inlined version of the compare when one of
5324 // the operands is a literal.
5325 if (TryLiteralCompare(expr)) return;
5327 // Always perform the comparison for its control flow. Pack the result
5328 // into the expression's context after the comparison is performed.
5329 Label materialize_true, materialize_false;
5330 Label* if_true = NULL;
5331 Label* if_false = NULL;
5332 Label* fall_through = NULL;
5333 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5334 &if_false, &fall_through);
5336 Token::Value op = expr->op();
5337 VisitForStackValue(expr->left());
5340 VisitForStackValue(expr->right());
5341 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5342 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5343 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
5345 Split(eq, if_true, if_false, fall_through);
5348 case Token::INSTANCEOF: {
5349 VisitForStackValue(expr->right());
5350 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5352 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5353 // The stub returns 0 for true.
5354 __ cmpi(r3, Operand::Zero());
5355 Split(eq, if_true, if_false, fall_through);
5360 VisitForAccumulatorValue(expr->right());
5361 Condition cond = CompareIC::ComputeCondition(op);
5364 bool inline_smi_code = ShouldInlineSmiCase(op);
5365 JumpPatchSite patch_site(masm_);
5366 if (inline_smi_code) {
5369 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
5371 Split(cond, if_true, if_false, NULL);
5372 __ bind(&slow_case);
5375 Handle<Code> ic = CodeFactory::CompareIC(
5376 isolate(), op, strength(language_mode())).code();
5377 CallIC(ic, expr->CompareOperationFeedbackId());
5378 patch_site.EmitPatchInfo();
5379 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5380 __ cmpi(r3, Operand::Zero());
5381 Split(cond, if_true, if_false, fall_through);
5385 // Convert the result of the comparison into one expected for this
5386 // expression's context.
5387 context()->Plug(if_true, if_false);
5391 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5392 Expression* sub_expr,
5394 Label materialize_true, materialize_false;
5395 Label* if_true = NULL;
5396 Label* if_false = NULL;
5397 Label* fall_through = NULL;
5398 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5399 &if_false, &fall_through);
5401 VisitForAccumulatorValue(sub_expr);
5402 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5403 if (expr->op() == Token::EQ_STRICT) {
5404 Heap::RootListIndex nil_value = nil == kNullValue
5405 ? Heap::kNullValueRootIndex
5406 : Heap::kUndefinedValueRootIndex;
5407 __ LoadRoot(r4, nil_value);
5409 Split(eq, if_true, if_false, fall_through);
5411 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5412 CallIC(ic, expr->CompareOperationFeedbackId());
5413 __ cmpi(r3, Operand::Zero());
5414 Split(ne, if_true, if_false, fall_through);
5416 context()->Plug(if_true, if_false);
5420 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5421 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5422 context()->Plug(r3);
5426 Register FullCodeGenerator::result_register() { return r3; }
5429 Register FullCodeGenerator::context_register() { return cp; }
5432 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5433 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5434 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5438 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5439 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5443 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5444 Scope* declaration_scope = scope()->DeclarationScope();
5445 if (declaration_scope->is_script_scope() ||
5446 declaration_scope->is_module_scope()) {
5447 // Contexts nested in the native context have a canonical empty function
5448 // as their closure, not the anonymous closure containing the global
5449 // code. Pass a smi sentinel and let the runtime look up the empty
5451 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5452 } else if (declaration_scope->is_eval_scope()) {
5453 // Contexts created by a call to eval have the same closure as the
5454 // context calling eval, not the anonymous closure containing the eval
5455 // code. Fetch it from the context.
5456 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5458 DCHECK(declaration_scope->is_function_scope());
5459 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5465 // ----------------------------------------------------------------------------
5466 // Non-local control flow support.
5468 void FullCodeGenerator::EnterFinallyBlock() {
5469 DCHECK(!result_register().is(r4));
5470 // Store result register while executing finally block.
5471 __ push(result_register());
5472 // Cook return address in link register to stack (smi encoded Code* delta)
5474 __ mov(ip, Operand(masm_->CodeObject()));
5478 // Store result register while executing finally block.
5481 // Store pending message while executing finally block.
5482 ExternalReference pending_message_obj =
5483 ExternalReference::address_of_pending_message_obj(isolate());
5484 __ mov(ip, Operand(pending_message_obj));
5485 __ LoadP(r4, MemOperand(ip));
5488 ClearPendingMessage();
5492 void FullCodeGenerator::ExitFinallyBlock() {
5493 DCHECK(!result_register().is(r4));
5494 // Restore pending message from stack.
5496 ExternalReference pending_message_obj =
5497 ExternalReference::address_of_pending_message_obj(isolate());
5498 __ mov(ip, Operand(pending_message_obj));
5499 __ StoreP(r4, MemOperand(ip));
5501 // Restore result register from stack.
5504 // Uncook return address and return.
5505 __ pop(result_register());
5507 __ mov(ip, Operand(masm_->CodeObject()));
5514 void FullCodeGenerator::ClearPendingMessage() {
5515 DCHECK(!result_register().is(r4));
5516 ExternalReference pending_message_obj =
5517 ExternalReference::address_of_pending_message_obj(isolate());
5518 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5519 __ mov(ip, Operand(pending_message_obj));
5520 __ StoreP(r4, MemOperand(ip));
5524 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5525 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5526 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5527 Operand(SmiFromSlot(slot)));
5534 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5535 BackEdgeState target_state,
5536 Code* replacement_code) {
5537 Address mov_address = Assembler::target_address_from_return_address(pc);
5538 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5539 CodePatcher patcher(cmp_address, 1);
5541 switch (target_state) {
5543 // <decrement profiling counter>
5545 // bge <ok> ;; not changed
5546 // mov r12, <interrupt stub address>
5549 // <reset profiling counter>
5551 patcher.masm()->cmpi(r6, Operand::Zero());
5554 case ON_STACK_REPLACEMENT:
5555 case OSR_AFTER_STACK_CHECK:
5556 // <decrement profiling counter>
5558 // bge <ok> ;; not changed
5559 // mov r12, <on-stack replacement address>
5562 // <reset profiling counter>
5563 // ok-label ----- pc_after points here
5565 // Set the LT bit such that bge is a NOP
5566 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
5570 // Replace the stack check address in the mov sequence with the
5571 // entry address of the replacement code.
5572 Assembler::set_target_address_at(mov_address, unoptimized_code,
5573 replacement_code->entry());
5575 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5576 unoptimized_code, mov_address, replacement_code);
5580 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5581 Isolate* isolate, Code* unoptimized_code, Address pc) {
5582 Address mov_address = Assembler::target_address_from_return_address(pc);
5583 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5584 Address interrupt_address =
5585 Assembler::target_address_at(mov_address, unoptimized_code);
5587 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
5588 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
5592 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5594 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5595 return ON_STACK_REPLACEMENT;
5598 DCHECK(interrupt_address ==
5599 isolate->builtins()->OsrAfterStackCheck()->entry());
5600 return OSR_AFTER_STACK_CHECK;
5602 } // namespace internal
5604 #endif // V8_TARGET_ARCH_PPC