// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (the raw
// 16-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
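//
// As an illustrative worked example (numbers chosen arbitrarily): if the
// patch site sits 5 instructions before the marker, delta_to_patch_site is 5,
// EmitPatchInfo picks register code 5 / kOff16Mask == 0 and immediate
// 5 % kOff16Mask == 5, so the marker is `cmpi r0, #5`; the patcher recovers
// the delta as 0 * 0x0000ffff + 5 == 5.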
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // The register code carries the high bits of the delta to the patch
      // site; the 16-bit immediate carries the low bits.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
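//
// A rough sketch of that layout (frames-ppc.h is authoritative; the offsets
// here are indicative only):
//   fp + kCallerSPOffset : incoming receiver and arguments
//   fp + kPointerSize    : saved lr of the caller
//   fp                   : saved fp of the caller
//   fp - kPointerSize    : context (cp)
//   fp - 2 * kPointerSize: JSFunction
//   fp - 3 * kPointerSize: first stack local (kLocal0Offset)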
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(function()->handler_count()), TENURED));

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
    __ bne(&ok);
    __ LoadP(r5, GlobalObjectOperand());
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
    __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
  info->AddNoFrameRange(0, masm_->pc_offset());

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }
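
  // Worked example for the locals allocation above: with locals_count == 70
  // and kMaxPushes == 32, the counted loop runs 70 / 32 == 2 iterations of 32
  // pushes each, and the unrolled tail emits the remaining 70 % 32 == 6
  // pushes of undefined.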

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r4);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
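        // E.g., with num_parameters == 2: parameter i == 1 (the last) is at
        // kCallerSPOffset and parameter i == 0 one slot higher, since the
        // arguments were pushed left to right.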
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  ArgumentsAccessStub::HasNewTarget has_new_target =
      IsSubclassConstructor(info->function()->kind())
          ? ArgumentsAccessStub::HAS_NEW_TARGET
          : ArgumentsAccessStub::NO_NEW_TARGET;

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
      --num_parameters;
      ++rest_index;
    }

    __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r5, Operand(Smi::FromInt(num_parameters)));
    __ mov(r4, Operand(Smi::FromInt(rest_index)));
    __ Push(r6, r5, r4);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mr(r6, r4);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
    __ Push(r6, r5, r4);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type, has_new_target);
    __ CallStub(&stub);

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    {
      Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmpl(sp, ip);
      __ bc_short(ge, &ok);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
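  // Illustrative arithmetic (kCodeSizeMultiplier is platform-specific; 100 is
  // a made-up value): if the multiplier were 100 and the back edge spans 450
  // bytes of code, distance is 450 + 50 and, assuming kMaxBackEdgeWeight is
  // at least 5, the counter is decremented by Max(1, 500 / 100) == 5.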
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ cmpi(r6, Operand::Zero());
    __ bge(&ok);
    __ push(r3);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    __ pop(r3);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      if (IsSubclassConstructor(info_->function()->kind())) {
        arg_count++;
      }
      int32_t sp_delta = arg_count * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
#if V8_TARGET_ARCH_PPC64
      // With 64-bit we may need nop() instructions to ensure we have
      // enough space for SetDebugBreakAtReturn().
      if (is_int16(sp_delta)) {
        masm_->nop();
        masm_->nop();
      }
#endif
      __ blr();
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ cmpi(result_register(), Operand::Zero());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}
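
// In effect Split emits the cheapest branch shape for each label layout, e.g.
// for Split(eq, t, f, fall_through):
//   fall_through == f: beq t         (false case falls through)
//   fall_through == t: bne f         (true case falls through)
//   otherwise:         beq t; b f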


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
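
// Worked example: with two parameters, the parameter at index 0 maps to
// MemOperand(fp, 3 * kPointerSize), a caller-frame slot above fp, while the
// local at index 0 maps to MemOperand(fp, kLocal0Offset), below fp.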


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // lexical environment.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
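  // E.g., `let x;` is hole-initialized below so that a read before the
  // initializer runs can be detected and turned into a ReferenceError by the
  // read barrier in EmitVariableLoad; plain `var x;` slots simply keep the
  // undefined they were seeded with in Generate().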
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r5, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ LoadSmiLiteral(r4, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r5, r4, r3);
      } else {
        __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
        __ Push(cp, r5, r4, r3);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
      __ Push(cp, r5, r4);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ModuleDescriptor* descriptor = declaration->module()->descriptor();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(descriptor->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r4, scope_->ContextChainLength(scope_->ScriptScope()));
  __ LoadP(r4, ContextOperand(r4, descriptor->Index()));
  __ LoadP(r4, ContextOperand(r4, Context::EXTENSION_INDEX));

  // Assign it.
  __ StoreP(r4, ContextOperand(cp, variable->index()), r0);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp, Context::SlotOffset(variable->index()), r4, r6,
                            kLRHasBeenSaved, kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(cp, r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());

    __ bind(&skip);
    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ beq(&exit);
  Register null_value = r7;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r3, null_value);
  __ beq(&exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
  __ bge(&done_convert);
  __ bind(&convert);
  __ push(r3);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r3);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
  __ ble(&call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r4, FeedbackVector());
  __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ StoreP(
      r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi indicates slow check.
  __ LoadP(r5, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
  __ bgt(&non_proxy);
  __ LoadSmiLiteral(r4, Smi::FromInt(0));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(r4, r3);  // Smi and array
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r4, r3);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));
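  // SmiToPtrArrayOffset converts the smi index in r3 into a byte offset of
  // index * kPointerSize (a single shift whose amount depends on the smi
  // encoding in use), so the indexed load above reads
  // elements_base + index * kPointerSize.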

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
  __ beq(&update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mr(r6, r3);
  __ cmpi(r6, Operand::Zero());
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope() && info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r5, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r3, Operand(info));
    __ LoadRoot(
        r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
    __ Push(cp, r3, r4);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r3);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ LoadP(LoadDescriptor::ReceiverRegister(),
           MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  __ Cmpi(r3, Operand(isolate()->factory()->undefined_value()), r0);
  Label done;
  __ bne(&done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset) {
  if (NeedsHomeObject(initializer)) {
    __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ LoadP(StoreDescriptor::ValueRegister(),
             MemOperand(sp, offset * kPointerSize));
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r4;
  Register temp = r5;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ beq(&fast);
    // Check that extension is NULL.
    __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ cmpi(temp, Operand::Zero());
    __ bne(slow);
    // Load next context in chain.
    __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode =
      (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r6;
  Register temp = r7;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ cmpi(temp, Operand::Zero());
        __ bne(slow);
      }
      __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ cmpi(temp, Operand::Zero());
  __ bne(slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ mov(r3, Operand(var->name()));
        __ push(r3);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
      }
      CallGlobalLoadIC(var->name());
      context()->Plug(r3);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy
        // have the same declaration scope (i.e. they are both in global code,
        // in the same function or in the same eval code) and the VariableProxy
        // is in the source physically located after the initializer of the
        // variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have a valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
                            var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          Label done;
          // Let and const need a read barrier.
          GetVar(r3, var);
          __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
          __ bne(&done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ mov(r3, Operand(var->name()));
            __ push(r3);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(r3);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r4, Operand(var->name()));
      __ Push(cp, r4);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(r3);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r8 = materialized value (RegExp literal)
  // r7 = JS function, literals array
  // r6 = literal index
  // r5 = RegExp pattern
  // r4 = RegExp flags
  // r3 = RegExp literal clone
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
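  // E.g., literal_index 0 denotes the first slot of the literals array,
  // FixedArray::kHeaderSize bytes past the start of the (tagged) array;
  // each further index adds kPointerSize.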
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r8, ip);
  __ bne(&materialized);

  // Create regexp literal using runtime function.
  // Result will be in r3.
  __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
  __ mov(r5, Operand(expr->pattern()));
  __ mov(r4, Operand(expr->flags()));
  __ Push(r7, r6, r5, r4);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mr(r8, r3);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
  __ b(&allocated);

  __ bind(&runtime_allocate);
  __ LoadSmiLiteral(r3, Smi::FromInt(size));
  __ Push(r8, r3);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r8);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r3: Newly allocated regexp.
  // r8: Materialized regexp.
  // r5: temp.
  __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ push(r4);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ LoadSmiLiteral(r3, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r3.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r3));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), r3);
              __ mov(StoreDescriptor::NameRegister(),
                     Operand(isolate()->factory()->home_object_symbol()));
              __ LoadP(StoreDescriptor::ValueRegister(), MemOperand(sp));
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(value, 2);
          __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
          __ push(r3);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        __ push(r3);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(it->second->getter, 2);
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(it->second->setter, 3);
    __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
    __ push(r3);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(r3);  // Save result on the stack.
      result_saved = true;
    }

    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    __ push(r3);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(value, 2);

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
            __ push(r3);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ mov(r3, Operand(Smi::FromInt(NONE)));
          __ push(r3);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ LoadP(r3, MemOperand(sp));
    __ push(r3);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }
}
1846 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1847 Comment cmnt(masm_, "[ ArrayLiteral");
1849 expr->BuildConstantElements(isolate());
1850 Handle<FixedArray> constant_elements = expr->constant_elements();
1851 bool has_fast_elements =
1852 IsFastObjectElementsKind(expr->constant_elements_kind());
1853 Handle<FixedArrayBase> constant_elements_values(
1854 FixedArrayBase::cast(constant_elements->get(1)));
1856 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1857 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1858 // With allocation-site pretenuring disabled, transitioning is the only
1859 // customer of allocation sites, and fast object elements leave nowhere to transition to.
1860 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1863 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1864 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1865 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1866 __ mov(r4, Operand(constant_elements));
1867 if (MustCreateArrayLiteralWithRuntime(expr)) {
1868 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1869 __ Push(r6, r5, r4, r3);
1870 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1872 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1875 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1877 bool result_saved = false; // Is the result saved to the stack?
1878 ZoneList<Expression*>* subexprs = expr->values();
1879 int length = subexprs->length();
1881 // Emit code to evaluate all the non-constant subexpressions and to store
1882 // them into the newly cloned array.
1883 for (int i = 0; i < length; i++) {
1884 Expression* subexpr = subexprs->at(i);
1885 // If the subexpression is a literal or a simple materialized literal it
1886 // is already set in the cloned array.
1887 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1889 if (!result_saved) {
1891 __ Push(Smi::FromInt(expr->literal_index()));
1892 result_saved = true;
1894 VisitForAccumulatorValue(subexpr);
1896 if (has_fast_elements) {
1897 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
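// offset is the untagged byte offset of element i in the elements backing
// store; FieldMemOperand below folds in the -kHeapObjectTag adjustment.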
1898 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1899 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1900 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1901 // Update the write barrier for the array store.
1902 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1903 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1906 __ LoadSmiLiteral(r6, Smi::FromInt(i));
1907 StoreArrayLiteralElementStub stub(isolate());
1911 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1915 __ pop(); // literal index
1916 context()->PlugTOS();
1918 context()->Plug(r3);
1923 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1924 DCHECK(expr->target()->IsValidReferenceExpression());
1926 Comment cmnt(masm_, "[ Assignment");
1928 Property* property = expr->target()->AsProperty();
1929 LhsKind assign_type = GetAssignType(property);
1931 // Evaluate LHS expression.
1932 switch (assign_type) {
1934 // Nothing to do here.
1936 case NAMED_PROPERTY:
1937 if (expr->is_compound()) {
1938 // We need the receiver both on the stack and in the register.
1939 VisitForStackValue(property->obj());
1940 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1942 VisitForStackValue(property->obj());
1945 case NAMED_SUPER_PROPERTY:
1946 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1947 EmitLoadHomeObject(property->obj()->AsSuperReference());
1948 __ Push(result_register());
1949 if (expr->is_compound()) {
1950 const Register scratch = r4;
1951 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1952 __ Push(scratch, result_register());
1955 case KEYED_SUPER_PROPERTY: {
1956 const Register scratch = r4;
1957 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1958 EmitLoadHomeObject(property->obj()->AsSuperReference());
1959 __ Move(scratch, result_register());
1960 VisitForAccumulatorValue(property->key());
1961 __ Push(scratch, result_register());
1962 if (expr->is_compound()) {
1963 const Register scratch1 = r5;
1964 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1965 __ Push(scratch1, scratch, result_register());
1969 case KEYED_PROPERTY:
1970 if (expr->is_compound()) {
1971 VisitForStackValue(property->obj());
1972 VisitForStackValue(property->key());
1973 __ LoadP(LoadDescriptor::ReceiverRegister(),
1974 MemOperand(sp, 1 * kPointerSize));
1975 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1977 VisitForStackValue(property->obj());
1978 VisitForStackValue(property->key());
1983 // For compound assignments we need another deoptimization point after the
1984 // variable/property load.
1985 if (expr->is_compound()) {
1987 AccumulatorValueContext context(this);
1988 switch (assign_type) {
1990 EmitVariableLoad(expr->target()->AsVariableProxy());
1991 PrepareForBailout(expr->target(), TOS_REG);
1993 case NAMED_PROPERTY:
1994 EmitNamedPropertyLoad(property);
1995 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1997 case NAMED_SUPER_PROPERTY:
1998 EmitNamedSuperPropertyLoad(property);
1999 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2001 case KEYED_SUPER_PROPERTY:
2002 EmitKeyedSuperPropertyLoad(property);
2003 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2005 case KEYED_PROPERTY:
2006 EmitKeyedPropertyLoad(property);
2007 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2012 Token::Value op = expr->binary_op();
2013 __ push(r3); // Left operand goes on the stack.
2014 VisitForAccumulatorValue(expr->value());
2016 SetSourcePosition(expr->position() + 1);
2017 AccumulatorValueContext context(this);
2018 if (ShouldInlineSmiCase(op)) {
2019 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
2022 EmitBinaryOp(expr->binary_operation(), op);
2025 // Deoptimization point in case the binary operation may have side effects.
2026 PrepareForBailout(expr->binary_operation(), TOS_REG);
2028 VisitForAccumulatorValue(expr->value());
2031 // Record source position before possible IC call.
2032 SetSourcePosition(expr->position());
2035 switch (assign_type) {
2037 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2039 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2040 context()->Plug(r3);
2042 case NAMED_PROPERTY:
2043 EmitNamedPropertyAssignment(expr);
2045 case NAMED_SUPER_PROPERTY:
2046 EmitNamedSuperPropertyStore(property);
2047 context()->Plug(r3);
2049 case KEYED_SUPER_PROPERTY:
2050 EmitKeyedSuperPropertyStore(property);
2051 context()->Plug(r3);
2053 case KEYED_PROPERTY:
2054 EmitKeyedPropertyAssignment(expr);
2060 void FullCodeGenerator::VisitYield(Yield* expr) {
2061 Comment cmnt(masm_, "[ Yield");
2062 // Evaluate yielded value first; the initial iterator definition depends on
2063 // this. It stays on the stack while we update the iterator.
2064 VisitForStackValue(expr->expression());
2066 switch (expr->yield_kind()) {
2067 case Yield::kSuspend:
2068 // Pop value from top-of-stack slot; box result into result register.
2069 EmitCreateIteratorResult(false);
2070 __ push(result_register());
2072 case Yield::kInitial: {
2073 Label suspend, continuation, post_runtime, resume;
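// Suspending stores the code offset of the continuation label and the
// current context into the generator object, so EmitGeneratorResume can
// rebuild this frame later and jump back in at the right point.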
2077 __ bind(&continuation);
2081 VisitForAccumulatorValue(expr->generator_object());
2082 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2083 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2084 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2086 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2088 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2089 kLRHasBeenSaved, kDontSaveFPRegs);
2090 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2092 __ beq(&post_runtime);
2093 __ push(r3); // generator object
2094 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2095 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2096 __ bind(&post_runtime);
2097 __ pop(result_register());
2098 EmitReturnSequence();
2101 context()->Plug(result_register());
2105 case Yield::kFinal: {
2106 VisitForAccumulatorValue(expr->generator_object());
2107 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2108 __ StoreP(r4, FieldMemOperand(result_register(),
2109 JSGeneratorObject::kContinuationOffset),
2111 // Pop value from top-of-stack slot, box result into result register.
2112 EmitCreateIteratorResult(true);
2113 EmitUnwindBeforeReturn();
2114 EmitReturnSequence();
2118 case Yield::kDelegating: {
2119 VisitForStackValue(expr->generator_object());
2121 // Initial stack layout is as follows:
2122 // [sp + 1 * kPointerSize] iter
2123 // [sp + 0 * kPointerSize] g
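// Taken together, the labelled fragments below implement, roughly:
//   received = undefined; goto l_next;
//   l_catch: f = 'throw'; arg = e; goto l_call;   // delivered by .throw()
//   l_try:   received = yield result;             // guarded by l_catch
//   l_next:  f = 'next'; arg = received;
//   l_call:  result = iter[f](arg);
//            if (!result.done) goto l_try;
//   return result.value;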
2125 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2126 Label l_next, l_call;
2127 Register load_receiver = LoadDescriptor::ReceiverRegister();
2128 Register load_name = LoadDescriptor::NameRegister();
2130 // Initial send value is undefined.
2131 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2134 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2136 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2137 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2138 __ Push(load_name, r6, r3); // "throw", iter, except
2141 // try { received = %yield result }
2142 // Shuffle the received result above a try handler and yield it without re-boxing.
2145 __ pop(r3); // result
2146 EnterTryBlock(expr->index(), &l_catch);
2147 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2148 __ push(r3); // result
2150 __ bind(&l_continuation);
2152 __ bind(&l_suspend);
2153 const int generator_object_depth = kPointerSize + try_block_size;
2154 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2156 __ Push(Smi::FromInt(expr->index())); // handler-index
2157 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2158 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2159 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2161 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2163 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2164 kLRHasBeenSaved, kDontSaveFPRegs);
2165 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2166 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2167 __ pop(r3); // result
2168 EmitReturnSequence();
2169 __ bind(&l_resume); // received in r3
2170 ExitTryBlock(expr->index());
2172 // receiver = iter; f = 'next'; arg = received;
2175 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2176 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2177 __ Push(load_name, r6, r3); // "next", iter, received
2179 // result = receiver[f](arg);
2181 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2182 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2183 if (FLAG_vector_ics) {
2184 __ mov(VectorLoadICDescriptor::SlotRegister(),
2185 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2187 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2188 CallIC(ic, TypeFeedbackId::None());
2190 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2191 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2194 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2195 __ Drop(1); // The function is still on the stack; drop it.
2197 // if (!result.done) goto l_try;
2198 __ Move(load_receiver, r3);
2200 __ push(load_receiver); // save result
2201 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2202 if (FLAG_vector_ics) {
2203 __ mov(VectorLoadICDescriptor::SlotRegister(),
2204 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2206 CallLoadIC(NOT_CONTEXTUAL); // r3=result.done
2207 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2209 __ cmpi(r3, Operand::Zero());
2213 __ pop(load_receiver); // result
2214 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2215 if (FLAG_vector_ics) {
2216 __ mov(VectorLoadICDescriptor::SlotRegister(),
2217 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2219 CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
2220 context()->DropAndPlug(2, r3); // drop iter and g
2227 void FullCodeGenerator::EmitGeneratorResume(
2228 Expression* generator, Expression* value,
2229 JSGeneratorObject::ResumeMode resume_mode) {
2230 // The value stays in r3, and is ultimately read by the resumed generator, as
2231 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2232 // is read to throw the value when the resumed generator is already closed.
2233 // r4 will hold the generator object until the activation has been resumed.
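// Resuming proceeds in stages: restore the suspended context and function,
// refill the argument slots with holes, push a standard JS frame, and then
// either jump straight to the recorded continuation offset (fast path, empty
// operand stack) or call the runtime to restore the operand stack and
// handlers (slow path).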
2234 VisitForStackValue(generator);
2235 VisitForAccumulatorValue(value);
2238 // Load suspended function and context.
2239 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2240 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2242 // Load receiver and store as the first argument.
2243 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2246 // Push holes for the rest of the arguments to the generator function.
2247 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2249 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2250 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2251 Label argument_loop, push_frame;
2252 #if V8_TARGET_ARCH_PPC64
2253 __ cmpi(r6, Operand::Zero());
2254 __ beq(&push_frame);
2256 __ SmiUntag(r6, SetRC);
2257 __ beq(&push_frame, cr0);
2260 __ bind(&argument_loop);
2262 __ bdnz(&argument_loop);
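// The parser forces context allocation for variables in generators, so the
// actual argument values already live in the context; these holes are frame
// padding only and are never read.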
2264 // Enter a new JavaScript frame, and initialize its slots as they were when
2265 // the generator was suspended.
2266 Label resume_frame, done;
2267 __ bind(&push_frame);
2268 __ b(&resume_frame, SetLK);
2270 __ bind(&resume_frame);
2271 // lr = return address.
2272 // fp = caller's frame pointer.
2273 // cp = callee's context.
2274 // r7 = callee's JS function.
2275 __ PushFixedFrame(r7);
2276 // Adjust FP to point to saved FP.
2277 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2279 // Load the operand stack size.
2280 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2281 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2282 __ SmiUntag(r6, SetRC);
2284 // If we are sending a value and there is no operand stack, we can jump back in directly.
2287 if (resume_mode == JSGeneratorObject::NEXT) {
2289 __ bne(&slow_resume, cr0);
2290 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2292 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2295 __ LoadSmiLiteral(r5,
2296 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2297 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2300 __ bind(&slow_resume);
2303 __ beq(&call_resume, cr0);
2306 // Otherwise, we push holes for the operand stack and call the runtime to fix
2307 // up the stack and the handlers.
2310 __ bind(&operand_loop);
2312 __ bdnz(&operand_loop);
2314 __ bind(&call_resume);
2315 DCHECK(!result_register().is(r4));
2316 __ Push(r4, result_register());
2317 __ Push(Smi::FromInt(resume_mode));
2318 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2319 // Not reached: the runtime call returns elsewhere.
2320 __ stop("not-reached");
2323 context()->Plug(result_register());
2327 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2331 const int instance_size = 5 * kPointerSize;
2332 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2335 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2338 __ bind(&gc_required);
2339 __ Push(Smi::FromInt(instance_size));
2340 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2341 __ LoadP(context_register(),
2342 MemOperand(fp, StandardFrameConstants::kContextOffset));
2344 __ bind(&allocated);
2345 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2346 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2347 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2349 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2350 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2351 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2352 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2353 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2355 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2358 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2361 // Only the value field needs a write barrier, as the other values are in the root set.
2363 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2364 kLRHasBeenSaved, kDontSaveFPRegs);
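// r3 now holds a fully initialized { value, done } iterator result whose map
// is the native context's iterator_result_map loaded above.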
2368 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2369 SetSourcePosition(prop->position());
2370 Literal* key = prop->key()->AsLiteral();
2371 DCHECK(!prop->IsSuperAccess());
2373 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2374 if (FLAG_vector_ics) {
2375 __ mov(VectorLoadICDescriptor::SlotRegister(),
2376 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2377 CallLoadIC(NOT_CONTEXTUAL);
2379 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2384 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2385 // Stack: receiver, home_object.
2386 SetSourcePosition(prop->position());
2387 Literal* key = prop->key()->AsLiteral();
2388 DCHECK(!key->value()->IsSmi());
2389 DCHECK(prop->IsSuperAccess());
2391 __ Push(key->value());
2392 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2396 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2397 SetSourcePosition(prop->position());
2398 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2399 if (FLAG_vector_ics) {
2400 __ mov(VectorLoadICDescriptor::SlotRegister(),
2401 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2404 CallIC(ic, prop->PropertyFeedbackId());
2409 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2410 // Stack: receiver, home_object, key.
2411 SetSourcePosition(prop->position());
2413 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2417 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2419 Expression* left_expr,
2420 Expression* right_expr) {
2421 Label done, smi_case, stub_call;
2423 Register scratch1 = r5;
2424 Register scratch2 = r6;
2426 // Get the arguments.
2428 Register right = r3;
2431 // Perform combined smi check on both operands.
2432 __ orx(scratch1, left, right);
2433 STATIC_ASSERT(kSmiTag == 0);
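// Since kSmiTag == 0, the tag bit of (left | right) is set iff at least one
// operand is a heap object, so a single smi test on the OR covers both.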
2434 JumpPatchSite patch_site(masm_);
2435 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2437 __ bind(&stub_call);
2438 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2439 CallIC(code, expr->BinaryOperationFeedbackId());
2440 patch_site.EmitPatchInfo();
2444 // Smi case. This code works the same way as the smi-smi case in the type
2445 // recording binary operation stub.
2448 __ GetLeastBitsFromSmi(scratch1, right, 5);
2449 __ ShiftRightArith(right, left, scratch1);
2450 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2453 __ GetLeastBitsFromSmi(scratch2, right, 5);
2454 #if V8_TARGET_ARCH_PPC64
2455 __ ShiftLeft_(right, left, scratch2);
2457 __ SmiUntag(scratch1, left);
2458 __ ShiftLeft_(scratch1, scratch1, scratch2);
2459 // Check that the *signed* result fits in a smi
2460 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2461 __ SmiTag(right, scratch1);
2466 __ SmiUntag(scratch1, left);
2467 __ GetLeastBitsFromSmi(scratch2, right, 5);
2468 __ srw(scratch1, scratch1, scratch2);
2469 // Unsigned shift is not allowed to produce a negative number.
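// e.g. -1 >>> 0 is 0xffffffff, which is outside the smi range and must be
// materialized as a heap number by the stub.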
2470 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2471 __ SmiTag(right, scratch1);
2475 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2476 __ BranchOnOverflow(&stub_call);
2477 __ mr(right, scratch1);
2481 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2482 __ BranchOnOverflow(&stub_call);
2483 __ mr(right, scratch1);
2488 #if V8_TARGET_ARCH_PPC64
2489 // Remove tag from both operands.
2490 __ SmiUntag(ip, right);
2491 __ SmiUntag(r0, left);
2492 __ Mul(scratch1, r0, ip);
2493 // Check for overflowing the smi range - no overflow if higher 33 bits of
2494 // the result are identical.
2495 __ TestIfInt32(scratch1, r0);
2498 __ SmiUntag(ip, right);
2499 __ mullw(scratch1, left, ip);
2500 __ mulhw(scratch2, left, ip);
2501 // Check for overflowing the smi range - no overflow if higher 33 bits of
2502 // the result are identical.
2503 __ TestIfInt32(scratch2, scratch1, ip);
2506 // Go slow on zero result to handle -0.
2507 __ cmpi(scratch1, Operand::Zero());
2509 #if V8_TARGET_ARCH_PPC64
2510 __ SmiTag(right, scratch1);
2512 __ mr(right, scratch1);
2515 // We need -0 if we were multiplying a negative number with 0 to get 0.
2516 // We know one of them was zero.
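// Since one operand is zero, left + right equals the other operand; a
// negative sum (e.g. -5 * 0) means the correct result is -0, not the smi 0.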
2518 __ add(scratch2, right, left);
2519 __ cmpi(scratch2, Operand::Zero());
2521 __ LoadSmiLiteral(right, Smi::FromInt(0));
2525 __ orx(right, left, right);
2527 case Token::BIT_AND:
2528 __ and_(right, left, right);
2530 case Token::BIT_XOR:
2531 __ xor_(right, left, right);
2538 context()->Plug(r3);
2542 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2543 // Constructor is in r3.
2544 DCHECK(lit != NULL);
2547 // No access check is needed here since the constructor is created by the class literal.
2549 Register scratch = r4;
2551 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2554 for (int i = 0; i < lit->properties()->length(); i++) {
2555 ObjectLiteral::Property* property = lit->properties()->at(i);
2556 Expression* value = property->value();
2558 if (property->is_static()) {
2559 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2561 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2564 EmitPropertyKey(property, lit->GetIdForProperty(i));
2566 // The static prototype property is read-only. We handle the non-computed
2567 // property name case in the parser. Since this is the only case where we
2568 // need to check for an own read-only property, we special-case it here so
2569 // that the check is not done for every property.
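// e.g. class C { static ['prototype']() {} } can only be rejected here at
// runtime; the non-computed form is already a parse-time error.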
2570 if (property->is_static() && property->is_computed_name()) {
2571 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2575 VisitForStackValue(value);
2576 EmitSetHomeObjectIfNeeded(value, 2);
2578 switch (property->kind()) {
2579 case ObjectLiteral::Property::CONSTANT:
2580 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2581 case ObjectLiteral::Property::PROTOTYPE:
2583 case ObjectLiteral::Property::COMPUTED:
2584 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2587 case ObjectLiteral::Property::GETTER:
2588 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2590 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2593 case ObjectLiteral::Property::SETTER:
2594 __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2596 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2605 __ CallRuntime(Runtime::kToFastProperties, 1);
2608 __ CallRuntime(Runtime::kToFastProperties, 1);
2612 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2614 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2615 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2616 CallIC(code, expr->BinaryOperationFeedbackId());
2617 patch_site.EmitPatchInfo();
2618 context()->Plug(r3);
2622 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2623 DCHECK(expr->IsValidReferenceExpression());
2625 Property* prop = expr->AsProperty();
2626 LhsKind assign_type = GetAssignType(prop);
2628 switch (assign_type) {
2630 Variable* var = expr->AsVariableProxy()->var();
2631 EffectContext context(this);
2632 EmitVariableAssignment(var, Token::ASSIGN);
2635 case NAMED_PROPERTY: {
2636 __ push(r3); // Preserve value.
2637 VisitForAccumulatorValue(prop->obj());
2638 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2639 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2640 __ mov(StoreDescriptor::NameRegister(),
2641 Operand(prop->key()->AsLiteral()->value()));
2645 case NAMED_SUPER_PROPERTY: {
2647 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2648 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2649 // stack: value, this; r3: home_object
2650 Register scratch = r5;
2651 Register scratch2 = r6;
2652 __ mr(scratch, result_register()); // home_object
2653 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2654 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2655 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2656 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2657 // stack: this, home_object; r3: value
2658 EmitNamedSuperPropertyStore(prop);
2661 case KEYED_SUPER_PROPERTY: {
2663 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2664 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2665 __ Push(result_register());
2666 VisitForAccumulatorValue(prop->key());
2667 Register scratch = r5;
2668 Register scratch2 = r6;
2669 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2670 // stack: value, this, home_object; r3: key, r6: value
2671 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2672 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2673 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2674 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2675 __ StoreP(r3, MemOperand(sp, 0));
2676 __ Move(r3, scratch2);
2677 // stack: this, home_object, key; r3: value.
2678 EmitKeyedSuperPropertyStore(prop);
2681 case KEYED_PROPERTY: {
2682 __ push(r3); // Preserve value.
2683 VisitForStackValue(prop->obj());
2684 VisitForAccumulatorValue(prop->key());
2685 __ Move(StoreDescriptor::NameRegister(), r3);
2686 __ Pop(StoreDescriptor::ValueRegister(),
2687 StoreDescriptor::ReceiverRegister());
2689 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2694 context()->Plug(r3);
2698 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2699 Variable* var, MemOperand location) {
2700 __ StoreP(result_register(), location, r0);
2701 if (var->IsContextSlot()) {
2702 // RecordWrite may destroy all its register arguments.
2703 __ mr(r6, result_register());
2704 int offset = Context::SlotOffset(var->index());
2705 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2711 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2712 if (var->IsUnallocated()) {
2713 // Global var, const, or let.
2714 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2715 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2718 } else if (var->mode() == LET && op != Token::INIT_LET) {
2719 // Non-initializing assignment to a let variable needs a hole (TDZ) check.
2720 DCHECK(!var->IsLookupSlot());
2721 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2723 MemOperand location = VarOperand(var, r4);
2724 __ LoadP(r6, location);
2725 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2727 __ mov(r6, Operand(var->name()));
2729 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2730 // Perform the assignment.
2732 EmitStoreToStackLocalOrContextSlot(var, location);
2734 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2735 // Assignment to a const variable always throws; which error depends on a hole check.
2736 DCHECK(!var->IsLookupSlot());
2737 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2739 MemOperand location = VarOperand(var, r4);
2740 __ LoadP(r6, location);
2741 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2742 __ bne(&const_error);
2743 __ mov(r6, Operand(var->name()));
2745 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2746 __ bind(&const_error);
2747 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2749 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2750 if (var->IsLookupSlot()) {
2751 // Assignment to var.
2752 __ push(r3); // Value.
2753 __ mov(r4, Operand(var->name()));
2754 __ mov(r3, Operand(Smi::FromInt(language_mode())));
2755 __ Push(cp, r4, r3); // Context, name, language mode.
2756 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2758 // Assignment to var or initializing assignment to let/const in harmony mode.
2760 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2761 MemOperand location = VarOperand(var, r4);
2762 if (generate_debug_code_ && op == Token::INIT_LET) {
2763 // Check for an uninitialized let binding.
2764 __ LoadP(r5, location);
2765 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2766 __ Check(eq, kLetBindingReInitialization);
2768 EmitStoreToStackLocalOrContextSlot(var, location);
2770 } else if (op == Token::INIT_CONST_LEGACY) {
2771 // Const initializers need a write barrier.
2772 DCHECK(var->mode() == CONST_LEGACY);
2773 DCHECK(!var->IsParameter()); // No const parameters.
2774 if (var->IsLookupSlot()) {
2776 __ mov(r3, Operand(var->name()));
2777 __ Push(cp, r3); // Context and name.
2778 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2780 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2782 MemOperand location = VarOperand(var, r4);
2783 __ LoadP(r5, location);
2784 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2786 EmitStoreToStackLocalOrContextSlot(var, location);
2791 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2792 if (is_strict(language_mode())) {
2793 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2795 // Silently ignore store in sloppy mode.
2800 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2801 // Assignment to a property, using a named store IC.
2802 Property* prop = expr->target()->AsProperty();
2803 DCHECK(prop != NULL);
2804 DCHECK(prop->key()->IsLiteral());
2806 // Record source code position before IC call.
2807 SetSourcePosition(expr->position());
2808 __ mov(StoreDescriptor::NameRegister(),
2809 Operand(prop->key()->AsLiteral()->value()));
2810 __ pop(StoreDescriptor::ReceiverRegister());
2811 CallStoreIC(expr->AssignmentFeedbackId());
2813 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2814 context()->Plug(r3);
2818 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2819 // Assignment to named property of super.
2821 // stack : receiver ('this'), home_object
2822 DCHECK(prop != NULL);
2823 Literal* key = prop->key()->AsLiteral();
2824 DCHECK(key != NULL);
2826 __ Push(key->value());
2828 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2829 : Runtime::kStoreToSuper_Sloppy),
2834 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2835 // Assignment to keyed property of super.
2837 // stack : receiver ('this'), home_object, key
2838 DCHECK(prop != NULL);
2842 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2843 : Runtime::kStoreKeyedToSuper_Sloppy),
2848 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2849 // Assignment to a property, using a keyed store IC.
2851 // Record source code position before IC call.
2852 SetSourcePosition(expr->position());
2853 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2854 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2857 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2858 CallIC(ic, expr->AssignmentFeedbackId());
2860 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2861 context()->Plug(r3);
2865 void FullCodeGenerator::VisitProperty(Property* expr) {
2866 Comment cmnt(masm_, "[ Property");
2867 Expression* key = expr->key();
2869 if (key->IsPropertyName()) {
2870 if (!expr->IsSuperAccess()) {
2871 VisitForAccumulatorValue(expr->obj());
2872 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2873 EmitNamedPropertyLoad(expr);
2875 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2876 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2877 __ Push(result_register());
2878 EmitNamedSuperPropertyLoad(expr);
2881 if (!expr->IsSuperAccess()) {
2882 VisitForStackValue(expr->obj());
2883 VisitForAccumulatorValue(expr->key());
2884 __ Move(LoadDescriptor::NameRegister(), r3);
2885 __ pop(LoadDescriptor::ReceiverRegister());
2886 EmitKeyedPropertyLoad(expr);
2888 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2889 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2890 __ Push(result_register());
2891 VisitForStackValue(expr->key());
2892 EmitKeyedSuperPropertyLoad(expr);
2895 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2896 context()->Plug(r3);
2900 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2902 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2906 // Code common for calls using the IC.
2907 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2908 Expression* callee = expr->expression();
2910 CallICState::CallType call_type =
2911 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2913 // Get the target function.
2914 if (call_type == CallICState::FUNCTION) {
2916 StackValueContext context(this);
2917 EmitVariableLoad(callee->AsVariableProxy());
2918 PrepareForBailout(callee, NO_REGISTERS);
2920 // Push undefined as receiver. This is patched in the method prologue if it
2921 // is a sloppy mode method.
2922 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2925 // Load the function from the receiver.
2926 DCHECK(callee->IsProperty());
2927 DCHECK(!callee->AsProperty()->IsSuperAccess());
2928 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2929 EmitNamedPropertyLoad(callee->AsProperty());
2930 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2931 // Push the target function under the receiver.
2932 __ LoadP(r0, MemOperand(sp, 0));
2934 __ StoreP(r3, MemOperand(sp, kPointerSize));
2937 EmitCall(expr, call_type);
2941 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2942 Expression* callee = expr->expression();
2943 DCHECK(callee->IsProperty());
2944 Property* prop = callee->AsProperty();
2945 DCHECK(prop->IsSuperAccess());
2947 SetSourcePosition(prop->position());
2948 Literal* key = prop->key()->AsLiteral();
2949 DCHECK(!key->value()->IsSmi());
2950 // Load the function from the receiver.
2951 const Register scratch = r4;
2952 SuperReference* super_ref = prop->obj()->AsSuperReference();
2953 EmitLoadHomeObject(super_ref);
2955 VisitForAccumulatorValue(super_ref->this_var());
2956 __ Push(scratch, r3, r3, scratch);
2957 __ Push(key->value());
2961 // - this (receiver)
2962 // - this (receiver) <-- LoadFromSuper will pop here and below.
2965 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2967 // Replace home_object with target function.
2968 __ StoreP(r3, MemOperand(sp, kPointerSize));
2971 // - target function
2972 // - this (receiver)
2973 EmitCall(expr, CallICState::METHOD);
2977 // Code common for calls using the IC.
2978 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2980 VisitForAccumulatorValue(key);
2982 Expression* callee = expr->expression();
2984 // Load the function from the receiver.
2985 DCHECK(callee->IsProperty());
2986 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2987 __ Move(LoadDescriptor::NameRegister(), r3);
2988 EmitKeyedPropertyLoad(callee->AsProperty());
2989 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2991 // Push the target function under the receiver.
2992 __ LoadP(ip, MemOperand(sp, 0));
2994 __ StoreP(r3, MemOperand(sp, kPointerSize));
2996 EmitCall(expr, CallICState::METHOD);
3000 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3001 Expression* callee = expr->expression();
3002 DCHECK(callee->IsProperty());
3003 Property* prop = callee->AsProperty();
3004 DCHECK(prop->IsSuperAccess());
3006 SetSourcePosition(prop->position());
3007 // Load the function from the receiver.
3008 const Register scratch = r4;
3009 SuperReference* super_ref = prop->obj()->AsSuperReference();
3010 EmitLoadHomeObject(super_ref);
3012 VisitForAccumulatorValue(super_ref->this_var());
3015 __ LoadP(scratch, MemOperand(sp, kPointerSize * 2));
3017 VisitForStackValue(prop->key());
3021 // - this (receiver)
3022 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3025 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
3027 // Replace home_object with target function.
3028 __ StoreP(r3, MemOperand(sp, kPointerSize));
3031 // - target function
3032 // - this (receiver)
3033 EmitCall(expr, CallICState::METHOD);
3037 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3038 // Load the arguments.
3039 ZoneList<Expression*>* args = expr->arguments();
3040 int arg_count = args->length();
3042 PreservePositionScope scope(masm()->positions_recorder());
3043 for (int i = 0; i < arg_count; i++) {
3044 VisitForStackValue(args->at(i));
3048 // Record source position of the IC call.
3049 SetSourcePosition(expr->position());
3050 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3051 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
3052 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3053 // Don't assign a type feedback id to the IC, since type feedback is provided
3054 // by the vector above.
3057 RecordJSReturnSite(expr);
3058 // Restore context register.
3059 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3060 context()->DropAndPlug(1, r3);
3064 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3065 // r8: copy of the first argument or undefined if it doesn't exist.
3066 if (arg_count > 0) {
3067 __ LoadP(r8, MemOperand(sp, arg_count * kPointerSize), r0);
3069 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
3072 // r7: the enclosing function.
3073 __ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3075 // r6: the receiver of the enclosing function.
3076 int receiver_offset = 2 + info_->scope()->num_parameters();
3077 __ LoadP(r6, MemOperand(fp, receiver_offset * kPointerSize), r0);
3079 // r5: language mode.
3080 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
3082 // r4: the start position of the scope the call resides in.
3083 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3085 // Do the runtime call.
3086 __ Push(r8, r7, r6, r5, r4);
3087 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
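// Only five values are pushed here; the sixth argument to the runtime call
// is the copy of the function that VisitCall pushed just before this helper.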
3091 void FullCodeGenerator::EmitLoadSuperConstructor() {
3092 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3094 __ CallRuntime(Runtime::kGetPrototype, 1);
3098 void FullCodeGenerator::VisitCall(Call* expr) {
3100 // We want to verify that RecordJSReturnSite gets called on all paths
3101 // through this function. Avoid early returns.
3102 expr->return_is_recorded_ = false;
3105 Comment cmnt(masm_, "[ Call");
3106 Expression* callee = expr->expression();
3107 Call::CallType call_type = expr->GetCallType(isolate());
3109 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3110 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3111 // to resolve the function we need to call and the receiver of the
3112 // call. Then we call the resolved function using the given arguments.
3114 ZoneList<Expression*>* args = expr->arguments();
3115 int arg_count = args->length();
3118 PreservePositionScope pos_scope(masm()->positions_recorder());
3119 VisitForStackValue(callee);
3120 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3121 __ push(r5); // Reserved receiver slot.
3123 // Push the arguments.
3124 for (int i = 0; i < arg_count; i++) {
3125 VisitForStackValue(args->at(i));
3128 // Push a copy of the function (found below the arguments) and resolve eval.
3130 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3132 EmitResolvePossiblyDirectEval(arg_count);
3134 // The runtime call returns a pair of values in r3 (function) and
3135 // r4 (receiver). Touch up the stack with the right values.
3136 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3137 __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3139 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3142 // Record source position for debugger.
3143 SetSourcePosition(expr->position());
3144 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3145 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3147 RecordJSReturnSite(expr);
3148 // Restore context register.
3149 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3150 context()->DropAndPlug(1, r3);
3151 } else if (call_type == Call::GLOBAL_CALL) {
3152 EmitCallWithLoadIC(expr);
3154 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3155 // Call to a lookup slot (dynamically introduced variable).
3156 VariableProxy* proxy = callee->AsVariableProxy();
3160 PreservePositionScope scope(masm()->positions_recorder());
3161 // Generate code for loading from variables potentially shadowed
3162 // by eval-introduced variables.
3163 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3167 // Call the runtime to find the function to call (returned in r3)
3168 // and the object holding it (returned in r4).
3169 DCHECK(!context_register().is(r5));
3170 __ mov(r5, Operand(proxy->name()));
3171 __ Push(context_register(), r5);
3172 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3173 __ Push(r3, r4); // Function, receiver.
3174 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3176 // If fast case code has been generated, emit code to push the
3177 // function and receiver and have the slow path jump around this code.
3179 if (done.is_linked()) {
3185 // The receiver is implicitly the global receiver. Indicate this
3186 // by passing undefined to the call function stub.
3187 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3192 // The receiver is either the global receiver or an object found
3193 // by LoadContextSlot.
3195 } else if (call_type == Call::PROPERTY_CALL) {
3196 Property* property = callee->AsProperty();
3197 bool is_named_call = property->key()->IsPropertyName();
3198 if (property->IsSuperAccess()) {
3199 if (is_named_call) {
3200 EmitSuperCallWithLoadIC(expr);
3202 EmitKeyedSuperCallWithLoadIC(expr);
3206 PreservePositionScope scope(masm()->positions_recorder());
3207 VisitForStackValue(property->obj());
3209 if (is_named_call) {
3210 EmitCallWithLoadIC(expr);
3212 EmitKeyedCallWithLoadIC(expr, property->key());
3215 } else if (call_type == Call::SUPER_CALL) {
3216 EmitSuperConstructorCall(expr);
3218 DCHECK(call_type == Call::OTHER_CALL);
3219 // Call to an arbitrary expression not handled specially above.
3221 PreservePositionScope scope(masm()->positions_recorder());
3222 VisitForStackValue(callee);
3224 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3226 // Emit function call.
3231 // RecordJSReturnSite should have been called.
3232 DCHECK(expr->return_is_recorded_);
3237 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3238 Comment cmnt(masm_, "[ CallNew");
3239 // According to ECMA-262, section 11.2.2, page 44, the function
3240 // expression in new calls must be evaluated before the arguments.
3243 // Push constructor on the stack. If it's not a function it's used as
3244 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is discarded.
3246 DCHECK(!expr->expression()->IsSuperReference());
3247 VisitForStackValue(expr->expression());
3249 // Push the arguments ("left-to-right") on the stack.
3250 ZoneList<Expression*>* args = expr->arguments();
3251 int arg_count = args->length();
3252 for (int i = 0; i < arg_count; i++) {
3253 VisitForStackValue(args->at(i));
3256 // Call the construct call builtin that handles allocation and
3257 // constructor invocation.
3258 SetSourcePosition(expr->position());
3260 // Load function and argument count into r4 and r3.
3261 __ mov(r3, Operand(arg_count));
3262 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3264 // Record call targets in unoptimized code.
3265 if (FLAG_pretenuring_call_new) {
3266 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3267 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3268 expr->CallNewFeedbackSlot().ToInt() + 1);
3271 __ Move(r5, FeedbackVector());
3272 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3274 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3275 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3276 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3277 context()->Plug(r3);
3281 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3282 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
3283 GetVar(result_register(), new_target_var);
3284 __ Push(result_register());
3286 EmitLoadSuperConstructor();
3287 __ push(result_register());
3289 // Push the arguments ("left-to-right") on the stack.
3290 ZoneList<Expression*>* args = expr->arguments();
3291 int arg_count = args->length();
3292 for (int i = 0; i < arg_count; i++) {
3293 VisitForStackValue(args->at(i));
3296 // Call the construct call builtin that handles allocation and
3297 // constructor invocation.
3298 SetSourcePosition(expr->position());
3300 // Load function and argument count into r4 and r3.
3301 __ mov(r3, Operand(arg_count));
3302 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3304 // Record call targets in unoptimized code.
3305 if (FLAG_pretenuring_call_new) {
3307 /* TODO(dslomov): support pretenuring.
3308 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3309 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3310 expr->CallNewFeedbackSlot().ToInt() + 1); */
3314 __ Move(r5, FeedbackVector());
3315 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
3317 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3318 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3322 RecordJSReturnSite(expr);
3324 SuperReference* super_ref = expr->expression()->AsSuperReference();
3325 Variable* this_var = super_ref->this_var()->var();
3326 GetVar(r4, this_var);
3327 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
3328 Label uninitialized_this;
3329 __ beq(&uninitialized_this);
3330 __ mov(r4, Operand(this_var->name()));
3332 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3333 __ bind(&uninitialized_this);
3335 EmitVariableAssignment(this_var, Token::INIT_CONST);
3336 context()->Plug(r3);
3340 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3341 ZoneList<Expression*>* args = expr->arguments();
3342 DCHECK(args->length() == 1);
3344 VisitForAccumulatorValue(args->at(0));
3346 Label materialize_true, materialize_false;
3347 Label* if_true = NULL;
3348 Label* if_false = NULL;
3349 Label* fall_through = NULL;
3350 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3351 &if_false, &fall_through);
3353 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3354 __ TestIfSmi(r3, r0);
3355 Split(eq, if_true, if_false, fall_through, cr0);
3357 context()->Plug(if_true, if_false);
3361 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3362 ZoneList<Expression*>* args = expr->arguments();
3363 DCHECK(args->length() == 1);
3365 VisitForAccumulatorValue(args->at(0));
3367 Label materialize_true, materialize_false;
3368 Label* if_true = NULL;
3369 Label* if_false = NULL;
3370 Label* fall_through = NULL;
3371 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3372 &if_false, &fall_through);
3374 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3375 __ TestIfPositiveSmi(r3, r0);
3376 Split(eq, if_true, if_false, fall_through, cr0);
3378 context()->Plug(if_true, if_false);
3382 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3383 ZoneList<Expression*>* args = expr->arguments();
3384 DCHECK(args->length() == 1);
3386 VisitForAccumulatorValue(args->at(0));
3388 Label materialize_true, materialize_false;
3389 Label* if_true = NULL;
3390 Label* if_false = NULL;
3391 Label* fall_through = NULL;
3392 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3393 &if_false, &fall_through);
3395 __ JumpIfSmi(r3, if_false);
3396 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3399 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3400 // Undetectable objects behave like undefined when tested with typeof.
3401 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3402 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3403 __ bne(if_false, cr0);
3404 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3405 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3407 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
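// Instance types in [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
// LAST_NONCALLABLE_SPEC_OBJECT_TYPE] are exactly the non-callable heap
// objects that %_IsObject should report as objects.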
3408 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3409 Split(le, if_true, if_false, fall_through);
3411 context()->Plug(if_true, if_false);
3415 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3416 ZoneList<Expression*>* args = expr->arguments();
3417 DCHECK(args->length() == 1);
3419 VisitForAccumulatorValue(args->at(0));
3421 Label materialize_true, materialize_false;
3422 Label* if_true = NULL;
3423 Label* if_false = NULL;
3424 Label* fall_through = NULL;
3425 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3426 &if_false, &fall_through);
3428 __ JumpIfSmi(r3, if_false);
3429 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3430 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3431 Split(ge, if_true, if_false, fall_through);
3433 context()->Plug(if_true, if_false);
3437 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3438 ZoneList<Expression*>* args = expr->arguments();
3439 DCHECK(args->length() == 1);
3441 VisitForAccumulatorValue(args->at(0));
3443 Label materialize_true, materialize_false;
3444 Label* if_true = NULL;
3445 Label* if_false = NULL;
3446 Label* fall_through = NULL;
3447 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3448 &if_false, &fall_through);
3450 __ JumpIfSmi(r3, if_false);
3451 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3452 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3453 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3454 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3455 Split(ne, if_true, if_false, fall_through, cr0);
3457 context()->Plug(if_true, if_false);
3461 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3462 CallRuntime* expr) {
3463 ZoneList<Expression*>* args = expr->arguments();
3464 DCHECK(args->length() == 1);
3466 VisitForAccumulatorValue(args->at(0));
3468 Label materialize_true, materialize_false, skip_lookup;
3469 Label* if_true = NULL;
3470 Label* if_false = NULL;
3471 Label* fall_through = NULL;
3472 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3473 &if_false, &fall_through);
3475 __ AssertNotSmi(r3);
3477 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3478 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3479 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3480 __ bne(&skip_lookup, cr0);
3482 // Check for fast case object. Generate false result for slow case object.
3483 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3484 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3485 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3489 // Look for the valueOf name in the descriptor array, and indicate false if
3490 // found. Since we omit an enumeration index check, a valueOf added via a map
3491 // transition that shares this descriptor array is a (harmless) false positive.
3492 Label entry, loop, done;
3494 // Skip loop if no descriptors are valid.
3495 __ NumberOfOwnDescriptors(r6, r4);
3496 __ cmpi(r6, Operand::Zero());
3499 __ LoadInstanceDescriptors(r4, r7);
3500 // r7: descriptor array.
3501 // r6: valid entries in the descriptor array.
3502 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3504 // Calculate location of the first key name.
3505 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3506 // Calculate the end of the descriptor array.
3508 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3511 // Loop through all the keys in the descriptor array. If one of these is the
3512 // string "valueOf" the result is false.
3513 // The use of ip to store the valueOf string assumes that it is not otherwise
3514 // used in the loop below.
3515 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3518 __ LoadP(r6, MemOperand(r7, 0));
3521 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3528 // Set the bit in the map to indicate that there is no local valueOf field.
3529 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3530 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3531 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3533 __ bind(&skip_lookup);
3535 // If a valueOf property is not found on the object, check that its
3536 // prototype is the unmodified String prototype. If not, the result is false.
3537 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3538 __ JumpIfSmi(r5, if_false);
3539 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3540 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3541 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3543 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3545 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3546 Split(eq, if_true, if_false, fall_through);
3548 context()->Plug(if_true, if_false);
3552 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3553 ZoneList<Expression*>* args = expr->arguments();
3554 DCHECK(args->length() == 1);
3556 VisitForAccumulatorValue(args->at(0));
3558 Label materialize_true, materialize_false;
3559 Label* if_true = NULL;
3560 Label* if_false = NULL;
3561 Label* fall_through = NULL;
3562 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3563 &if_false, &fall_through);
3565 __ JumpIfSmi(r3, if_false);
3566 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3567 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3568 Split(eq, if_true, if_false, fall_through);
3570 context()->Plug(if_true, if_false);
3574 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3575 ZoneList<Expression*>* args = expr->arguments();
3576 DCHECK(args->length() == 1);
3578 VisitForAccumulatorValue(args->at(0));
3580 Label materialize_true, materialize_false;
3581 Label* if_true = NULL;
3582 Label* if_false = NULL;
3583 Label* fall_through = NULL;
3584 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3585 &if_false, &fall_through);
3587 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3588 #if V8_TARGET_ARCH_PPC64
3589 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3590 __ li(r5, Operand(1));
3591 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3594 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3595 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3597 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3600 __ cmpi(r4, Operand::Zero());
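// -0 is the only double whose sign/exponent word is 0x80000000 and whose
// mantissa word is zero, which is the pattern being matched here.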
3604 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3605 Split(eq, if_true, if_false, fall_through);
3607 context()->Plug(if_true, if_false);
3611 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3612 ZoneList<Expression*>* args = expr->arguments();
3613 DCHECK(args->length() == 1);
3615 VisitForAccumulatorValue(args->at(0));
3617 Label materialize_true, materialize_false;
3618 Label* if_true = NULL;
3619 Label* if_false = NULL;
3620 Label* fall_through = NULL;
3621 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3622 &if_false, &fall_through);
3624 __ JumpIfSmi(r3, if_false);
3625 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3626 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3627 Split(eq, if_true, if_false, fall_through);
3629 context()->Plug(if_true, if_false);
3633 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3634 ZoneList<Expression*>* args = expr->arguments();
3635 DCHECK(args->length() == 1);
3637 VisitForAccumulatorValue(args->at(0));
3639 Label materialize_true, materialize_false;
3640 Label* if_true = NULL;
3641 Label* if_false = NULL;
3642 Label* fall_through = NULL;
3643 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3644 &if_false, &fall_through);
3646 __ JumpIfSmi(r3, if_false);
3647 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3648 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3649 Split(eq, if_true, if_false, fall_through);
3651 context()->Plug(if_true, if_false);
3655 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3656 ZoneList<Expression*>* args = expr->arguments();
3657 DCHECK(args->length() == 1);
3659 VisitForAccumulatorValue(args->at(0));
3661 Label materialize_true, materialize_false;
3662 Label* if_true = NULL;
3663 Label* if_false = NULL;
3664 Label* fall_through = NULL;
3665 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3666 &if_false, &fall_through);
3668 __ JumpIfSmi(r3, if_false);
  Register map = r4;
  Register type_reg = r5;
3671 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3672 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3673 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3674 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
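  // Subtracting FIRST_JS_PROXY_TYPE first folds the two-sided range check
  // into one unsigned comparison: types below the range wrap around to large
  // unsigned values and fail the "le" test as well.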
3675 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3676 Split(le, if_true, if_false, fall_through);
3678 context()->Plug(if_true, if_false);
3682 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3683 DCHECK(expr->arguments()->length() == 0);
3685 Label materialize_true, materialize_false;
3686 Label* if_true = NULL;
3687 Label* if_false = NULL;
3688 Label* fall_through = NULL;
3689 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3690 &if_false, &fall_through);
3692 // Get the frame pointer for the calling frame.
3693 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3695 // Skip the arguments adaptor frame if it exists.
3696 Label check_frame_marker;
3697 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3698 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3699 __ bne(&check_frame_marker);
3700 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3702 // Check the marker in the calling frame.
3703 __ bind(&check_frame_marker);
3704 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3705 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
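  // (Presumably this assert guarantees that the smi-tagged marker still fits
  // in the signed 16-bit immediate field of the compare on 32-bit targets.)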
3706 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3707 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3708 Split(eq, if_true, if_false, fall_through);
3710 context()->Plug(if_true, if_false);
3714 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3715 ZoneList<Expression*>* args = expr->arguments();
3716 DCHECK(args->length() == 2);
3718 // Load the two objects into registers and perform the comparison.
3719 VisitForStackValue(args->at(0));
3720 VisitForAccumulatorValue(args->at(1));
3722 Label materialize_true, materialize_false;
3723 Label* if_true = NULL;
3724 Label* if_false = NULL;
3725 Label* fall_through = NULL;
3726 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3727 &if_false, &fall_through);
  __ pop(r4);
  __ cmp(r3, r4);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3732 Split(eq, if_true, if_false, fall_through);
3734 context()->Plug(if_true, if_false);
3738 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3739 ZoneList<Expression*>* args = expr->arguments();
3740 DCHECK(args->length() == 1);
  // ArgumentsAccessStub expects the key in r4 and the formal
  // parameter count in r3.
  VisitForAccumulatorValue(args->at(0));
  __ mr(r4, r3);
  __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
3749 context()->Plug(r3);
3753 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3754 DCHECK(expr->arguments()->length() == 0);
3756 // Get the number of formal parameters.
3757 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
  // Check if the calling frame is an arguments adaptor frame.
  Label exit;
  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
3770 context()->Plug(r3);
3774 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3775 ZoneList<Expression*>* args = expr->arguments();
3776 DCHECK(args->length() == 1);
3777 Label done, null, function, non_function_constructor;
3779 VisitForAccumulatorValue(args->at(0));
3781 // If the object is a smi, we return null.
3782 __ JumpIfSmi(r3, &null);
3784 // Check that the object is a JS object but take special care of JS
3785 // functions to make sure they have 'Function' as their class.
3786 // Assume that there are only two callable types, and one of them is at
3787 // either end of the type range for JS object types. Saves extra comparisons.
3788 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3789 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r3.
  __ blt(&null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ beq(&function);

  __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
  __ beq(&function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3802 // Check if the constructor in the map is a JS function.
3803 Register instance_type = r5;
3804 __ GetMapConstructor(r3, r3, r4, instance_type);
3805 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3806 __ bne(&non_function_constructor);
3808 // r3 now contains the constructor function. Grab the
3809 // instance class name from there.
  __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r3,
           FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);
  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
  __ b(&done);
  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r3, Heap::kObject_stringRootIndex);
  __ b(&done);
  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r3, Heap::kNullValueRootIndex);
  // All done.
  __ bind(&done);
  context()->Plug(r3);
}
3836 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3837 // Load the arguments on the stack and call the stub.
3838 SubStringStub stub(isolate());
3839 ZoneList<Expression*>* args = expr->arguments();
3840 DCHECK(args->length() == 3);
3841 VisitForStackValue(args->at(0));
3842 VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
3845 context()->Plug(r3);
3849 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3850 // Load the arguments on the stack and call the stub.
3851 RegExpExecStub stub(isolate());
3852 ZoneList<Expression*>* args = expr->arguments();
3853 DCHECK(args->length() == 4);
3854 VisitForStackValue(args->at(0));
3855 VisitForStackValue(args->at(1));
3856 VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
3859 context()->Plug(r3);
3863 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3864 ZoneList<Expression*>* args = expr->arguments();
3865 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r3, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
  __ bne(&done);
  __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));

  __ bind(&done);
3877 context()->Plug(r3);
3881 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3882 ZoneList<Expression*>* args = expr->arguments();
3883 DCHECK(args->length() == 2);
3884 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3885 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3887 VisitForAccumulatorValue(args->at(0)); // Load the object.
3889 Label runtime, done, not_date_object;
3890 Register object = r3;
3891 Register result = r3;
3892 Register scratch0 = r11;
3893 Register scratch1 = r4;
  __ JumpIfSmi(object, &not_date_object);
3896 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ bne(&not_date_object);
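  // Fields below kFirstUncachedField are cached on the JSDate object itself,
  // but are only valid while the isolate-wide date cache stamp is unchanged
  // (it is bumped e.g. on timezone changes), so the stamp is checked before
  // trusting the cached slot; otherwise we fall back to the C function.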
  if (index->value() == 0) {
    __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ b(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ LoadP(scratch1, MemOperand(scratch1));
      __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ bne(&runtime);
      __ LoadP(result,
               FieldMemOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()),
               scratch0);
      __ b(&done);
    }

    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ LoadSmiLiteral(r4, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ b(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(r3);
}
3930 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3931 ZoneList<Expression*>* args = expr->arguments();
3932 DCHECK_EQ(3, args->length());
3934 Register string = r3;
3935 Register index = r4;
3936 Register value = r5;
3938 VisitForStackValue(args->at(0)); // index
3939 VisitForStackValue(args->at(1)); // value
3940 VisitForAccumulatorValue(args->at(2)); // string
3941 __ Pop(index, value);
3943 if (FLAG_debug_code) {
3944 __ TestIfSmi(value, r0);
3945 __ Check(eq, kNonSmiValue, cr0);
3946 __ TestIfSmi(index, r0);
3947 __ Check(eq, kNonSmiIndex, cr0);
3948 __ SmiUntag(index, index);
3949 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3950 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }
3955 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3956 __ SmiToByteArrayOffset(r0, index);
3957 __ stbx(value, MemOperand(ip, r0));
3958 context()->Plug(string);
3962 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3963 ZoneList<Expression*>* args = expr->arguments();
3964 DCHECK_EQ(3, args->length());
3966 Register string = r3;
3967 Register index = r4;
3968 Register value = r5;
3970 VisitForStackValue(args->at(0)); // index
3971 VisitForStackValue(args->at(1)); // value
3972 VisitForAccumulatorValue(args->at(2)); // string
3973 __ Pop(index, value);
3975 if (FLAG_debug_code) {
3976 __ TestIfSmi(value, r0);
3977 __ Check(eq, kNonSmiValue, cr0);
3978 __ TestIfSmi(index, r0);
3979 __ Check(eq, kNonSmiIndex, cr0);
3980 __ SmiUntag(index, index);
3981 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3982 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }
3987 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3988 __ SmiToShortArrayOffset(r0, index);
3989 __ sthx(value, MemOperand(ip, r0));
3990 context()->Plug(string);
3994 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3995 // Load the arguments on the stack and call the runtime function.
3996 ZoneList<Expression*>* args = expr->arguments();
3997 DCHECK(args->length() == 2);
3998 VisitForStackValue(args->at(0));
3999 VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
4002 context()->Plug(r3);
4006 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4007 ZoneList<Expression*>* args = expr->arguments();
4008 DCHECK(args->length() == 2);
4009 VisitForStackValue(args->at(0)); // Load the object.
4010 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(r4);  // r3 = value. r4 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r4, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
  __ bne(&done);

  // Store the value.
  __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mr(r5, r3);
  __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
                      kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r3);
}
4034 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4035 ZoneList<Expression*>* args = expr->arguments();
4036 DCHECK_EQ(args->length(), 1);
4037 // Load the argument into r3 and call the stub.
4038 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
4042 context()->Plug(r3);
4046 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4047 ZoneList<Expression*>* args = expr->arguments();
4048 DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r3, r4);
  generator.GenerateFast(masm_);
  __ b(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r4);
4064 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4065 ZoneList<Expression*>* args = expr->arguments();
4066 DCHECK(args->length() == 2);
4067 VisitForStackValue(args->at(0));
4068 VisitForAccumulatorValue(args->at(1));
4070 Register object = r4;
4071 Register index = r3;
  Register result = r6;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
4079 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
4080 &need_conversion, &index_out_of_range,
4081 STRING_INDEX_IS_NUMBER);
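  // On the fast path the generator falls through with the character code (a
  // smi) in |result|; it jumps to the labels above when the receiver or index
  // still needs conversion or the index is out of range.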
  generator.GenerateFast(masm_);
  __ b(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ b(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ b(&done);
4097 NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
4105 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4106 ZoneList<Expression*>* args = expr->arguments();
4107 DCHECK(args->length() == 2);
4108 VisitForStackValue(args->at(0));
4109 VisitForAccumulatorValue(args->at(1));
4111 Register object = r4;
4112 Register index = r3;
4113 Register scratch = r6;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
4121 StringCharAtGenerator generator(object, index, scratch, result,
4122 &need_conversion, &need_conversion,
4123 &index_out_of_range, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ b(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ LoadSmiLiteral(result, Smi::FromInt(0));
  __ b(&done);
4139 NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
4147 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4148 ZoneList<Expression*>* args = expr->arguments();
4149 DCHECK_EQ(2, args->length());
4150 VisitForStackValue(args->at(0));
4151 VisitForAccumulatorValue(args->at(1));
  __ pop(r4);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
4156 context()->Plug(r3);
4160 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4161 ZoneList<Expression*>* args = expr->arguments();
4162 DCHECK_EQ(2, args->length());
4163 VisitForStackValue(args->at(0));
4164 VisitForStackValue(args->at(1));
  StringCompareStub stub(isolate());
  __ CallStub(&stub);
4168 context()->Plug(r3);
4172 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4173 ZoneList<Expression*>* args = expr->arguments();
4174 DCHECK(args->length() >= 2);
4176 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4177 for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.
4182 Label runtime, done;
4183 // Check for non-function argument (including proxy).
4184 __ JumpIfSmi(r3, &runtime);
  __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
  __ bne(&runtime);
4188 // InvokeFunction requires the function in r4. Move it in there.
4189 __ mr(r4, result_register());
4190 ParameterCount count(arg_count);
4191 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ b(&done);

  __ bind(&runtime);
  __ push(r3);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r3);
}
4204 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4205 Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
4206 GetVar(result_register(), new_target_var);
4207 __ Push(result_register());
4209 EmitLoadSuperConstructor();
4210 __ mr(r4, result_register());
4213 // Check if the calling frame is an arguments adaptor frame.
4214 Label adaptor_frame, args_set_up, runtime;
4215 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4216 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
4217 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
4218 __ beq(&adaptor_frame);
4220 // default constructor has no arguments, so no adaptor frame means no args.
  __ li(r3, Operand::Zero());
  __ b(&args_set_up);
4224 // Copy arguments from adaptor frame.
4226 __ bind(&adaptor_frame);
  __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(r3);
4230 // Subtract 1 from arguments count, for new.target.
4231 __ subi(r3, r3, Operand(1));
4233 // Get arguments pointer in r5.
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ add(r5, r5, r0);
  __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));

  Label loop;
  __ mtctr(r3);
  __ bind(&loop);
  // Pre-decrement in order to skip receiver.
  __ LoadPU(r6, MemOperand(r5, -kPointerSize));
  __ Push(r6);
  __ bdnz(&loop);
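  // (CTR loop: mtctr above loaded the argument count into the count
  // register; bdnz decrements it and branches back while it is non-zero.)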
4247 __ bind(&args_set_up);
4248 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
4250 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);
4255 context()->Plug(result_register());
4259 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4260 RegExpConstructResultStub stub(isolate());
4261 ZoneList<Expression*>* args = expr->arguments();
4262 DCHECK(args->length() == 3);
4263 VisitForStackValue(args->at(0));
4264 VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(r5, r4);
  __ CallStub(&stub);
4268 context()->Plug(r3);
4272 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4273 ZoneList<Expression*>* args = expr->arguments();
4274 DCHECK_EQ(2, args->length());
4275 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4276 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4278 Handle<FixedArray> jsfunction_result_caches(
4279 isolate()->native_context()->jsfunction_result_caches());
4280 if (jsfunction_result_caches->length() <= cache_id) {
4281 __ Abort(kAttemptToUseUndefinedCache);
4282 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    context()->Plug(r3);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r3;
  Register cache = r4;
4291 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4292 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ LoadP(cache,
           ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ LoadP(cache,
           FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
4298 Label done, not_found;
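  // The cache is a FixedArray of interleaved (key, value) pairs plus a
  // "finger" remembering the most recent hit. Only the finger entry is
  // checked inline; misses call into the runtime, which searches the whole
  // cache and updates the finger.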
4299 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4300 // r5 now holds finger offset as a smi.
4301 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4302 // r6 now points to the start of fixed array elements.
4303 __ SmiToPtrArrayOffset(r5, r5);
4304 __ LoadPUX(r5, MemOperand(r6, r5));
  // r6 now points to the key of the pair.
  __ cmp(key, r5);
  __ bne(&not_found);

  __ LoadP(r3, MemOperand(r6, kPointerSize));
  __ b(&done);
  __ bind(&not_found);
4313 // Call runtime to perform the lookup.
4314 __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCacheRT, 2);

  __ bind(&done);
  context()->Plug(r3);
}
4322 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4323 ZoneList<Expression*>* args = expr->arguments();
4324 VisitForAccumulatorValue(args->at(0));
4326 Label materialize_true, materialize_false;
4327 Label* if_true = NULL;
4328 Label* if_false = NULL;
4329 Label* fall_through = NULL;
4330 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4331 &if_false, &fall_through);
4333 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
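  // A string's hash field can double as a cache for a numeric array index;
  // the mask below tests the bits that indicate such an index is present.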
4334 // PPC - assume ip is free
4335 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4336 __ and_(r0, r3, ip);
4337 __ cmpi(r0, Operand::Zero());
4338 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4339 Split(eq, if_true, if_false, fall_through);
4341 context()->Plug(if_true, if_false);
4345 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4346 ZoneList<Expression*>* args = expr->arguments();
4347 DCHECK(args->length() == 1);
4348 VisitForAccumulatorValue(args->at(0));
4350 __ AssertString(r3);
4352 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4353 __ IndexFromHash(r3, r3);
4355 context()->Plug(r3);
4359 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4360 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4361 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4362 one_char_separator_loop_entry, long_separator_loop;
4363 ZoneList<Expression*>* args = expr->arguments();
4364 DCHECK(args->length() == 2);
4365 VisitForStackValue(args->at(1));
4366 VisitForAccumulatorValue(args->at(0));
4368 // All aliases of the same register have disjoint lifetimes.
4369 Register array = r3;
4370 Register elements = no_reg; // Will be r3.
4371 Register result = no_reg; // Will be r3.
4372 Register separator = r4;
4373 Register array_length = r5;
4374 Register result_pos = no_reg; // Will be r5
4375 Register string_length = r6;
4376 Register string = r7;
4377 Register element = r8;
4378 Register elements_end = r9;
4379 Register scratch1 = r10;
4380 Register scratch2 = r11;
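  // Overview: a first pass validates that every element is a flat one-byte
  // string and accumulates the total length; the result is then allocated
  // once and filled by one of three copy loops specialized on the separator
  // (empty, single character, longer).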
  // Separator operand is on the stack.
  __ LoadP(separator, MemOperand(sp));
4385 // Check that the array is a JSArray.
4386 __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
  __ bne(&bailout);
4390 // Check that the array has fast elements.
4391 __ CheckFastElements(scratch1, scratch2, &bailout);
4393 // If the array has length zero, return the empty string.
4394 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4395 __ SmiUntag(array_length);
4396 __ cmpi(array_length, Operand::Zero());
4397 __ bne(&non_trivial_array);
  __ LoadRoot(r3, Heap::kempty_stringRootIndex);
  __ b(&done);
4401 __ bind(&non_trivial_array);
  // Get the FixedArray containing array's elements.
  elements = array;
  __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4406 array = no_reg; // End of array's live range.
4408 // Check that all array elements are sequential one-byte strings, and
4409 // accumulate the sum of their lengths, as a smi-encoded value.
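  // For example, ["a", "bc", "d"].join("--") accumulates 1 + 2 + 1 = 4 here,
  // and the separator pass below adds (3 - 1) * 2 = 4 more, giving the final
  // length 8 of "a--bc--d".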
4410 __ li(string_length, Operand::Zero());
4411 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4412 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4413 __ add(elements_end, element, elements_end);
4414 // Loop condition: while (element < elements_end).
4415 // Live values in registers:
4416 // elements: Fixed array of strings.
4417 // array_length: Length of the fixed array of strings (not smi)
4418 // separator: Separator string
4419 // string_length: Accumulated sum of string lengths (smi).
4420 // element: Current array element.
4421 // elements_end: Array end.
4422 if (generate_debug_code_) {
4423 __ cmpi(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
4427 __ LoadP(string, MemOperand(element));
4428 __ addi(element, element, Operand(kPointerSize));
4429 __ JumpIfSmi(string, &bailout);
4430 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4431 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4432 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4433 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
                            r0);
  __ BranchOnOverflow(&bailout);
  __ cmp(element, elements_end);
  __ blt(&loop);
4442 // If array_length is 1, return elements[0], a string.
4443 __ cmpi(array_length, Operand(1));
  __ bne(&not_size_one_array);
  __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);
  __ bind(&not_size_one_array);
4450 // Live values in registers:
4451 // separator: Separator string
4452 // array_length: Length of the array.
4453 // string_length: Sum of string lengths (smi).
4454 // elements: FixedArray of strings.
4456 // Check that the separator is a flat one-byte string.
4457 __ JumpIfSmi(separator, &bailout);
4458 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4459 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4460 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4462 // Add (separator length times array_length) - separator length to the
4463 // string_length to get the length of the result string.
  __ LoadP(scratch1,
           FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4466 __ sub(string_length, string_length, scratch1);
4467 #if V8_TARGET_ARCH_PPC64
4468 __ SmiUntag(scratch1, scratch1);
4469 __ Mul(scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
  // result are zero.
4472 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4473 __ bne(&bailout, cr0);
4474 __ SmiTag(scratch2, scratch2);
#else
  // array_length is not a smi but the other values are, so the result is a smi.
  __ mullw(scratch2, array_length, scratch1);
  __ mulhw(ip, array_length, scratch1);
  // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
  // result are zero.
  __ cmpi(ip, Operand::Zero());
  __ bne(&bailout);
  __ cmpwi(scratch2, Operand::Zero());
  __ blt(&bailout);
#endif
  __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
                            r0);
4489 __ BranchOnOverflow(&bailout);
4490 __ SmiUntag(string_length);
  // Get first element in the array to free up the elements register to be used
  // for the result.
4494 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4495 result = elements; // End of live range for elements.
4497 // Live values in registers:
4498 // element: First array element
4499 // separator: Separator string
4500 // string_length: Length of result string (not smi)
4501 // array_length: Length of the array.
4502 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4503 elements_end, &bailout);
  // Prepare for looping. Set up elements_end to point past the end of the
  // array, and result_pos to the position in the result string where the
  // first character will be written.
4507 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4508 __ add(elements_end, element, elements_end);
4509 result_pos = array_length; // End of live range for array_length.
4510 array_length = no_reg;
4511 __ addi(result_pos, result,
4512 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4514 // Check the length of the separator.
  __ LoadP(scratch1,
           FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4517 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4518 __ beq(&one_char_separator);
4519 __ bgt(&long_separator);
4521 // Empty separator case
4522 __ bind(&empty_separator_loop);
4523 // Live values in registers:
4524 // result_pos: the position to which we are currently copying characters.
4525 // element: Current array element.
4526 // elements_end: Array end.
4528 // Copy next array element to the result.
4529 __ LoadP(string, MemOperand(element));
4530 __ addi(element, element, Operand(kPointerSize));
4531 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4532 __ SmiUntag(string_length);
4533 __ addi(string, string,
4534 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4535 __ CopyBytes(string, result_pos, string_length, scratch1);
4536 __ cmp(element, elements_end);
4537 __ blt(&empty_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r3));
  __ b(&done);
4541 // One-character separator case
4542 __ bind(&one_char_separator);
4543 // Replace separator with its one-byte character value.
4544 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4545 // Jump into the loop after the code that copies the separator, so the first
4546 // element is not preceded by a separator
4547 __ b(&one_char_separator_loop_entry);
4549 __ bind(&one_char_separator_loop);
4550 // Live values in registers:
4551 // result_pos: the position to which we are currently copying characters.
4552 // element: Current array element.
4553 // elements_end: Array end.
4554 // separator: Single separator one-byte char (in lower byte).
4556 // Copy the separator character to the result.
4557 __ stb(separator, MemOperand(result_pos));
4558 __ addi(result_pos, result_pos, Operand(1));
4560 // Copy next array element to the result.
4561 __ bind(&one_char_separator_loop_entry);
4562 __ LoadP(string, MemOperand(element));
4563 __ addi(element, element, Operand(kPointerSize));
4564 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4565 __ SmiUntag(string_length);
4566 __ addi(string, string,
4567 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4568 __ CopyBytes(string, result_pos, string_length, scratch1);
4569 __ cmpl(element, elements_end);
4570 __ blt(&one_char_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r3));
  __ b(&done);
4574 // Long separator case (separator is more than one character). Entry is at the
4575 // label long_separator below.
4576 __ bind(&long_separator_loop);
4577 // Live values in registers:
4578 // result_pos: the position to which we are currently copying characters.
4579 // element: Current array element.
4580 // elements_end: Array end.
4581 // separator: Separator string.
4583 // Copy the separator to the result.
4584 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4585 __ SmiUntag(string_length);
4586 __ addi(string, separator,
4587 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4588 __ CopyBytes(string, result_pos, string_length, scratch1);
4590 __ bind(&long_separator);
4591 __ LoadP(string, MemOperand(element));
4592 __ addi(element, element, Operand(kPointerSize));
4593 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4594 __ SmiUntag(string_length);
4595 __ addi(string, string,
4596 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4597 __ CopyBytes(string, result_pos, string_length, scratch1);
4598 __ cmpl(element, elements_end);
4599 __ blt(&long_separator_loop); // End while (element < elements_end).
  DCHECK(result.is(r3));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r3);
}
4610 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4611 DCHECK(expr->arguments()->length() == 0);
4612 ExternalReference debug_is_active =
4613 ExternalReference::debug_is_active_address(isolate());
4614 __ mov(ip, Operand(debug_is_active));
  __ lbz(r3, MemOperand(ip));
  __ SmiTag(r3);
4617 context()->Plug(r3);
4621 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4622 ZoneList<Expression*>* args = expr->arguments();
4623 int arg_count = args->length();
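  // %foo(...) calls dispatch three ways: JS builtins looked up on the
  // builtins object (is_jsruntime), intrinsics inlined by full-codegen
  // (the switch below), and plain calls into the C++ runtime.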
4625 if (expr->is_jsruntime()) {
4626 Comment cmnt(masm_, "[ CallRuntime");
4627 // Push the builtins object as the receiver.
4628 Register receiver = LoadDescriptor::ReceiverRegister();
4629 __ LoadP(receiver, GlobalObjectOperand());
    __ LoadP(receiver,
             FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
    __ push(receiver);
4634 // Load the function from the receiver.
4635 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4636 if (FLAG_vector_ics) {
4637 __ mov(VectorLoadICDescriptor::SlotRegister(),
4638 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }
4644 // Push the target function under the receiver.
    __ LoadP(ip, MemOperand(sp, 0));
    __ push(ip);
4647 __ StoreP(r3, MemOperand(sp, kPointerSize));
4649 // Push the arguments ("left-to-right").
4650 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
4654 // Record source position of the IC call.
4655 SetSourcePosition(expr->position());
4656 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
    __ CallStub(&stub);
4660 // Restore context register.
4661 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, r3);
  } else {
4666 const Runtime::Function* function = expr->function();
4667 switch (function->function_id) {
4668 #define CALL_INTRINSIC_GENERATOR(Name) \
4669 case Runtime::kInline##Name: { \
4670 Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);             \
  }
4673 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
4676 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4677 // Push the arguments ("left-to-right").
4678 for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }
4682 // Call the C runtime function.
4683 __ CallRuntime(expr->function(), arg_count);
        context()->Plug(r3);
      }
    }
  }
}
4691 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4692 switch (expr->op()) {
4693 case Token::DELETE: {
4694 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4695 Property* property = expr->expression()->AsProperty();
4696 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4698 if (property != NULL) {
4699 VisitForStackValue(property->obj());
4700 VisitForStackValue(property->key());
        __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
        __ push(r4);
4703 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4704 context()->Plug(r3);
4705 } else if (proxy != NULL) {
4706 Variable* var = proxy->var();
4707 // Delete of an unqualified identifier is disallowed in strict mode
4708 // but "delete this" is allowed.
4709 DCHECK(is_sloppy(language_mode()) || var->is_this());
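        // e.g. 'delete obj.p' was handled by the property branch above; from
        // here 'delete x' dispatches on where the variable lives: the global
        // object, a stack/context slot (always false), or a lookup slot.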
4710 if (var->IsUnallocated()) {
4711 __ LoadP(r5, GlobalObjectOperand());
4712 __ mov(r4, Operand(var->name()));
4713 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4714 __ Push(r5, r4, r3);
4715 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4716 context()->Plug(r3);
4717 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4718 // Result of deleting non-global, non-dynamic variables is false.
4719 // The subexpression does not have side effects.
        context()->Plug(var->is_this());
      } else {
4722 // Non-global variable. Call the runtime to try to delete from the
4723 // context where the variable was introduced.
4724 DCHECK(!context_register().is(r5));
4725 __ mov(r5, Operand(var->name()));
4726 __ Push(context_register(), r5);
4727 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
        context()->Plug(r3);
      }
    } else {
4731 // Result of deleting non-property, non-variable reference is true.
4732 // The subexpression may have side effects.
4733 VisitForEffect(expr->expression());
      context()->Plug(true);
    }
    break;
  }

  case Token::VOID: {
4740 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4741 VisitForEffect(expr->expression());
    context()->Plug(Heap::kUndefinedValueRootIndex);
    break;
  }

  case Token::NOT: {
4747 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4748 if (context()->IsEffect()) {
4749 // Unary NOT has no side effects so it's only necessary to visit the
4750 // subexpression. Match the optimizing compiler by not branching.
4751 VisitForEffect(expr->expression());
4752 } else if (context()->IsTest()) {
4753 const TestContext* test = TestContext::cast(context());
4754 // The labels are swapped for the recursive call.
4755 VisitForControl(expr->expression(), test->false_label(),
4756 test->true_label(), test->fall_through());
      context()->Plug(test->true_label(), test->false_label());
    } else {
4759 // We handle value contexts explicitly rather than simply visiting
4760 // for control and plugging the control flow into the context,
4761 // because we need to prepare a pair of extra administrative AST ids
4762 // for the optimizing compiler.
4763 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4764 Label materialize_true, materialize_false, done;
4765 VisitForControl(expr->expression(), &materialize_false,
4766 &materialize_true, &materialize_true);
4767 __ bind(&materialize_true);
4768 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4769 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
      if (context()->IsStackValue()) __ push(r3);
      __ b(&done);
4772 __ bind(&materialize_false);
4773 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4774 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
      if (context()->IsStackValue()) __ push(r3);
      __ bind(&done);
    }
    break;
  }

4781 case Token::TYPEOF: {
4782 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
    {
      StackValueContext context(this);
      VisitForTypeofValue(expr->expression());
    }
    __ CallRuntime(Runtime::kTypeof, 1);
    context()->Plug(r3);
    break;
  }

  default:
    UNREACHABLE();
  }
}
4798 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4799 DCHECK(expr->expression()->IsValidReferenceExpression());
4801 Comment cmnt(masm_, "[ CountOperation");
4802 SetSourcePosition(expr->position());
4804 Property* prop = expr->expression()->AsProperty();
4805 LhsKind assign_type = GetAssignType(prop);
4807 // Evaluate expression and get value.
4808 if (assign_type == VARIABLE) {
4809 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4810 AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
4813 // Reserve space for result of postfix operation.
4814 if (expr->is_postfix() && !context()->IsEffect()) {
      __ LoadSmiLiteral(ip, Smi::FromInt(0));
      __ push(ip);
    }
4818 switch (assign_type) {
4819 case NAMED_PROPERTY: {
4820 // Put the object both on the stack and in the register.
4821 VisitForStackValue(prop->obj());
4822 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

4827 case NAMED_SUPER_PROPERTY: {
4828 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4829 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4830 __ Push(result_register());
4831 const Register scratch = r4;
4832 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4833 __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

4838 case KEYED_SUPER_PROPERTY: {
4839 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4840 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4841 const Register scratch = r4;
4842 const Register scratch1 = r5;
4843 __ Move(scratch, result_register());
4844 VisitForAccumulatorValue(prop->key());
4845 __ Push(scratch, result_register());
4846 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4847 __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

4852 case KEYED_PROPERTY: {
4853 VisitForStackValue(prop->obj());
4854 VisitForStackValue(prop->key());
4855 __ LoadP(LoadDescriptor::ReceiverRegister(),
4856 MemOperand(sp, 1 * kPointerSize));
4857 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }
4867 // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
4869 if (assign_type == VARIABLE) {
4870 PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }
4875 // Inline smi case if we are in a loop.
4876 Label stub_call, done;
4877 JumpPatchSite patch_site(masm_);
4879 int count_value = expr->op() == Token::INC ? 1 : -1;
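  // The smi fast path below increments the tagged value directly: a smi is
  // the integer shifted left (one bit on 32-bit targets, 32 bits on PPC64),
  // so adding the tagged count_value is a single machine add, and
  // AddAndCheckForOverflow catches results that leave the smi range.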
4880 if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r3, &slow);
4884 // Save result for postfix expressions.
4885 if (expr->is_postfix()) {
4886 if (!context()->IsEffect()) {
4887 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r3);
            break;
          case NAMED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }
4910 Register scratch1 = r4;
4911 Register scratch2 = r5;
4912 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4913 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4914 __ BranchOnNoOverflow(&done);
4915 // Call stub. Undo operation first.
    __ sub(r3, r3, scratch1);
    __ b(&stub_call);
    __ bind(&slow);
  }
4920 ToNumberStub convert_stub(isolate());
4921 __ CallStub(&convert_stub);
4922 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4924 // Save result for postfix expressions.
4925 if (expr->is_postfix()) {
4926 if (!context()->IsEffect()) {
4927 // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r3);
            break;
          case NAMED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }
4950 __ bind(&stub_call);
  __ mr(r4, r3);
  __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4954 // Record position before stub call.
4955 SetSourcePosition(expr->position());
4957 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
4958 CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);
4962 // Store the value returned in r3.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        {
          EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r3);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r3);
      }
      break;
4985 case NAMED_PROPERTY: {
4986 __ mov(StoreDescriptor::NameRegister(),
4987 Operand(prop->key()->AsLiteral()->value()));
4988 __ pop(StoreDescriptor::ReceiverRegister());
4989 CallStoreIC(expr->CountStoreFeedbackId());
4990 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4991 if (expr->is_postfix()) {
4992 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
5000 case NAMED_SUPER_PROPERTY: {
5001 EmitNamedSuperPropertyStore(prop);
5002 if (expr->is_postfix()) {
5003 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
5011 case KEYED_SUPER_PROPERTY: {
5012 EmitKeyedSuperPropertyStore(prop);
5013 if (expr->is_postfix()) {
5014 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
5022 case KEYED_PROPERTY: {
5023 __ Pop(StoreDescriptor::ReceiverRegister(),
5024 StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5027 CallIC(ic, expr->CountStoreFeedbackId());
5028 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5029 if (expr->is_postfix()) {
5030 if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
  }
}
5042 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5043 DCHECK(!context()->IsEffect());
5044 DCHECK(!context()->IsTest());
5045 VariableProxy* proxy = expr->AsVariableProxy();
5046 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5047 Comment cmnt(masm_, "[ Global variable");
5048 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5049 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
5050 if (FLAG_vector_ics) {
5051 __ mov(VectorLoadICDescriptor::SlotRegister(),
             Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
5056 CallLoadIC(NOT_CONTEXTUAL);
5057 PrepareForBailout(expr, TOS_REG);
5058 context()->Plug(r3);
5059 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r3, Operand(proxy->name()));
    __ Push(cp, r3);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r3);
  } else {
5076 // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
5082 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5083 Expression* sub_expr,
5084 Handle<String> check) {
5085 Label materialize_true, materialize_false;
5086 Label* if_true = NULL;
5087 Label* if_false = NULL;
5088 Label* fall_through = NULL;
5089 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5090 &if_false, &fall_through);
  {
    AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
5096 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5098 Factory* factory = isolate()->factory();
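  // One branch per possible typeof result string (e.g. typeof 1 == "number",
  // typeof undefined == "undefined"); a comparison against any other literal
  // is always false.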
5099 if (String::Equals(check, factory->number_string())) {
5100 __ JumpIfSmi(r3, if_true);
5101 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r3, ip);
5104 Split(eq, if_true, if_false, fall_through);
5105 } else if (String::Equals(check, factory->string_string())) {
5106 __ JumpIfSmi(r3, if_false);
5107 // Check for undetectable objects => false.
    __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
    __ bge(if_false);
5110 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5111 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
5112 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5113 Split(eq, if_true, if_false, fall_through, cr0);
5114 } else if (String::Equals(check, factory->symbol_string())) {
5115 __ JumpIfSmi(r3, if_false);
5116 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
5117 Split(eq, if_true, if_false, fall_through);
5118 } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r3, Heap::kTrueValueRootIndex);
    __ beq(if_true);
5121 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
5122 Split(eq, if_true, if_false, fall_through);
5123 } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ beq(if_true);
5126 __ JumpIfSmi(r3, if_false);
5127 // Check for undetectable objects => true.
5128 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
5129 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5130 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5131 Split(ne, if_true, if_false, fall_through, cr0);
5133 } else if (String::Equals(check, factory->function_string())) {
5134 __ JumpIfSmi(r3, if_false);
5135 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
    __ beq(if_true);
5138 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
5139 Split(eq, if_true, if_false, fall_through);
5140 } else if (String::Equals(check, factory->object_string())) {
5141 __ JumpIfSmi(r3, if_false);
    __ CompareRoot(r3, Heap::kNullValueRootIndex);
    __ beq(if_true);
5144 // Check for JS objects => true.
    __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ blt(if_false);
    __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ bgt(if_false);
5149 // Check for undetectable objects => false.
5150 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
5151 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
5152 Split(eq, if_true, if_false, fall_through, cr0);
  } else {
    if (if_false != fall_through) __ b(if_false);
  }

  context()->Plug(if_true, if_false);
}
5160 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5161 Comment cmnt(masm_, "[ CompareOperation");
5162 SetSourcePosition(expr->position());
5164 // First we try a fast inlined version of the compare when one of
5165 // the operands is a literal.
5166 if (TryLiteralCompare(expr)) return;
5168 // Always perform the comparison for its control flow. Pack the result
5169 // into the expression's context after the comparison is performed.
5170 Label materialize_true, materialize_false;
5171 Label* if_true = NULL;
5172 Label* if_false = NULL;
5173 Label* fall_through = NULL;
5174 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5175 &if_false, &fall_through);
5177 Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
5181 VisitForStackValue(expr->right());
5182 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5183 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r3, ip);
      Split(eq, if_true, if_false, fall_through);
      break;
5189 case Token::INSTANCEOF: {
5190 VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
5193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5194 // The stub returns 0 for true.
5195 __ cmpi(r3, Operand::Zero());
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
5201 VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r4);
5205 bool inline_smi_code = ShouldInlineSmiCase(op);
5206 JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orx(r5, r3, r4);
        patch_site.EmitJumpIfNotSmi(r5, &slow_case);
        __ cmp(r4, r3);
5212 Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }
5216 // Record position and call the compare IC.
5217 SetSourcePosition(expr->position());
5218 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5219 CallIC(ic, expr->CompareOperationFeedbackId());
5220 patch_site.EmitPatchInfo();
5221 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5222 __ cmpi(r3, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }
5227 // Convert the result of the comparison into one expected for this
5228 // expression's context.
  context()->Plug(if_true, if_false);
}
5233 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
5236 Label materialize_true, materialize_false;
5237 Label* if_true = NULL;
5238 Label* if_false = NULL;
5239 Label* fall_through = NULL;
5240 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5241 &if_false, &fall_through);
5243 VisitForAccumulatorValue(sub_expr);
5244 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
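  // 'x === null' / 'x === undefined' reduce to a direct root compare; the
  // sloppy 'x == null' form must also accept the other nil value and
  // undetectable objects, which the CompareNilIC handles.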
5245 if (expr->op() == Token::EQ_STRICT) {
5246 Heap::RootListIndex nil_value = nil == kNullValue
5247 ? Heap::kNullValueRootIndex
5248 : Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r4, nil_value);
    __ cmp(r3, r4);
5251 Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5254 CallIC(ic, expr->CompareOperationFeedbackId());
5255 __ cmpi(r3, Operand::Zero());
    Split(ne, if_true, if_false, fall_through);
  }
5258 context()->Plug(if_true, if_false);
5262 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5263 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5264 context()->Plug(r3);
5268 Register FullCodeGenerator::result_register() { return r3; }
5271 Register FullCodeGenerator::context_register() { return cp; }
5274 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5275 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5276 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5280 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5281 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5285 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5286 Scope* declaration_scope = scope()->DeclarationScope();
5287 if (declaration_scope->is_script_scope() ||
5288 declaration_scope->is_module_scope()) {
5289 // Contexts nested in the native context have a canonical empty function
5290 // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
5293 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5294 } else if (declaration_scope->is_eval_scope()) {
5295 // Contexts created by a call to eval have the same closure as the
5296 // context calling eval, not the anonymous closure containing the eval
5297 // code. Fetch it from the context.
    __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
5300 DCHECK(declaration_scope->is_function_scope());
    __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}
5307 // ----------------------------------------------------------------------------
5308 // Non-local control flow support.
5310 void FullCodeGenerator::EnterFinallyBlock() {
5311 DCHECK(!result_register().is(r4));
5312 // Store result register while executing finally block.
5313 __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ mflr(r4);
  __ mov(ip, Operand(masm_->CodeObject()));
  __ sub(r4, r4, ip);
  __ SmiTag(r4);
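  // (Saving the return address as a smi-tagged offset from the code object
  // start keeps it valid even if the GC moves the code while the finally
  // block runs.)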
  // Store the cooked return address while executing the finally block.
  __ push(r4);
5323 // Store pending message while executing finally block.
5324 ExternalReference pending_message_obj =
5325 ExternalReference::address_of_pending_message_obj(isolate());
5326 __ mov(ip, Operand(pending_message_obj));
  __ LoadP(r4, MemOperand(ip));
  __ push(r4);
}
5332 void FullCodeGenerator::ExitFinallyBlock() {
5333 DCHECK(!result_register().is(r4));
  // Restore pending message from stack.
  __ pop(r4);
5336 ExternalReference pending_message_obj =
5337 ExternalReference::address_of_pending_message_obj(isolate());
5338 __ mov(ip, Operand(pending_message_obj));
5339 __ StoreP(r4, MemOperand(ip));
  // Restore the cooked return address from the stack.
  __ pop(r4);

  // Restore the result register and uncook the return address.
  __ pop(result_register());
  __ SmiUntag(r4);
  __ mov(ip, Operand(masm_->CodeObject()));
  __ add(ip, ip, r4);
  __ mtlr(ip);
  __ blr();
}
5357 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5358 BackEdgeState target_state,
5359 Code* replacement_code) {
5360 Address mov_address = Assembler::target_address_from_return_address(pc);
5361 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5362 CodePatcher patcher(cmp_address, 1);
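  // A back edge decrements the profiling counter, then either compares it
  // (cmpi) and skips the interrupt stub call via bge, or, once patched with
  // crset, makes the bge never taken so the (retargeted) stub call always
  // runs and triggers on-stack replacement.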
  switch (target_state) {
    case INTERRUPT: {
      //  <decrement profiling counter>
      //  cmpi    r6, 0
      //  bge     <ok>            ;; not changed
      //  mov     r12, <interrupt stub address>
      //  mtlr    r12
      //  blrl
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->cmpi(r6, Operand::Zero());
      break;
    }
5377 case ON_STACK_REPLACEMENT:
5378 case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  crset
      //  bge     <ok>            ;; not changed
      //  mov     r12, <on-stack replacement address>
      //  mtlr    r12
      //  blrl
      //  <reset profiling counter>
      //  ok-label ----- pc_after points here

      // Set the LT bit such that bge is a NOP.
      patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
      break;
  }
5393 // Replace the stack check address in the mov sequence with the
5394 // entry address of the replacement code.
5395 Assembler::set_target_address_at(mov_address, unoptimized_code,
5396 replacement_code->entry());
5398 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, mov_address, replacement_code);
}
5403 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5404 Isolate* isolate, Code* unoptimized_code, Address pc) {
5405 Address mov_address = Assembler::target_address_from_return_address(pc);
5406 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5407 Address interrupt_address =
5408 Assembler::target_address_at(mov_address, unoptimized_code);
5410 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
    DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }
5415 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5417 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }
5421 DCHECK(interrupt_address ==
5422 isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}
}
}  // namespace v8::internal
5427 #endif // V8_TARGET_ARCH_PPC