1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
22 #define __ ACCESS_MASM(masm_)
// Records the position of a smi-check jump (jc / jnc) so that the IC system
// can later patch it in place (see note below: jc becomes jz, jnc becomes
// jnz), flipping which operand kind takes the inlined fast path.
// NOTE(review): this listing is elided; some member declarations and closing
// braces of this class are not shown here.
25 class JumpPatchSite BASE_EMBEDDED {
27 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
// Starts with no patch info emitted; the (elided) debug destructor checks
// that patch info was emitted iff a patch site was bound.
29 info_emitted_ = false;
34 DCHECK(patch_site_.is_bound() == info_emitted_);
// Emit a smi-tag test on |reg| followed by a patchable jump to |target|.
37 void EmitJumpIfNotSmi(Register reg,
39 Label::Distance distance = Label::kFar) {
40 __ test(reg, Immediate(kSmiTagMask));
41 EmitJump(not_carry, target, distance); // Always taken before patched.
// Same as above but with the inverse (carry) condition.
44 void EmitJumpIfSmi(Register reg,
46 Label::Distance distance = Label::kFar) {
47 __ test(reg, Immediate(kSmiTagMask));
48 EmitJump(carry, target, distance); // Never taken before patched.
// Encode the backwards distance to the patch site in a test-eax instruction
// (must fit in a uint8); a bare nop instead signals "no inlined smi code".
51 void EmitPatchInfo() {
52 if (patch_site_.is_bound()) {
53 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
54 DCHECK(is_uint8(delta_to_patch_site));
55 __ test(eax, Immediate(delta_to_patch_site));
60 __ nop(); // Signals no inlined code.
65 // jc will be patched with jz, jnc will become jnz.
66 void EmitJump(Condition cc, Label* target, Label::Distance distance) {
67 DCHECK(!patch_site_.is_bound() && !info_emitted_);
68 DCHECK(cc == carry || cc == not_carry);
69 __ bind(&patch_site_);
70 __ j(cc, target, distance);
73 MacroAssembler* masm_;
81 // Generate code for a JS function. On entry to the function the receiver
82 // and arguments have been pushed on the stack left to right, with the
83 // return address on top of them. The actual argument count matches the
84 // formal parameter count expected by the function.
86 // The live registers are:
87 // o edi: the JS function object being called (i.e. ourselves)
89 // o ebp: our caller's frame pointer
90 // o esp: stack pointer (pointing to return address)
92 // The function builds a JS frame. Please see JavaScriptFrameConstants in
93 // frames-ia32.h for its layout.
// NOTE(review): this listing is elided — several label declarations,
// bind sites, and closing braces are not shown below.
94 void FullCodeGenerator::Generate() {
95 CompilationInfo* info = info_;
// Pre-allocate the handler table sized for this function's try handlers.
97 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
98 HandlerTable::LengthForRange(function()->handler_count()), TENURED));
// The profiling counter cell starts at the full interrupt budget; it is
// decremented at back edges and returns (see EmitProfilingCounterDecrement).
100 profiling_counter_ = isolate()->factory()->NewCell(
101 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
102 SetFunctionPosition(function());
103 Comment cmnt(masm_, "[ function compiled by full code generator");
105 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
// --stop-at support: matches the function name against the flag (the
// debug-break emission itself is elided in this view).
108 if (strlen(FLAG_stop_at) > 0 &&
109 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
114 // Sloppy mode functions and builtins need to replace the receiver with the
115 // global proxy when called as functions (without an explicit receiver
117 if (is_sloppy(info->language_mode()) && !info->is_native() &&
118 info->MayUseThis()) {
120 // +1 for return address.
121 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
122 __ mov(ecx, Operand(esp, receiver_offset));
// Only replace an undefined receiver; anything else is kept as-is.
124 __ cmp(ecx, isolate()->factory()->undefined_value());
125 __ j(not_equal, &ok, Label::kNear);
127 __ mov(ecx, GlobalObjectOperand());
128 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));
130 __ mov(Operand(esp, receiver_offset), ecx);
135 // Open a frame scope to indicate that there is a frame on the stack. The
136 // MANUAL indicates that the scope shouldn't actually generate code to set up
137 // the frame (that is done below).
138 FrameScope frame_scope(masm_, StackFrame::MANUAL);
140 info->set_prologue_offset(masm_->pc_offset());
141 __ Prologue(info->IsCodePreAgingActive());
142 info->AddNoFrameRange(0, masm_->pc_offset());
144 { Comment cmnt(masm_, "[ Allocate locals");
145 int locals_count = info->scope()->num_stack_slots();
146 // Generators allocate locals, if any, in context slots.
147 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
148 if (locals_count == 1) {
149 __ push(Immediate(isolate()->factory()->undefined_value()));
150 } else if (locals_count > 1) {
// For many locals, check against the real stack limit up front so the
// pushes below cannot overflow the stack.
151 if (locals_count >= 128) {
154 __ sub(ecx, Immediate(locals_count * kPointerSize));
155 ExternalReference stack_limit =
156 ExternalReference::address_of_real_stack_limit(isolate());
157 __ cmp(ecx, Operand::StaticVariable(stack_limit));
158 __ j(above_equal, &ok, Label::kNear);
159 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// Initialize locals to undefined, pushing in batches of kMaxPushes with a
// counted loop (ecx) to bound the emitted code size.
162 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
163 const int kMaxPushes = 32;
164 if (locals_count >= kMaxPushes) {
165 int loop_iterations = locals_count / kMaxPushes;
166 __ mov(ecx, loop_iterations);
168 __ bind(&loop_header);
170 for (int i = 0; i < kMaxPushes; i++) {
174 __ j(not_zero, &loop_header, Label::kNear);
176 int remaining = locals_count % kMaxPushes;
177 // Emit the remaining pushes.
178 for (int i = 0; i < remaining; i++) {
// Tracks whether edi still holds the closure (context allocation below
// may clobber it).
184 bool function_in_register = true;
186 // Possibly allocate a local context.
187 if (info->scope()->num_heap_slots() > 0) {
188 Comment cmnt(masm_, "[ Allocate context");
189 bool need_write_barrier = true;
190 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
191 // Argument to NewContext is the function, which is still in edi.
192 if (info->scope()->is_script_scope()) {
194 __ Push(info->scope()->GetScopeInfo(info->isolate()));
195 __ CallRuntime(Runtime::kNewScriptContext, 2);
196 } else if (slots <= FastNewContextStub::kMaximumSlots) {
197 FastNewContextStub stub(isolate(), slots);
199 // Result of FastNewContextStub is always in new space.
200 need_write_barrier = false;
203 __ CallRuntime(Runtime::kNewFunctionContext, 1);
205 function_in_register = false;
206 // Context is returned in eax. It replaces the context passed to us.
207 // It's saved in the stack and kept live in esi.
209 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
211 // Copy parameters into context if necessary.
212 int num_parameters = info->scope()->num_parameters();
// Slot -1 is the receiver when the scope declares `this`.
213 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
214 for (int i = first_parameter; i < num_parameters; i++) {
215 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
216 if (var->IsContextSlot()) {
217 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
218 (num_parameters - 1 - i) * kPointerSize;
219 // Load parameter from stack.
220 __ mov(eax, Operand(ebp, parameter_offset));
221 // Store it in the context.
222 int context_offset = Context::SlotOffset(var->index());
223 __ mov(Operand(esi, context_offset), eax);
224 // Update the write barrier. This clobbers eax and ebx.
225 if (need_write_barrier) {
226 __ RecordWriteContextSlot(esi,
231 } else if (FLAG_debug_code) {
233 __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
234 __ Abort(kExpectedNewSpaceObject);
241 // Possibly set up a local binding to the this function which is used in
242 // derived constructors with super calls.
243 Variable* this_function_var = scope()->this_function_var();
244 if (this_function_var != nullptr) {
245 Comment cmnt(masm_, "[ This function");
246 SetVar(this_function_var, edi, ebx, edx);
249 Variable* new_target_var = scope()->new_target_var();
250 if (new_target_var != nullptr) {
251 Comment cmnt(masm_, "[ new.target");
252 // new.target is parameter -2.
253 int offset = 2 * kPointerSize + kFPOnStackSize + kPCOnStackSize +
254 (info_->scope()->num_parameters() - 1) * kPointerSize;
255 __ mov(eax, Operand(ebp, offset));
256 SetVar(new_target_var, eax, ebx, edx);
// Subclass constructors carry an extra implicit new.target argument that
// the arguments/rest stubs must account for.
259 ArgumentsAccessStub::HasNewTarget has_new_target =
260 IsSubclassConstructor(info->function()->kind())
261 ? ArgumentsAccessStub::HAS_NEW_TARGET
262 : ArgumentsAccessStub::NO_NEW_TARGET;
264 // Possibly allocate RestParameters
266 Variable* rest_param = scope()->rest_parameter(&rest_index);
268 Comment cmnt(masm_, "[ Allocate rest parameter array");
270 int num_parameters = info->scope()->num_parameters();
271 int offset = num_parameters * kPointerSize;
272 if (has_new_target == ArgumentsAccessStub::HAS_NEW_TARGET) {
278 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
280 __ push(Immediate(Smi::FromInt(num_parameters)));
281 __ push(Immediate(Smi::FromInt(rest_index)));
282 __ push(Immediate(Smi::FromInt(language_mode())));
284 RestParamAccessStub stub(isolate());
287 SetVar(rest_param, eax, ebx, edx);
290 Variable* arguments = scope()->arguments();
291 if (arguments != NULL) {
292 // Function uses arguments object.
293 Comment cmnt(masm_, "[ Allocate arguments object");
294 if (function_in_register) {
297 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
299 // Receiver is just before the parameters on the caller's stack.
300 int num_parameters = info->scope()->num_parameters();
301 int offset = num_parameters * kPointerSize;
303 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
305 __ push(Immediate(Smi::FromInt(num_parameters)));
306 // Arguments to ArgumentsAccessStub:
307 // function, receiver address, parameter count.
308 // The stub will rewrite receiver and parameter count if the previous
309 // stack frame was an arguments adapter frame.
310 ArgumentsAccessStub::Type type;
311 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
312 type = ArgumentsAccessStub::NEW_STRICT;
313 } else if (function()->has_duplicate_parameters()) {
314 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
316 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
319 ArgumentsAccessStub stub(isolate(), type, has_new_target);
322 SetVar(arguments, eax, ebx, edx);
326 __ CallRuntime(Runtime::kTraceEnter, 0);
329 // Visit the declarations and body unless there is an illegal
331 if (scope()->HasIllegalRedeclaration()) {
332 Comment cmnt(masm_, "[ Declarations");
333 scope()->VisitIllegalRedeclaration(this);
336 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
337 { Comment cmnt(masm_, "[ Declarations");
338 // For named function expressions, declare the function name as a
340 if (scope()->is_function_scope() && scope()->function() != NULL) {
341 VariableDeclaration* function = scope()->function();
342 DCHECK(function->proxy()->var()->mode() == CONST ||
343 function->proxy()->var()->mode() == CONST_LEGACY);
344 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
345 VisitVariableDeclaration(function);
347 VisitDeclarations(scope()->declarations());
// Interrupt check at function entry: call the StackCheck builtin if esp
// is below the (non-"real") stack limit.
350 { Comment cmnt(masm_, "[ Stack check");
351 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
353 ExternalReference stack_limit
354 = ExternalReference::address_of_stack_limit(isolate());
355 __ cmp(esp, Operand::StaticVariable(stack_limit));
356 __ j(above_equal, &ok, Label::kNear);
357 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
361 { Comment cmnt(masm_, "[ Body");
362 DCHECK(loop_depth() == 0);
363 VisitStatements(function()->body());
364 DCHECK(loop_depth() == 0);
368 // Always emit a 'return undefined' in case control fell off the end of
370 { Comment cmnt(masm_, "[ return <undefined>;");
371 __ mov(eax, isolate()->factory()->undefined_value());
372 EmitReturnSequence();
// Reset the accumulator register (eax) to the Smi zero.
377 void FullCodeGenerator::ClearAccumulator() {
378 __ Move(eax, Immediate(Smi::FromInt(0)));
// Subtract |delta| (as a Smi) from the profiling counter cell's value.
// Clobbers ebx, which is used to address the cell.
382 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
383 __ mov(ebx, Immediate(profiling_counter_));
384 __ sub(FieldOperand(ebx, Cell::kValueOffset),
385 Immediate(Smi::FromInt(delta)));
// Reload the profiling counter cell with the full interrupt budget.
// Clobbers ebx, which is used to address the cell.
389 void FullCodeGenerator::EmitProfilingCounterReset() {
390 int reset_value = FLAG_interrupt_budget;
391 __ mov(ebx, Immediate(profiling_counter_));
392 __ mov(FieldOperand(ebx, Cell::kValueOffset),
393 Immediate(Smi::FromInt(reset_value)));
// Emitted at a loop back edge: decrement the profiling counter by a weight
// proportional to the loop body's code size and, when it underflows, call
// the InterruptCheck builtin (which may trigger OSR). The OSR id is recorded
// at this PC so the optimizer can find the matching AST position.
// NOTE(review): the `ok` label declaration/bind is elided in this view.
397 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
398 Label* back_edge_target) {
399 Comment cmnt(masm_, "[ Back edge bookkeeping");
402 DCHECK(back_edge_target->is_bound());
// Weight scales with the distance back to the loop header, clamped to
// [1, kMaxBackEdgeWeight].
403 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
404 int weight = Min(kMaxBackEdgeWeight,
405 Max(1, distance / kCodeSizeMultiplier));
406 EmitProfilingCounterDecrement(weight);
407 __ j(positive, &ok, Label::kNear);
408 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
410 // Record a mapping of this PC offset to the OSR id. This is used to find
411 // the AST id from the unoptimized code in order to use it as a key into
412 // the deoptimization input data found in the optimized code.
413 RecordBackEdge(stmt->OsrEntryId());
415 EmitProfilingCounterReset();
418 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
419 // Record a mapping of the OSR id to this PC. This is used if the OSR
420 // entry becomes the target of a bailout. We don't expect it to be, but
421 // we want it to work if it is.
422 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
// Emit (or jump to the already-emitted) return sequence: profiling-counter
// bookkeeping, then a patchable frame-teardown + ret that pops the
// arguments. The sequence length is asserted so the debugger can patch it.
426 void FullCodeGenerator::EmitReturnSequence() {
427 Comment cmnt(masm_, "[ Return sequence");
// Reuse a single return sequence per function; later returns jump here.
428 if (return_label_.is_bound()) {
429 __ jmp(&return_label_);
431 // Common return label
432 __ bind(&return_label_);
435 __ CallRuntime(Runtime::kTraceExit, 1);
437 // Pretend that the exit is a backwards jump to the entry.
439 if (info_->ShouldSelfOptimize()) {
440 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
// Otherwise weight the decrement by total code size, as at back edges.
442 int distance = masm_->pc_offset();
443 weight = Min(kMaxBackEdgeWeight,
444 Max(1, distance / kCodeSizeMultiplier));
446 EmitProfilingCounterDecrement(weight);
448 __ j(positive, &ok, Label::kNear);
450 __ call(isolate()->builtins()->InterruptCheck(),
451 RelocInfo::CODE_TARGET);
453 EmitProfilingCounterReset();
456 // Add a label for checking the size of the code used for returning.
457 Label check_exit_codesize;
458 masm_->bind(&check_exit_codesize);
460 SetSourcePosition(function()->end_position() - 1);
462 // Do not use the leave instruction here because it is too short to
463 // patch with the code required by the debugger.
465 int no_frame_start = masm_->pc_offset();
// +1 pops the receiver along with the parameters. Subclass constructor
// handling (extra new.target argument) is elided in this view.
468 int arg_count = info_->scope()->num_parameters() + 1;
469 if (IsSubclassConstructor(info_->function()->kind())) {
472 int arguments_bytes = arg_count * kPointerSize;
473 __ Ret(arguments_bytes, ecx);
474 // Check that the size of the code used for returning is large enough
475 // for the debugger's requirements.
476 DCHECK(Assembler::kJSReturnSequenceLength <=
477 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
478 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
// Plug(Variable*) overloads: deliver a variable's value according to the
// expression context (effect / accumulator / stack / test).
// Effect context: the value is unused, so emit nothing.
483 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
484 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
// Accumulator context: load the variable into the result register (eax).
488 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
489 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
490 codegen()->GetVar(result_register(), var);
// Stack context: push the variable's memory operand directly.
494 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
496 MemOperand operand = codegen()->VarOperand(var, result_register());
497 // Memory operands can be pushed directly.
// Test context: load into the accumulator and branch on its truthiness.
502 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
503 // For simplicity we always test the accumulator register.
504 codegen()->GetVar(result_register(), var);
505 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
506 codegen()->DoTest(this);
// Plug(Heap::RootListIndex) overloads: the root-list form of Plug is only
// meaningful on ports with a dedicated roots register; unreachable on IA32.
510 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
511 UNREACHABLE(); // Not used on IA32.
515 void FullCodeGenerator::AccumulatorValueContext::Plug(
516 Heap::RootListIndex index) const {
517 UNREACHABLE(); // Not used on IA32.
521 void FullCodeGenerator::StackValueContext::Plug(
522 Heap::RootListIndex index) const {
523 UNREACHABLE(); // Not used on IA32.
527 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
528 UNREACHABLE(); // Not used on IA32.
// Plug(Handle<Object>) overloads: deliver a literal according to context.
// Effect context: literal has no side effects; emit nothing.
532 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
// Accumulator context: move the literal into eax. SafeMove is used on one
// path (condition elided in this view — presumably when the immediate must
// not appear verbatim in the instruction stream; confirm against full file).
536 void FullCodeGenerator::AccumulatorValueContext::Plug(
537 Handle<Object> lit) const {
539 __ SafeMove(result_register(), Immediate(lit));
541 __ Move(result_register(), Immediate(lit));
// Stack context: push the literal, with the same Safe/plain split.
546 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
548 __ SafePush(Immediate(lit));
550 __ push(Immediate(lit));
// Test context: branch directly when the literal's truthiness is known at
// compile time; otherwise fall back to a runtime ToBoolean test.
555 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
556 codegen()->PrepareForBailoutBeforeSplit(condition(),
560 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
561 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
562 if (false_label_ != fall_through_) __ jmp(false_label_);
563 } else if (lit->IsTrue() || lit->IsJSObject()) {
564 if (true_label_ != fall_through_) __ jmp(true_label_);
565 } else if (lit->IsString()) {
// Only the empty string is falsy.
566 if (String::cast(*lit)->length() == 0) {
567 if (false_label_ != fall_through_) __ jmp(false_label_);
569 if (true_label_ != fall_through_) __ jmp(true_label_);
571 } else if (lit->IsSmi()) {
// Only Smi zero is falsy.
572 if (Smi::cast(*lit)->value() == 0) {
573 if (false_label_ != fall_through_) __ jmp(false_label_);
575 if (true_label_ != fall_through_) __ jmp(true_label_);
578 // For simplicity we always test the accumulator register.
579 __ mov(result_register(), lit);
580 codegen()->DoTest(this);
// DropAndPlug overloads: drop |count| stack values, then deliver |reg|
// according to the expression context.
// Effect context: just drop; the value is unused (Drop elided in this view).
585 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
586 Register reg) const {
// Accumulator context: drop, then move |reg| into the result register.
592 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
594 Register reg) const {
597 __ Move(result_register(), reg);
// Stack context: drop all but one slot and overwrite the top with |reg|.
601 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
602 Register reg) const {
604 if (count > 1) __ Drop(count - 1);
605 __ mov(Operand(esp, 0), reg);
// Test context: drop, move |reg| into the accumulator, then branch on it.
609 void FullCodeGenerator::TestContext::DropAndPlug(int count,
610 Register reg) const {
612 // For simplicity we always test the accumulator register.
614 __ Move(result_register(), reg);
615 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
616 codegen()->DoTest(this);
// Plug(materialize_true, materialize_false) overloads: bind the labels a
// comparison jumped to and materialize the corresponding boolean value as
// required by the context.
// Effect context: no value needed; both labels must be the same point.
620 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
621 Label* materialize_false) const {
622 DCHECK(materialize_true == materialize_false);
623 __ bind(materialize_true);
// Accumulator context: produce true_value/false_value in eax.
627 void FullCodeGenerator::AccumulatorValueContext::Plug(
628 Label* materialize_true,
629 Label* materialize_false) const {
631 __ bind(materialize_true);
632 __ mov(result_register(), isolate()->factory()->true_value());
633 __ jmp(&done, Label::kNear);
634 __ bind(materialize_false);
635 __ mov(result_register(), isolate()->factory()->false_value());
// Stack context: push true_value/false_value.
640 void FullCodeGenerator::StackValueContext::Plug(
641 Label* materialize_true,
642 Label* materialize_false) const {
644 __ bind(materialize_true);
645 __ push(Immediate(isolate()->factory()->true_value()));
646 __ jmp(&done, Label::kNear);
647 __ bind(materialize_false);
648 __ push(Immediate(isolate()->factory()->false_value()));
// Test context: nothing to materialize — control flow already carries the
// result; the labels must be the context's own branch targets.
653 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
654 Label* materialize_false) const {
655 DCHECK(materialize_true == true_label_);
656 DCHECK(materialize_false == false_label_);
// Plug(bool) overloads: deliver a compile-time boolean per context.
// Effect context: nothing to do.
660 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
// Accumulator context: load true_value/false_value into eax.
664 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
665 Handle<Object> value = flag
666 ? isolate()->factory()->true_value()
667 : isolate()->factory()->false_value();
668 __ mov(result_register(), value);
// Stack context: push true_value/false_value.
672 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
673 Handle<Object> value = flag
674 ? isolate()->factory()->true_value()
675 : isolate()->factory()->false_value();
676 __ push(Immediate(value));
// Test context: jump straight to the known branch target (no jump emitted
// when that target is the fall-through).
680 void FullCodeGenerator::TestContext::Plug(bool flag) const {
681 codegen()->PrepareForBailoutBeforeSplit(condition(),
686 if (true_label_ != fall_through_) __ jmp(true_label_);
688 if (false_label_ != fall_through_) __ jmp(false_label_);
// Evaluate the accumulator's truthiness via the ToBoolean IC and branch:
// the stub leaves a nonzero value in eax iff the value is truthy.
693 void FullCodeGenerator::DoTest(Expression* condition,
696 Label* fall_through) {
697 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
698 CallIC(ic, condition->test_id());
699 __ test(result_register(), result_register());
700 // The stub returns nonzero for true.
701 Split(not_zero, if_true, if_false, fall_through);
// Branch on condition |cc|, emitting the minimal number of jumps given
// which target (if any) is the fall-through.
// NOTE(review): the branch bodies for the first and final cases are elided
// in this view; only the negated-condition middle case is visible.
705 void FullCodeGenerator::Split(Condition cc,
708 Label* fall_through) {
709 if (if_false == fall_through) {
711 } else if (if_true == fall_through) {
712 __ j(NegateCondition(cc), if_false);
// Compute the ebp-relative operand for a stack-allocated variable:
// parameters live above the frame (caller's pushes), locals below.
720 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
721 DCHECK(var->IsStackAllocated());
722 // Offset is negative because higher indexes are at lower addresses.
723 int offset = -var->index() * kPointerSize;
724 // Adjust by a (parameter or local) base offset.
725 if (var->IsParameter()) {
// Skip the saved fp/return-address area plus the receiver slot.
726 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
728 offset += JavaScriptFrameConstants::kLocal0Offset;
730 return Operand(ebp, offset);
// Return an operand addressing |var|, walking the context chain into
// |scratch| for context slots, or delegating to StackOperand otherwise.
734 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
735 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
736 if (var->IsContextSlot()) {
737 int context_chain_length = scope()->ContextChainLength(var->scope());
738 __ LoadContext(scratch, context_chain_length);
739 return ContextOperand(scratch, var->index());
741 return StackOperand(var);
// Load |var| into |dest|. |dest| doubles as the scratch register for the
// context walk, so it may be clobbered before the final load.
746 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
747 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
748 MemOperand location = VarOperand(var, dest);
749 __ mov(dest, location);
// Store |src| into |var|, using |scratch0|/|scratch1| for addressing and
// the write barrier. All three registers must be distinct, and none may
// alias esi (the context register) when a context slot is written.
753 void FullCodeGenerator::SetVar(Variable* var,
757 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
758 DCHECK(!scratch0.is(src));
759 DCHECK(!scratch0.is(scratch1));
760 DCHECK(!scratch1.is(src));
761 MemOperand location = VarOperand(var, scratch0);
762 __ mov(location, src);
764 // Emit the write barrier code if the location is in the heap.
765 if (var->IsContextSlot()) {
766 int offset = Context::SlotOffset(var->index());
767 DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
768 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
// Record a bailout point just before a test split. When |should_normalize|
// the accumulator is normalized to a boolean comparison against true_value
// so deopt and full-codegen agree on the value's representation.
// NOTE(review): the `skip` label declaration/bind is elided in this view.
773 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
774 bool should_normalize,
777 // Only prepare for bailouts before splits if we're in a test
778 // context. Otherwise, we let the Visit function deal with the
779 // preparation to avoid preparing with the same AST id twice.
780 if (!context()->IsTest() || !info_->IsOptimizable()) return;
783 if (should_normalize) __ jmp(&skip, Label::kNear);
784 PrepareForBailout(expr, TOS_REG);
785 if (should_normalize) {
786 __ cmp(eax, isolate()->factory()->true_value());
787 Split(equal, if_true, if_false, NULL);
// Debug-only check that declarations are emitted in the variable's own
// context — i.e. that esi does not currently hold a with/catch context.
793 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
794 // The variable in the declaration always resides in the current context.
795 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
796 if (generate_debug_code_) {
797 // Check that we're not inside a with or catch context.
798 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
799 __ cmp(ebx, isolate()->factory()->with_context_map());
800 __ Check(not_equal, kDeclarationInWithContext);
801 __ cmp(ebx, isolate()->factory()->catch_context_map());
802 __ Check(not_equal, kDeclarationInCatchContext);
// Emit code (or record globals) for a variable declaration, dispatching on
// where the variable was allocated. Hole-initialization applies to LET,
// CONST and CONST_LEGACY bindings.
807 void FullCodeGenerator::VisitVariableDeclaration(
808 VariableDeclaration* declaration) {
809 // If it was not possible to allocate the variable at compile time, we
810 // need to "declare" it at runtime to make sure it actually exists in the
812 VariableProxy* proxy = declaration->proxy();
813 VariableMode mode = declaration->mode();
814 Variable* variable = proxy->var();
815 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
816 switch (variable->location()) {
// Globals are recorded as name/initial-value pairs for DeclareGlobals.
817 case Variable::UNALLOCATED:
818 globals_->Add(variable->name(), zone());
819 globals_->Add(variable->binding_needs_init()
820 ? isolate()->factory()->the_hole_value()
821 : isolate()->factory()->undefined_value(), zone());
824 case Variable::PARAMETER:
825 case Variable::LOCAL:
// Stack slot: store the hole directly (hole_init guard elided in view).
827 Comment cmnt(masm_, "[ VariableDeclaration");
828 __ mov(StackOperand(variable),
829 Immediate(isolate()->factory()->the_hole_value()));
833 case Variable::CONTEXT:
835 Comment cmnt(masm_, "[ VariableDeclaration");
836 EmitDebugCheckDeclarationContext(variable);
837 __ mov(ContextOperand(esi, variable->index()),
838 Immediate(isolate()->factory()->the_hole_value()));
839 // No write barrier since the hole value is in old space.
840 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
// Lookup slot: defer to the runtime with name, attributes and initial
// value as arguments.
844 case Variable::LOOKUP: {
845 Comment cmnt(masm_, "[ VariableDeclaration");
847 __ push(Immediate(variable->name()));
848 // VariableDeclaration nodes are always introduced in one of four modes.
849 DCHECK(IsDeclaredVariableMode(mode));
850 PropertyAttributes attr =
851 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
852 __ push(Immediate(Smi::FromInt(attr)));
853 // Push initial value, if any.
854 // Note: For variables we must not push an initial value (such as
855 // 'undefined') because we may have a (legal) redeclaration and we
856 // must not destroy the current value.
858 __ push(Immediate(isolate()->factory()->the_hole_value()));
860 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
862 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emit code (or record globals) for a function declaration. The function
// value is materialized eagerly, unlike plain variable declarations.
869 void FullCodeGenerator::VisitFunctionDeclaration(
870 FunctionDeclaration* declaration) {
871 VariableProxy* proxy = declaration->proxy();
872 Variable* variable = proxy->var();
873 switch (variable->location()) {
// Globals: record name + compiled SharedFunctionInfo for DeclareGlobals.
874 case Variable::UNALLOCATED: {
875 globals_->Add(variable->name(), zone());
876 Handle<SharedFunctionInfo> function =
877 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
878 // Check for stack-overflow exception.
879 if (function.is_null()) return SetStackOverflow();
880 globals_->Add(function, zone());
884 case Variable::PARAMETER:
885 case Variable::LOCAL: {
// Stack slot: evaluate the closure into eax and spill it.
886 Comment cmnt(masm_, "[ FunctionDeclaration");
887 VisitForAccumulatorValue(declaration->fun());
888 __ mov(StackOperand(variable), result_register());
892 case Variable::CONTEXT: {
893 Comment cmnt(masm_, "[ FunctionDeclaration");
894 EmitDebugCheckDeclarationContext(variable);
895 VisitForAccumulatorValue(declaration->fun());
896 __ mov(ContextOperand(esi, variable->index()), result_register());
897 // We know that we have written a function, which is not a smi.
898 __ RecordWriteContextSlot(esi,
899 Context::SlotOffset(variable->index()),
905 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
// Lookup slot: push name/attributes/value and defer to the runtime.
909 case Variable::LOOKUP: {
910 Comment cmnt(masm_, "[ FunctionDeclaration");
912 __ push(Immediate(variable->name()));
913 __ push(Immediate(Smi::FromInt(NONE)));
914 VisitForStackValue(declaration->fun());
915 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Import declarations: only UNALLOCATED and CONTEXT locations are expected
// (the hole-store for the CONTEXT case is elided in this view); other
// locations are presumably unreachable — confirm against the full file.
922 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
923 VariableProxy* proxy = declaration->proxy();
924 Variable* variable = proxy->var();
925 switch (variable->location()) {
926 case Variable::UNALLOCATED:
930 case Variable::CONTEXT: {
931 Comment cmnt(masm_, "[ ImportDeclaration");
932 EmitDebugCheckDeclarationContext(variable);
937 case Variable::PARAMETER:
938 case Variable::LOCAL:
939 case Variable::LOOKUP:
// Export declarations require no code here (body elided/empty in this view).
945 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
// Declare all recorded global name/value pairs in one runtime call, passing
// the current context, the pairs array, and the declaration flags.
950 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
951 // Call the runtime to declare the globals.
952 __ push(esi); // The context is the first argument.
954 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
955 __ CallRuntime(Runtime::kDeclareGlobals, 3);
956 // Return value is ignored.
// Declare all modules described in |descriptions| via a single runtime call.
960 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
961 // Call the runtime to declare the modules.
962 __ Push(descriptions);
963 __ CallRuntime(Runtime::kDeclareModules, 1);
964 // Return value is ignored.
// Compile a switch statement: the tag value stays on the stack while each
// clause label is compared via '===' (with an inlined, patchable smi fast
// path plus a CompareIC slow path); matched clauses jump to their bodies,
// which are emitted afterwards in source order so fall-through works.
// NOTE(review): several label declarations/binds are elided in this view.
968 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
969 Comment cmnt(masm_, "[ SwitchStatement");
970 Breakable nested_statement(this, stmt);
971 SetStatementPosition(stmt);
973 // Keep the switch value on the stack until a case matches.
974 VisitForStackValue(stmt->tag());
975 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
977 ZoneList<CaseClause*>* clauses = stmt->cases();
978 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
980 Label next_test; // Recycled for each test.
981 // Compile all the tests with branches to their bodies.
982 for (int i = 0; i < clauses->length(); i++) {
983 CaseClause* clause = clauses->at(i);
984 clause->body_target()->Unuse();
986 // The default is not a test, but remember it as final fall through.
987 if (clause->is_default()) {
988 default_clause = clause;
992 Comment cmnt(masm_, "[ Case comparison");
996 // Compile the label expression.
997 VisitForAccumulatorValue(clause->label());
999 // Perform the comparison as if via '==='.
1000 __ mov(edx, Operand(esp, 0)); // Switch value.
1001 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1002 JumpPatchSite patch_site(masm_);
// Inlined smi fast path: compare directly when both values are smis,
// falling through to the IC for anything else.
1003 if (inline_smi_code) {
1007 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
1010 __ j(not_equal, &next_test);
1011 __ Drop(1); // Switch value is no longer needed.
1012 __ jmp(clause->body_target());
1013 __ bind(&slow_case);
1016 // Record position before stub call for type feedback.
1017 SetSourcePosition(clause->position());
1018 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1019 language_mode()).code();
1020 CallIC(ic, clause->CompareId());
1021 patch_site.EmitPatchInfo();
1024 __ jmp(&skip, Label::kNear);
1025 PrepareForBailout(clause, TOS_REG);
// The CompareIC result must be exactly true_value for a match.
1026 __ cmp(eax, isolate()->factory()->true_value());
1027 __ j(not_equal, &next_test);
1029 __ jmp(clause->body_target());
1033 __ j(not_equal, &next_test);
1034 __ Drop(1); // Switch value is no longer needed.
1035 __ jmp(clause->body_target());
1038 // Discard the test value and jump to the default if present, otherwise to
1039 // the end of the statement.
1040 __ bind(&next_test);
1041 __ Drop(1); // Switch value is no longer needed.
1042 if (default_clause == NULL) {
1043 __ jmp(nested_statement.break_label());
1045 __ jmp(default_clause->body_target());
1048 // Compile all the case bodies.
1049 for (int i = 0; i < clauses->length(); i++) {
1050 Comment cmnt(masm_, "[ Case body");
1051 CaseClause* clause = clauses->at(i);
1052 __ bind(clause->body_target());
1053 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1054 VisitStatements(clause->statements());
1057 __ bind(nested_statement.break_label());
1058 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
// Emits full-codegen for a JS for-in loop on ia32. Evaluates the enumerable,
// skips the loop for null/undefined, converts the value to a receiver object,
// then iterates either via the map's enum cache (fast path) or via a fixed
// array of property names (slow / proxy path). Loop state occupies five stack
// slots (see the `add(esp, 5 * kPointerSize)` at the break label).
// NOTE(review): this chunk is elided — several original lines (jumps to the
// exit label, the `convert` label bind, `Label non_proxy;`/`Label loop;`
// declarations, an else branch and closing braces) are missing between the
// numbered lines; comments below describe only what is visible.
1062 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1063 Comment cmnt(masm_, "[ ForInStatement");
1064 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1066 SetStatementPosition(stmt);
1069 ForIn loop_statement(this, stmt);
1070 increment_loop_depth();
1072 // Get the object to enumerate over. If the object is null or undefined, skip
1073 // over the loop. See ECMA-262 version 5, section 12.6.4.
1074 SetExpressionPosition(stmt->enumerable());
1075 VisitForAccumulatorValue(stmt->enumerable());
// NOTE(review): the conditional jumps to the loop-exit label after these two
// compares are on elided lines — confirm against the full file.
1076 __ cmp(eax, isolate()->factory()->undefined_value());
1078 __ cmp(eax, isolate()->factory()->null_value());
1081 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1083 // Convert the object to a JS object.
1084 Label convert, done_convert;
1085 __ JumpIfSmi(eax, &convert, Label::kNear);
1086 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1087 __ j(above_equal, &done_convert, Label::kNear);
// NOTE(review): `__ bind(&convert);` (and presumably a push of eax) is elided
// before the builtin call below.
1090 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1091 __ bind(&done_convert);
1092 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1095 // Check for proxies.
1096 Label call_runtime, use_cache, fixed_array;
1097 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1098 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1099 __ j(below_equal, &call_runtime);
1101 // Check cache validity in generated code. This is a fast case for
1102 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1103 // guarantee cache validity, call the runtime system to check cache
1104 // validity or get the property names in a fixed array.
1105 __ CheckEnumCache(&call_runtime);
// Fast path: load the receiver's map and use its enum cache directly.
1107 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1108 __ jmp(&use_cache, Label::kNear);
1110 // Get the set of properties to enumerate.
1111 __ bind(&call_runtime);
// NOTE(review): the push of the enumerable (runtime argument) is on an elided
// line before this call.
1113 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1114 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
// The runtime returns either a map (meta_map-typed ⇒ enum cache usable) or a
// fixed array of names; dispatch on which one we got.
1115 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1116 isolate()->factory()->meta_map());
1117 __ j(not_equal, &fixed_array);
1120 // We got a map in register eax. Get the enumeration cache from it.
1121 Label no_descriptors;
1122 __ bind(&use_cache);
1124 __ EnumLength(edx, eax);
1125 __ cmp(edx, Immediate(Smi::FromInt(0)));
1126 __ j(equal, &no_descriptors);
1128 __ LoadInstanceDescriptors(eax, ecx);
1129 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1130 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1132 // Set up the four remaining stack slots.
1133 __ push(eax); // Map.
1134 __ push(ecx); // Enumeration cache.
1135 __ push(edx); // Number of valid entries for the map in the enum cache.
1136 __ push(Immediate(Smi::FromInt(0))); // Initial index.
// NOTE(review): the `jmp` to the loop head between the pushes and this bind is
// elided.
1139 __ bind(&no_descriptors);
// Zero descriptors: drop the slot pushed for the enumerable and exit; the jump
// to the exit label is on an elided line.
1140 __ add(esp, Immediate(kPointerSize));
1143 // We got a fixed array in register eax. Iterate through that.
1145 __ bind(&fixed_array);
1147 // No need for a write barrier, we are storing a Smi in the feedback vector.
1148 __ LoadHeapObject(ebx, FeedbackVector());
1149 int vector_index = FeedbackVector()->GetIndex(slot);
1150 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
1151 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
// NOTE(review): `Label non_proxy;` is declared on an elided line.
1153 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1154 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1155 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1156 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1157 __ j(above, &non_proxy);
1158 __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1159 __ bind(&non_proxy);
1160 __ push(ebx); // Smi
1161 __ push(eax); // Array
1162 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1163 __ push(eax); // Fixed array length (as smi).
1164 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1166 // Generate code for doing the condition check.
1167 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
// NOTE(review): `Label loop;` and its bind are on elided lines near here.
1169 SetExpressionPosition(stmt->each());
1171 __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1172 __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1173 __ j(above_equal, loop_statement.break_label());
1175 // Get the current entry of the array into register ebx.
1176 __ mov(ebx, Operand(esp, 2 * kPointerSize));
1177 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1179 // Get the expected map from the stack or a smi in the
1180 // permanent slow case into register edx.
1181 __ mov(edx, Operand(esp, 3 * kPointerSize));
1183 // Check if the expected map still matches that of the enumerable.
1184 // If not, we may have to filter the key.
// NOTE(review): `Label update_each;` is declared on an elided line.
1186 __ mov(ecx, Operand(esp, 4 * kPointerSize));
1187 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1188 __ j(equal, &update_each, Label::kNear);
1190 // For proxies, no filtering is done.
1191 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
// Relies on Smi 0 encoding as machine 0, so the elided cmp/test against edx
// can use the zero flag to detect the proxy sentinel.
1192 DCHECK(Smi::FromInt(0) == 0);
1194 __ j(zero, &update_each);
1196 // Convert the entry to a string or null if it isn't a property
1197 // anymore. If the property has been removed while iterating, we
1199 __ push(ecx); // Enumerable.
1200 __ push(ebx); // Current entry.
1201 __ CallRuntime(Runtime::kForInFilter, 2);
1202 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
// Filtered-out keys come back as undefined: skip straight to the increment.
1203 __ cmp(eax, isolate()->factory()->undefined_value());
1204 __ j(equal, loop_statement.continue_label());
// NOTE(review): a `mov(ebx, eax)` moving the filtered key into ebx is
// presumably on an elided line before update_each — confirm.
1207 // Update the 'each' property or variable from the possibly filtered
1208 // entry in register ebx.
1209 __ bind(&update_each);
1210 __ mov(result_register(), ebx);
1211 // Perform the assignment as if via '='.
1212 { EffectContext context(this);
1213 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1214 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1217 // Generate code for the body of the loop.
1218 Visit(stmt->body());
1220 // Generate code for going to the next element by incrementing the
1221 // index (smi) stored on top of the stack.
1222 __ bind(loop_statement.continue_label());
1223 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
// Back-edge bookkeeping enables interrupt/OSR checks; the jump back to the
// loop head is on an elided line after this call.
1225 EmitBackEdgeBookkeeping(stmt, &loop);
1228 // Remove the pointers stored on the stack.
1229 __ bind(loop_statement.break_label());
1230 __ add(esp, Immediate(5 * kPointerSize));
1232 // Exit and decrement the loop depth.
1233 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1235 decrement_loop_depth();
// Materializes a closure for `info`, leaving the result in eax (plugged into
// the current expression context). Fast path uses FastNewClosureStub; the
// general path calls Runtime::kNewClosure with three arguments.
// NOTE(review): the second parameter (a pretenure flag, used below) and the
// `__ CallStub(&stub);` / `} else {` / context-push lines are elided in this
// chunk — confirm against the full file.
1239 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1241 // Use the fast case closure allocation code that allocates in new
1242 // space for nested functions that don't need literals cloning. If
1243 // we're running with the --always-opt or the --prepare-always-opt
1244 // flag, we need to use the runtime function so that the new function
1245 // we are creating here gets a chance to have its code optimized and
1246 // doesn't just get a copy of the existing unoptimized code.
1247 if (!FLAG_always_opt &&
1248 !FLAG_prepare_always_opt &&
1250 scope()->is_function_scope() &&
1251 info->num_literals() == 0) {
1252 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
// The stub takes the SharedFunctionInfo in ebx.
1253 __ mov(ebx, Immediate(info));
// Slow path: push the runtime arguments (context push is on an elided line).
1254 __ push(Immediate(info));
1255 __ push(Immediate(pretenure
1256 ? isolate()->factory()->true_value()
1257 : isolate()->factory()->false_value()));
1258 __ CallRuntime(Runtime::kNewClosure, 3);
1263 context()->Plug(eax);
// A variable reference as an expression: delegate to EmitVariableLoad, which
// plugs the loaded value into the current expression context.
1267 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1268 Comment cmnt(masm_, "[ VariableProxy");
1269 EmitVariableLoad(expr);
// If `initializer` needs a [[HomeObject]] (method/accessor using `super`),
// stores the object currently on top of the stack as the home object of the
// value found `offset` slots down the stack, via a store IC.
// NOTE(review): the `int offset` parameter line and the trailing CallStoreIC
// line are elided in this chunk — confirm against the full file.
1273 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1275 FeedbackVectorICSlot slot) {
1276 if (NeedsHomeObject(initializer)) {
// Receiver = the function/accessor on top of the stack.
1277 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1278 __ mov(StoreDescriptor::NameRegister(),
1279 Immediate(isolate()->factory()->home_object_symbol()));
// Value = the home object, `offset` slots below TOS.
1280 __ mov(StoreDescriptor::ValueRegister(),
1281 Operand(esp, offset * kPointerSize));
1282 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
// Fast-path load of a global for a dynamically-scoped lookup: walks the
// context chain verifying no scope has an extension object (which could shadow
// the global); bails to `slow` if any extension is present, otherwise loads
// via a global load IC.
// NOTE(review): the `Label* slow` parameter line, the scope-walk `for`/`while`
// headers, `Label next, fast;` declarations, bind sites and closing braces are
// elided in this chunk — comments describe only the visible lines.
1288 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1289 TypeofState typeof_state,
1291 Register context = esi;
1292 Register temp = edx;
// Statically walk scopes that allocate a heap context.
1296 if (s->num_heap_slots() > 0) {
1297 if (s->calls_sloppy_eval()) {
1298 // Check that extension is NULL.
1299 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1301 __ j(not_equal, slow);
1303 // Load next context in chain.
1304 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1305 // Walk the rest of the chain without clobbering esi.
1308 // If no outer scope calls eval, we do not need to check more
1309 // context extensions. If we have reached an eval scope, we check
1310 // all extensions from this point.
1311 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1312 s = s->outer_scope();
// If the static walk ended inside eval code, finish the check dynamically
// with a generated loop up the context chain.
1315 if (s != NULL && s->is_eval_scope()) {
1316 // Loop up the context chain. There is no frame effect so it is
1317 // safe to use raw labels here.
1319 if (!context.is(temp)) {
1320 __ mov(temp, context);
1323 // Terminate at native context.
1324 __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1325 Immediate(isolate()->factory()->native_context_map()));
1326 __ j(equal, &fast, Label::kNear);
1327 // Check that extension is NULL.
1328 __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1329 __ j(not_equal, slow);
1330 // Load next context in chain.
1331 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1336 // All extension objects were empty and it is safe to use a global
// Set up the load IC: receiver = global object, name, feedback slot.
1338 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1339 __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
1340 __ mov(LoadDescriptor::SlotRegister(),
1341 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
// Inside typeof, a missing global must not throw, hence a different mode.
// NOTE(review): the rest of this expression and the IC call are elided.
1343 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
// Returns an operand addressing `var`'s context slot, emitting checks along
// the context chain that no scope between here and var's scope has an
// extension object; jumps to `slow` if one does.
// NOTE(review): the `Label* slow` parameter line and some loop braces are
// elided in this chunk.
1351 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1353 DCHECK(var->IsContextSlot());
1354 Register context = esi;
1355 Register temp = ebx;
// Statically walk from the current scope up to (but excluding) var's scope.
1357 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1358 if (s->num_heap_slots() > 0) {
1359 if (s->calls_sloppy_eval()) {
1360 // Check that extension is NULL.
1361 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1363 __ j(not_equal, slow);
// Load next context in chain.
1365 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1366 // Walk the rest of the chain without clobbering esi.
1370 // Check that last extension is NULL.
1371 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1372 __ j(not_equal, slow);
1374 // This function is used only for loads, not stores, so it's safe to
1375 // return an esi-based operand (the write barrier cannot be allowed to
1376 // destroy the esi register).
1377 return ContextOperand(context, var->index());
// Emits the fast cases for loading a dynamically-scoped variable (DYNAMIC_GLOBAL
// via global IC with extension checks, DYNAMIC_LOCAL via a checked context
// slot), falling through to `slow` when the fast path cannot be taken.
// NOTE(review): the `Label* slow, Label* done` parameter lines, the jump to
// `done` after the global load, and closing braces are elided in this chunk.
1381 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1382 TypeofState typeof_state,
1385 // Generate fast-case code for variables that might be shadowed by
1386 // eval-introduced variables. Eval is used a lot without
1387 // introducing variables. In those cases, we do not want to
1388 // perform a runtime call for all variables in the scope
1389 // containing the eval.
1390 Variable* var = proxy->var();
1391 if (var->mode() == DYNAMIC_GLOBAL) {
1392 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1394 } else if (var->mode() == DYNAMIC_LOCAL) {
1395 Variable* local = var->local_if_not_shadowed();
1396 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
// Hole-check bindings that need initialization (let/const/legacy const).
1397 if (local->mode() == LET || local->mode() == CONST ||
1398 local->mode() == CONST_LEGACY) {
1399 __ cmp(eax, isolate()->factory()->the_hole_value());
1400 __ j(not_equal, done);
// Legacy const reads the hole as undefined; let/const throw a
// ReferenceError (TDZ semantics).
1401 if (local->mode() == CONST_LEGACY) {
1402 __ mov(eax, isolate()->factory()->undefined_value());
1403 } else { // LET || CONST
1404 __ push(Immediate(var->name()));
1405 __ CallRuntime(Runtime::kThrowReferenceError, 1);
// Loads the variable referenced by `proxy` into the current expression
// context, dispatching on the variable's location: global (load IC), stack or
// context slot (with hole check for let/const), or dynamic lookup (runtime).
// NOTE(review): this chunk is elided — several `break;` statements, `Label`
// declarations (`done`, `slow`), bind sites, the context-slot load preceding
// the hole check, and closing braces are missing between the numbered lines.
1413 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1414 // Record position before possible IC call.
1415 SetSourcePosition(proxy->position());
1416 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1417 Variable* var = proxy->var();
1419 // Three cases: global variables, lookup variables, and all other types of
1421 switch (var->location()) {
1422 case Variable::UNALLOCATED: {
1423 Comment cmnt(masm_, "[ Global variable");
1424 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1425 __ mov(LoadDescriptor::NameRegister(), var->name());
1426 __ mov(LoadDescriptor::SlotRegister(),
1427 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
1428 CallGlobalLoadIC(var->name());
1429 context()->Plug(eax);
1433 case Variable::PARAMETER:
1434 case Variable::LOCAL:
1435 case Variable::CONTEXT: {
1436 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1437 : "[ Stack variable");
1438 if (var->binding_needs_init()) {
1439 // var->scope() may be NULL when the proxy is located in eval code and
1440 // refers to a potential outside binding. Currently those bindings are
1441 // always looked up dynamically, i.e. in that case
1442 // var->location() == LOOKUP.
1444 DCHECK(var->scope() != NULL);
1446 // Check if the binding really needs an initialization check. The check
1447 // can be skipped in the following situation: we have a LET or CONST
1448 // binding in harmony mode, both the Variable and the VariableProxy have
1449 // the same declaration scope (i.e. they are both in global code, in the
1450 // same function or in the same eval code) and the VariableProxy is in
1451 // the source physically located after the initializer of the variable.
1453 // We cannot skip any initialization checks for CONST in non-harmony
1454 // mode because const variables may be declared but never initialized:
1455 // if (false) { const x; }; var y = x;
1457 // The condition on the declaration scopes is a conservative check for
1458 // nested functions that access a binding and are called before the
1459 // binding is initialized:
1460 // function() { f(); let x = 1; function f() { x = 2; } }
1462 bool skip_init_check;
1463 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1464 skip_init_check = false;
1465 } else if (var->is_this()) {
// `this` hole checks only arise in subclass constructors.
1466 CHECK(info_->function() != nullptr &&
1467 (info_->function()->kind() & kSubclassConstructor) != 0);
1468 // TODO(dslomov): implement 'this' hole check elimination.
1469 skip_init_check = false;
// NOTE(review): the `} else {` introducing this branch is elided.
1471 // Check that we always have valid source position.
1472 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1473 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1474 skip_init_check = var->mode() != CONST_LEGACY &&
1475 var->initializer_position() < proxy->position();
1478 if (!skip_init_check) {
1479 // Let and const need a read barrier.
// NOTE(review): `Label done;` and the GetVar load into eax are elided
// before the hole comparison below.
1482 __ cmp(eax, isolate()->factory()->the_hole_value());
1483 __ j(not_equal, &done, Label::kNear);
1484 if (var->mode() == LET || var->mode() == CONST) {
1485 // Throw a reference error when using an uninitialized let/const
1486 // binding in harmony mode.
1487 __ push(Immediate(var->name()));
1488 __ CallRuntime(Runtime::kThrowReferenceError, 1);
// NOTE(review): an `} else {` is elided here.
1490 // Uninitalized const bindings outside of harmony mode are unholed.
1491 DCHECK(var->mode() == CONST_LEGACY);
1492 __ mov(eax, isolate()->factory()->undefined_value());
1495 context()->Plug(eax);
// No init check needed: plug the slot directly.
1499 context()->Plug(var);
1503 case Variable::LOOKUP: {
1504 Comment cmnt(masm_, "[ Lookup variable");
// NOTE(review): `Label done, slow;` is declared on an elided line.
1506 // Generate code for loading from variables potentially shadowed
1507 // by eval-introduced variables.
1508 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
// Slow path: full runtime lookup by name in the current context.
1510 __ push(esi); // Context.
1511 __ push(Immediate(var->name()));
1512 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1514 context()->Plug(eax);
// Emits code for a regexp literal: fetch the cached literal from the
// function's literals array, materialize it via the runtime on first use,
// then shallow-copy it (JSRegExp header + in-object fields) into a fresh
// allocation so each evaluation yields a distinct object.
// NOTE(review): `Label materialized;` and some intermediate lines (a mov of
// the runtime result into ebx, pushes of the literals array) are elided in
// this chunk.
1521 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1522 Comment cmnt(masm_, "[ RegExpLiteral");
1524 // Registers will be used as follows:
1525 // edi = JS function.
1526 // ecx = literals array.
1527 // ebx = regexp literal.
1528 // eax = regexp literal clone.
1529 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1530 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1531 int literal_offset =
1532 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1533 __ mov(ebx, FieldOperand(ecx, literal_offset));
// Undefined in the literals slot means not yet materialized.
1534 __ cmp(ebx, isolate()->factory()->undefined_value());
1535 __ j(not_equal, &materialized, Label::kNear);
1537 // Create regexp literal using runtime function
1538 // Result will be in eax.
1540 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1541 __ push(Immediate(expr->pattern()));
1542 __ push(Immediate(expr->flags()));
1543 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1546 __ bind(&materialized);
1547 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1548 Label allocated, runtime_allocate;
1549 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
// New-space allocation failed: allocate via the runtime instead.
1552 __ bind(&runtime_allocate);
1554 __ push(Immediate(Smi::FromInt(size)));
1555 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1558 __ bind(&allocated);
1559 // Copy the content into the newly allocated memory.
1560 // (Unroll copy loop once for better throughput).
1561 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1562 __ mov(edx, FieldOperand(ebx, i));
1563 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1564 __ mov(FieldOperand(eax, i), edx);
1565 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
// Copy the trailing word when `size` is an odd number of pointers.
1567 if ((size % (2 * kPointerSize)) != 0) {
1568 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1569 __ mov(FieldOperand(eax, size - kPointerSize), edx);
1571 context()->Plug(eax);
// Pushes an accessor function onto the stack, or null when the accessor is
// absent (used when defining getter/setter pairs where one side is missing).
1575 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1576 if (expression == NULL) {
1577 __ push(Immediate(isolate()->factory()->null_value()));
1579 VisitForStackValue(expression);
// Emits code for an object literal: clone a boilerplate (stub fast path or
// Runtime::kCreateObjectLiteral), then initialize properties — the "static"
// part (no computed names) via store ICs / kSetProperty, accessor pairs via a
// single kDefineAccessorPropertyUnchecked call each, and the "dynamic" part
// (from the first computed name on) via Define*PropertyUnchecked runtime
// calls that preserve insertion order.
// NOTE(review): this chunk is elided — `break;` statements between switch
// cases, `__ CallStub(&stub);`, else branches and closing braces are missing
// between the numbered lines.
1584 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1585 Comment cmnt(masm_, "[ ObjectLiteral");
1587 Handle<FixedArray> constant_properties = expr->constant_properties();
1588 int flags = expr->ComputeFlags();
1589 // If any of the keys would store to the elements array, then we shouldn't
1591 if (MustCreateObjectLiteralWithRuntime(expr)) {
1592 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1593 __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1594 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1595 __ push(Immediate(constant_properties));
1596 __ push(Immediate(Smi::FromInt(flags)));
1597 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
// Fast path: FastCloneShallowObjectStub takes its arguments in registers.
1599 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1600 __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
1601 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1602 __ mov(ecx, Immediate(constant_properties));
1603 __ mov(edx, Immediate(Smi::FromInt(flags)));
1604 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1607 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1609 // If result_saved is true the result is on top of the stack. If
1610 // result_saved is false the result is in eax.
1611 bool result_saved = false;
1613 AccessorTable accessor_table(zone());
1614 int property_index = 0;
1615 // store_slot_index points to the vector IC slot for the next store IC used.
1616 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1617 // and must be updated if the number of store ICs emitted here changes.
1618 int store_slot_index = 0;
// First pass: the static part — stop at the first computed property name.
1619 for (; property_index < expr->properties()->length(); property_index++) {
1620 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1621 if (property->is_computed_name()) break;
1622 if (property->IsCompileTimeValue()) continue;
1624 Literal* key = property->key()->AsLiteral();
1625 Expression* value = property->value();
1626 if (!result_saved) {
1627 __ push(eax); // Save result on the stack
1628 result_saved = true;
1630 switch (property->kind()) {
1631 case ObjectLiteral::Property::CONSTANT:
1633 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1634 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
// Fall through (CONSTANT is unreachable; materialized literals share
// the COMPUTED path).
1636 case ObjectLiteral::Property::COMPUTED:
1637 // It is safe to use [[Put]] here because the boilerplate already
1638 // contains computed properties with an uninitialized value.
1639 if (key->value()->IsInternalizedString()) {
1640 if (property->emit_store()) {
1641 VisitForAccumulatorValue(value);
1642 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1643 __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1644 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1645 if (FLAG_vector_stores) {
1646 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
// NOTE(review): the else branch calling CallStoreIC() without a
// feedback id is elided here.
1649 CallStoreIC(key->LiteralFeedbackId());
1651 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1653 if (NeedsHomeObject(value)) {
1654 __ mov(StoreDescriptor::ReceiverRegister(), eax);
1655 __ mov(StoreDescriptor::NameRegister(),
1656 Immediate(isolate()->factory()->home_object_symbol()));
1657 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
1658 if (FLAG_vector_stores) {
1659 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
// No store requested: evaluate the value for side effects only.
1664 VisitForEffect(value);
// Non-internalized-string key: generic kSetProperty path.
1668 __ push(Operand(esp, 0)); // Duplicate receiver.
1669 VisitForStackValue(key);
1670 VisitForStackValue(value);
1671 if (property->emit_store()) {
1672 EmitSetHomeObjectIfNeeded(
1673 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1674 __ push(Immediate(Smi::FromInt(SLOPPY))); // Language mode
1675 __ CallRuntime(Runtime::kSetProperty, 4);
1680 case ObjectLiteral::Property::PROTOTYPE:
1681 __ push(Operand(esp, 0)); // Duplicate receiver.
1682 VisitForStackValue(value);
1683 DCHECK(property->emit_store());
1684 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
// Getters/setters are collected and emitted pairwise below.
1686 case ObjectLiteral::Property::GETTER:
1687 if (property->emit_store()) {
1688 accessor_table.lookup(key)->second->getter = value;
1691 case ObjectLiteral::Property::SETTER:
1692 if (property->emit_store()) {
1693 accessor_table.lookup(key)->second->setter = value;
1699 // Emit code to define accessors, using only a single call to the runtime for
1700 // each pair of corresponding getters and setters.
1701 for (AccessorTable::Iterator it = accessor_table.begin();
1702 it != accessor_table.end();
1704 __ push(Operand(esp, 0)); // Duplicate receiver.
1705 VisitForStackValue(it->first);
1706 EmitAccessor(it->second->getter);
1707 EmitSetHomeObjectIfNeeded(
1708 it->second->getter, 2,
1709 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1711 EmitAccessor(it->second->setter);
1712 EmitSetHomeObjectIfNeeded(
1713 it->second->setter, 3,
1714 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1716 __ push(Immediate(Smi::FromInt(NONE)));
1717 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1720 // Object literals have two parts. The "static" part on the left contains no
1721 // computed property names, and so we can compute its map ahead of time; see
1722 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1723 // starts with the first computed property name, and continues with all
1724 // properties to its right. All the code from above initializes the static
1725 // component of the object literal, and arranges for the map of the result to
1726 // reflect the static order in which the keys appear. For the dynamic
1727 // properties, we compile them into a series of "SetOwnProperty" runtime
1728 // calls. This will preserve insertion order.
1729 for (; property_index < expr->properties()->length(); property_index++) {
1730 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1732 Expression* value = property->value();
1733 if (!result_saved) {
1734 __ push(eax); // Save result on the stack
1735 result_saved = true;
1738 __ push(Operand(esp, 0)); // Duplicate receiver.
1740 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1741 DCHECK(!property->is_computed_name());
1742 VisitForStackValue(value);
1743 DCHECK(property->emit_store());
1744 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
// NOTE(review): the `} else {` for the non-prototype case is elided.
1746 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1747 VisitForStackValue(value);
1748 EmitSetHomeObjectIfNeeded(
1749 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1751 switch (property->kind()) {
1752 case ObjectLiteral::Property::CONSTANT:
1753 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1754 case ObjectLiteral::Property::COMPUTED:
1755 if (property->emit_store()) {
1756 __ push(Immediate(Smi::FromInt(NONE)));
1757 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1763 case ObjectLiteral::Property::PROTOTYPE:
// Prototype was handled before the switch; unreachable here.
1767 case ObjectLiteral::Property::GETTER:
1768 __ push(Immediate(Smi::FromInt(NONE)));
1769 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1772 case ObjectLiteral::Property::SETTER:
1773 __ push(Immediate(Smi::FromInt(NONE)));
1774 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1780 if (expr->has_function()) {
1781 DCHECK(result_saved);
1782 __ push(Operand(esp, 0));
1783 __ CallRuntime(Runtime::kToFastProperties, 1);
// Plug the result: from TOS if it was saved, otherwise from eax.
1787 context()->PlugTOS();
1789 context()->Plug(eax);
1792 // Verify that compilation exactly consumed the number of store ic slots that
1793 // the ObjectLiteral node had to offer.
1794 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
// Emits code for an array literal: clone a boilerplate (stub fast path or
// Runtime::kCreateArrayLiteral), then store each non-constant element —
// directly into the elements backing store for fast-elements arrays, via
// StoreArrayLiteralElementStub otherwise — and append any trailing spread
// elements via builtins/runtime.
// NOTE(review): this chunk is elided — `__ CallStub(&stub);`, else-branch
// lines, some RecordWriteField arguments and closing braces are missing
// between the numbered lines.
1798 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1799 Comment cmnt(masm_, "[ ArrayLiteral");
1801 expr->BuildConstantElements(isolate());
1802 Handle<FixedArray> constant_elements = expr->constant_elements();
1803 bool has_constant_fast_elements =
1804 IsFastObjectElementsKind(expr->constant_elements_kind());
1806 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1807 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1808 // If the only customer of allocation sites is transitioning, then
1809 // we can turn it off if we don't have anywhere else to transition to.
1810 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1813 if (MustCreateArrayLiteralWithRuntime(expr)) {
1814 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1815 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1816 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1817 __ push(Immediate(constant_elements));
1818 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1819 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
// Fast path: FastCloneShallowArrayStub takes its arguments in registers.
1821 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1822 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1823 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1824 __ mov(ecx, Immediate(constant_elements));
1825 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1828 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1830 bool result_saved = false; // Is the result saved to the stack?
1831 ZoneList<Expression*>* subexprs = expr->values();
1832 int length = subexprs->length();
1834 // Emit code to evaluate all the non-constant subexpressions and to store
1835 // them into the newly cloned array.
1836 int array_index = 0;
1837 for (; array_index < length; array_index++) {
1838 Expression* subexpr = subexprs->at(array_index);
1839 if (subexpr->IsSpread()) break;
1841 // If the subexpression is a literal or a simple materialized literal it
1842 // is already set in the cloned array.
1843 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1845 if (!result_saved) {
1846 __ push(eax); // array literal.
1847 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1848 result_saved = true;
1850 VisitForAccumulatorValue(subexpr);
1852 if (has_constant_fast_elements) {
1853 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1854 // cannot transition and don't need to call the runtime stub.
1855 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1856 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1857 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1858 // Store the subexpression value in the array's elements.
1859 __ mov(FieldOperand(ebx, offset), result_register());
1860 // Update the write barrier for the array store.
1861 __ RecordWriteField(ebx, offset, result_register(), ecx,
1863 EMIT_REMEMBERED_SET,
// Non-fast elements: delegate the store (with transition handling)
// to the stub; it takes the index in ecx.
1866 // Store the subexpression value in the array's elements.
1867 __ mov(ecx, Immediate(Smi::FromInt(array_index)));
1868 StoreArrayLiteralElementStub stub(isolate());
1872 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1875 // In case the array literal contains spread expressions it has two parts. The
1876 // first part is the "static" array which has a literal index is handled
1877 // above. The second part is the part after the first spread expression
1878 // (inclusive) and these elements gets appended to the array. Note that the
1879 // number elements an iterable produces is unknown ahead of time.
1880 if (array_index < length && result_saved) {
1881 __ Drop(1); // literal index
// NOTE(review): a `__ Pop(eax);` restoring the array to eax is elided here.
1883 result_saved = false;
1885 for (; array_index < length; array_index++) {
1886 Expression* subexpr = subexprs->at(array_index);
// NOTE(review): a `__ Push(eax);` keeping the array on the stack for the
// runtime call is elided here.
1889 if (subexpr->IsSpread()) {
1890 VisitForStackValue(subexpr->AsSpread()->expression());
1891 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1893 VisitForStackValue(subexpr);
1894 __ CallRuntime(Runtime::kAppendElement, 2);
1897 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
// Plug the result: from TOS (dropping the literal index) if saved, else eax.
1901 __ Drop(1); // literal index
1902 context()->PlugTOS();
1904 context()->Plug(eax);
// Emits code for an assignment expression. Evaluates the LHS reference (for
// property targets, leaving receiver/key on stack and/or registers), for
// compound assignments loads the old value and applies the binary op, then
// stores via the path matching the target kind (variable, named/keyed
// property, or their super forms).
// NOTE(review): this chunk is elided — `case VARIABLE:` labels, `break;`
// statements, else branches and closing braces are missing between the
// numbered lines.
1909 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1910 DCHECK(expr->target()->IsValidReferenceExpression());
1912 Comment cmnt(masm_, "[ Assignment");
1914 Property* property = expr->target()->AsProperty();
1915 LhsKind assign_type = Property::GetAssignType(property);
1917 // Evaluate LHS expression.
1918 switch (assign_type) {
// Variable targets need no LHS setup.
1920 // Nothing to do here.
1922 case NAMED_SUPER_PROPERTY:
// NOTE(review): the VisitForStackValue(...) wrapping this argument is on
// an elided line.
1924 property->obj()->AsSuperPropertyReference()->this_var());
1925 VisitForAccumulatorValue(
1926 property->obj()->AsSuperPropertyReference()->home_object());
1927 __ push(result_register());
1928 if (expr->is_compound()) {
// Re-push this/home-object so the load below can consume a copy.
1929 __ push(MemOperand(esp, kPointerSize));
1930 __ push(result_register());
1933 case NAMED_PROPERTY:
1934 if (expr->is_compound()) {
1935 // We need the receiver both on the stack and in the register.
1936 VisitForStackValue(property->obj());
1937 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1939 VisitForStackValue(property->obj());
1942 case KEYED_SUPER_PROPERTY:
// NOTE(review): the VisitForStackValue(...) calls wrapping these two
// arguments are on elided lines.
1944 property->obj()->AsSuperPropertyReference()->this_var());
1946 property->obj()->AsSuperPropertyReference()->home_object());
1947 VisitForAccumulatorValue(property->key());
1948 __ Push(result_register());
1949 if (expr->is_compound()) {
1950 __ push(MemOperand(esp, 2 * kPointerSize));
1951 __ push(MemOperand(esp, 2 * kPointerSize));
1952 __ push(result_register());
1955 case KEYED_PROPERTY: {
1956 if (expr->is_compound()) {
1957 VisitForStackValue(property->obj());
1958 VisitForStackValue(property->key());
1959 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1960 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1962 VisitForStackValue(property->obj());
1963 VisitForStackValue(property->key());
1969 // For compound assignments we need another deoptimization point after the
1970 // variable/property load.
1971 if (expr->is_compound()) {
1972 AccumulatorValueContext result_context(this);
1973 { AccumulatorValueContext left_operand_context(this);
1974 switch (assign_type) {
// VARIABLE case (label elided): load the variable's current value.
1976 EmitVariableLoad(expr->target()->AsVariableProxy());
1977 PrepareForBailout(expr->target(), TOS_REG);
1979 case NAMED_SUPER_PROPERTY:
1980 EmitNamedSuperPropertyLoad(property);
1981 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1983 case NAMED_PROPERTY:
1984 EmitNamedPropertyLoad(property);
1985 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1987 case KEYED_SUPER_PROPERTY:
1988 EmitKeyedSuperPropertyLoad(property);
1989 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1991 case KEYED_PROPERTY:
1992 EmitKeyedPropertyLoad(property);
1993 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1998 Token::Value op = expr->binary_op();
1999 __ push(eax); // Left operand goes on the stack.
2000 VisitForAccumulatorValue(expr->value());
2002 SetSourcePosition(expr->position() + 1);
2003 if (ShouldInlineSmiCase(op)) {
// NOTE(review): the remaining EmitInlineSmiBinaryOp arguments (op, mode,
// left/right expressions) are on elided lines.
2004 EmitInlineSmiBinaryOp(expr->binary_operation(),
2009 EmitBinaryOp(expr->binary_operation(), op);
2012 // Deoptimization point in case the binary operation may have side effects.
2013 PrepareForBailout(expr->binary_operation(), TOS_REG);
// Simple (non-compound) assignment: just evaluate the RHS.
2015 VisitForAccumulatorValue(expr->value());
2018 // Record source position before possible IC call.
2019 SetSourcePosition(expr->position());
// Store the value through the path matching the target kind.
2022 switch (assign_type) {
// VARIABLE case (label elided).
2024 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2025 expr->op(), expr->AssignmentSlot());
2026 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2027 context()->Plug(eax);
2029 case NAMED_PROPERTY:
2030 EmitNamedPropertyAssignment(expr);
2032 case NAMED_SUPER_PROPERTY:
2033 EmitNamedSuperPropertyStore(property);
2034 context()->Plug(result_register());
2036 case KEYED_SUPER_PROPERTY:
2037 EmitKeyedSuperPropertyStore(property);
2038 context()->Plug(result_register());
2040 case KEYED_PROPERTY:
2041 EmitKeyedPropertyAssignment(expr);
// Emits code for a 'yield' expression. The yielded value is evaluated onto
// the stack first; what happens next depends on expr->yield_kind():
// suspend/initial store the resume point into the generator object and
// leave the frame, final closes the generator, and delegating implements
// the yield* protocol (repeatedly calling iter.next / iter['throw']).
// NOTE(review): several statements (case breaks, closing braces) are not
// visible in this view; the comments below describe only the visible code.
2047 void FullCodeGenerator::VisitYield(Yield* expr) {
2048 Comment cmnt(masm_, "[ Yield");
2049 // Evaluate yielded value first; the initial iterator definition depends on
2050 // this. It stays on the stack while we update the iterator.
2051 VisitForStackValue(expr->expression());
2053 switch (expr->yield_kind()) {
// Ordinary suspend: wrap the value in an iterator result object
// ({value, done:false}) and push it for the suspend path below.
2054 case Yield::kSuspend:
2055 // Pop value from top-of-stack slot; box result into result register.
2056 EmitCreateIteratorResult(false);
2057 __ push(result_register());
// Initial suspend: record the continuation position and current context in
// the generator object, then suspend via the runtime (unless the fast
// same-frame path applies) and return the boxed result.
2059 case Yield::kInitial: {
2060 Label suspend, continuation, post_runtime, resume;
2064 __ bind(&continuation);
2068 VisitForAccumulatorValue(expr->generator_object());
// The continuation offset is stored as a smi so the GC never mistakes it
// for a heap pointer.
2069 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2070 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2071 Immediate(Smi::FromInt(continuation.pos())));
2072 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
// Context is a heap object, so the store above needs a write barrier.
2074 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2076 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
2078 __ j(equal, &post_runtime);
2079 __ push(eax); // generator object
2080 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
// Runtime calls may move the context; reload it from the frame.
2081 __ mov(context_register(),
2082 Operand(ebp, StandardFrameConstants::kContextOffset));
2083 __ bind(&post_runtime);
2084 __ pop(result_register());
2085 EmitReturnSequence();
2088 context()->Plug(result_register());
// Final yield ('return' inside a generator): mark the generator closed and
// return a {value, done:true} iterator result.
2092 case Yield::kFinal: {
2093 VisitForAccumulatorValue(expr->generator_object());
2094 __ mov(FieldOperand(result_register(),
2095 JSGeneratorObject::kContinuationOffset),
2096 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2097 // Pop value from top-of-stack slot, box result into result register.
2098 EmitCreateIteratorResult(true);
2099 EmitUnwindBeforeReturn();
2100 EmitReturnSequence();
// Delegating yield (yield*): drive the inner iterator in a loop,
// re-yielding each result until result.done is true. Exceptions from the
// resumption are forwarded to the iterator's 'throw' method.
2104 case Yield::kDelegating: {
2105 VisitForStackValue(expr->generator_object());
2107 // Initial stack layout is as follows:
2108 // [sp + 1 * kPointerSize] iter
2109 // [sp + 0 * kPointerSize] g
2111 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2112 Label l_next, l_call, l_loop;
2113 Register load_receiver = LoadDescriptor::ReceiverRegister();
2114 Register load_name = LoadDescriptor::NameRegister();
2116 // Initial send value is undefined.
2117 __ mov(eax, isolate()->factory()->undefined_value());
2120 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2122 __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2123 __ push(load_name); // "throw"
2124 __ push(Operand(esp, 2 * kPointerSize)); // iter
2125 __ push(eax); // exception
2128 // try { received = %yield result }
2129 // Shuffle the received result above a try handler and yield it without
2132 __ pop(eax); // result
2133 EnterTryBlock(expr->index(), &l_catch);
2134 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2135 __ push(eax); // result
2137 __ bind(&l_continuation);
2139 __ bind(&l_suspend);
// The generator object sits above the result and the try-handler frame.
2140 const int generator_object_depth = kPointerSize + try_block_size;
2141 __ mov(eax, Operand(esp, generator_object_depth));
2143 __ push(Immediate(Smi::FromInt(expr->index()))); // handler-index
2144 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2145 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2146 Immediate(Smi::FromInt(l_continuation.pos())));
2147 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2149 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2151 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2152 __ mov(context_register(),
2153 Operand(ebp, StandardFrameConstants::kContextOffset));
2154 __ pop(eax); // result
2155 EmitReturnSequence();
2156 __ bind(&l_resume); // received in eax
2157 ExitTryBlock(expr->index());
2159 // receiver = iter; f = iter.next; arg = received;
2162 __ mov(load_name, isolate()->factory()->next_string());
2163 __ push(load_name); // "next"
2164 __ push(Operand(esp, 2 * kPointerSize)); // iter
2165 __ push(eax); // received
2167 // result = receiver[f](arg);
2169 __ mov(load_receiver, Operand(esp, kPointerSize));
2170 __ mov(LoadDescriptor::SlotRegister(),
2171 Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
// Look up iter[f] with a keyed load IC (no type feedback id; feedback goes
// through the vector slot set above).
2172 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2173 CallIC(ic, TypeFeedbackId::None());
2175 __ mov(Operand(esp, 2 * kPointerSize), edi);
2176 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2179 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2180 __ Drop(1); // The function is still on the stack; drop it.
2182 // if (!result.done) goto l_try;
2184 __ push(eax); // save result
2185 __ Move(load_receiver, eax); // result
2187 isolate()->factory()->done_string()); // "done"
2188 __ mov(LoadDescriptor::SlotRegister(),
2189 Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
2190 CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2191 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
// Loop exit: result.done was true; extract result.value as the value of
// the whole yield* expression.
2197 __ pop(load_receiver); // result
2199 isolate()->factory()->value_string()); // "value"
2200 __ mov(LoadDescriptor::SlotRegister(),
2201 Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
2202 CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2203 context()->DropAndPlug(2, eax); // drop iter and g
// Resumes a suspended generator activation: reconstructs the generator's
// JavaScript frame (receiver, argument holes, saved context/function) and
// either jumps straight back into the code (fast path for NEXT with an
// empty operand stack) or calls the runtime to restore the operand stack
// and handlers before resuming.
2210 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2212 JSGeneratorObject::ResumeMode resume_mode) {
2213 // The value stays in eax, and is ultimately read by the resumed generator, as
2214 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2215 // is read to throw the value when the resumed generator is already closed.
2216 // ebx will hold the generator object until the activation has been resumed.
2217 VisitForStackValue(generator);
2218 VisitForAccumulatorValue(value);
2221 // Load suspended function and context.
2222 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2223 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2226 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2228 // Push holes for arguments to generator function.
2229 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2231 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2232 __ mov(ecx, isolate()->factory()->the_hole_value());
2233 Label push_argument_holes, push_frame;
2234 __ bind(&push_argument_holes);
// edx holds a smi count; subtracting smi 1 and branching on carry exits
// the loop once the count underflows past zero.
2235 __ sub(edx, Immediate(Smi::FromInt(1)));
2236 __ j(carry, &push_frame);
2238 __ jmp(&push_argument_holes);
2240 // Enter a new JavaScript frame, and initialize its slots as they were when
2241 // the generator was suspended.
2242 Label resume_frame, done;
2243 __ bind(&push_frame);
// The call pushes a return address, simulating the frame a real call to
// the generator function would have created.
2244 __ call(&resume_frame);
2246 __ bind(&resume_frame);
2247 __ push(ebp); // Caller's frame pointer.
2249 __ push(esi); // Callee's context.
2250 __ push(edi); // Callee's JS Function.
2252 // Load the operand stack size.
2253 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2254 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2257 // If we are sending a value and there is no operand stack, we can jump back
2259 if (resume_mode == JSGeneratorObject::NEXT) {
2261 __ cmp(edx, Immediate(0));
2262 __ j(not_zero, &slow_resume);
// Fast path: compute the resume address from the code entry plus the
// saved continuation offset and jump back in directly.
2263 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2264 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
// Mark the generator as executing before re-entering it, so a re-entrant
// resume can be detected.
2267 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2268 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2270 __ bind(&slow_resume);
2273 // Otherwise, we push holes for the operand stack and call the runtime to fix
2274 // up the stack and the handlers.
2275 Label push_operand_holes, call_resume;
2276 __ bind(&push_operand_holes);
2277 __ sub(edx, Immediate(1));
2278 __ j(carry, &call_resume);
2280 __ jmp(&push_operand_holes);
2281 __ bind(&call_resume);
2283 __ push(result_register());
2284 __ Push(Smi::FromInt(resume_mode));
2285 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2286 // Not reached: the runtime call returns elsewhere.
2287 __ Abort(kGeneratorFailedToResume);
2290 context()->Plug(result_register());
// Allocates a JSIteratorResult-shaped object {value: <tos>, done: <done>}
// in new space (falling back to a runtime allocation on GC pressure) and
// leaves it in eax. The value is taken from the top of the stack; 'done'
// is a compile-time constant boolean.
2294 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
// Map + properties + elements + value + done = 5 words; must agree with
// the iterator result map's instance size (checked below).
2298 const int instance_size = 5 * kPointerSize;
2299 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2302 __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
// Slow path: inline allocation failed; allocate through the runtime and
// reload the (possibly moved) context.
2305 __ bind(&gc_required);
2306 __ Push(Smi::FromInt(instance_size));
2307 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2308 __ mov(context_register(),
2309 Operand(ebp, StandardFrameConstants::kContextOffset));
2311 __ bind(&allocated);
// Fetch the iterator-result map from the native context.
2312 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2313 __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2314 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2316 __ mov(edx, isolate()->factory()->ToBoolean(done));
// Initialize every field of the new object before it can be observed.
2317 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2318 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2319 isolate()->factory()->empty_fixed_array());
2320 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2321 isolate()->factory()->empty_fixed_array());
2322 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2323 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2325 // Only the value field needs a write barrier, as the other values are in the
2327 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2328 ecx, edx, kDontSaveFPRegs);
// Loads a named (non-super) property via a load IC. The receiver is
// expected in LoadDescriptor::ReceiverRegister(); the property name is a
// non-smi literal. Result ends up in the IC's result register.
2332 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2333 SetSourcePosition(prop->position());
2334 Literal* key = prop->key()->AsLiteral();
2335 DCHECK(!key->value()->IsSmi());
2336 DCHECK(!prop->IsSuperAccess());
2338 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
// Pass the feedback-vector slot so the IC can record type feedback.
2339 __ mov(LoadDescriptor::SlotRegister(),
2340 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2341 CallLoadIC(NOT_CONTEXTUAL);
// Loads a named property from 'super' by calling the runtime; receiver and
// home_object are already on the stack and the key is pushed here (three
// runtime arguments total).
2345 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2346 // Stack: receiver, home_object.
2347 SetSourcePosition(prop->position());
2348 Literal* key = prop->key()->AsLiteral();
2349 DCHECK(!key->value()->IsSmi());
2350 DCHECK(prop->IsSuperAccess());
2352 __ push(Immediate(key->value()));
2353 __ CallRuntime(Runtime::kLoadFromSuper, 3);
// Loads a keyed (non-super) property via the keyed load IC. Receiver and
// key are expected in the load descriptor registers set up by the caller.
2357 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2358 SetSourcePosition(prop->position());
2359 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
// Feedback-vector slot for the IC's type feedback.
2360 __ mov(LoadDescriptor::SlotRegister(),
2361 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
// Loads a keyed property from 'super' by calling the runtime; receiver,
// home_object, and key are already on the stack (three runtime arguments).
2366 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2367 // Stack: receiver, home_object, key.
2368 SetSourcePosition(prop->position());
2370 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
// Emits an inlined smi fast path for a binary operation, with a patchable
// jump (JumpPatchSite) guarding it. If either operand is not a smi, or the
// smi arithmetic overflows / leaves the smi range, control falls back to
// the generic BinaryOpIC stub. Result is plugged into the context in eax.
// NOTE(review): the per-operator switch bodies are only partially visible
// here; comments below cover just the visible instructions.
2374 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2377 Expression* right) {
2378 // Do combined smi check of the operands. Left operand is on the
2379 // stack. Right operand is in eax.
2380 Label smi_case, done, stub_call;
2384 JumpPatchSite patch_site(masm_);
2385 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
// Generic path: delegate to the BinaryOpIC stub and record the patch-site
// delta so the IC can later patch the smi check.
2387 __ bind(&stub_call);
2389 Handle<Code> code = CodeFactory::BinaryOpIC(
2390 isolate(), op, language_mode()).code();
2391 CallIC(code, expr->BinaryOperationFeedbackId());
2392 patch_site.EmitPatchInfo();
2393 __ jmp(&done, Label::kNear);
// Smi fast path.
2397 __ mov(eax, edx); // Copy left operand in case of a stub call.
2402 __ sar_cl(eax); // No checks of result necessary
// Clear the tag bits so the shifted value is a valid smi again.
2403 __ and_(eax, Immediate(~kSmiTagMask));
2410 // Check that the *signed* result fits in a smi.
2411 __ cmp(eax, 0xc0000000);
2412 __ j(positive, &result_ok);
2415 __ bind(&result_ok);
// Unsigned variant: the result only fits in a smi if the top two bits are
// clear.
2424 __ test(eax, Immediate(0xc0000000));
2425 __ j(zero, &result_ok);
2428 __ bind(&result_ok);
// Arithmetic cases bail out to the stub on overflow.
2434 __ j(overflow, &stub_call);
2438 __ j(overflow, &stub_call);
2443 __ j(overflow, &stub_call);
// A zero product needs the stub to get -0 vs +0 right (sign depends on
// the operands) — presumably the MUL case; the surrounding lines are not
// visible here.
2445 __ j(not_zero, &done, Label::kNear);
2448 __ j(negative, &stub_call);
2454 case Token::BIT_AND:
2457 case Token::BIT_XOR:
2465 context()->Plug(eax);
// Defines the methods and accessors of a class literal on the prototype
// (or on the constructor itself for static members) by iterating the
// literal's properties and calling the appropriate runtime definition
// functions. The constructor arrives in eax.
2469 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2470 // Constructor is in eax.
2471 DCHECK(lit != NULL);
2474 // No access check is needed here since the constructor is created by the
2476 Register scratch = ebx;
2477 __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2480 // store_slot_index points to the vector IC slot for the next store IC used.
2481 // ClassLiteral::ComputeFeedbackRequirements controls the allocation of slots
2482 // and must be updated if the number of store ICs emitted here changes.
2483 int store_slot_index = 0;
2484 for (int i = 0; i < lit->properties()->length(); i++) {
2485 ObjectLiteral::Property* property = lit->properties()->at(i);
2486 Expression* value = property->value();
// Pick the definition target: the constructor for statics, the prototype
// otherwise (both were left on the stack by the caller).
2488 if (property->is_static()) {
2489 __ push(Operand(esp, kPointerSize)); // constructor
2491 __ push(Operand(esp, 0)); // prototype
2493 EmitPropertyKey(property, lit->GetIdForProperty(i));
2495 // The static prototype property is read only. We handle the non computed
2496 // property name case in the parser. Since this is the only case where we
2497 // need to check for an own read only property we special case this so we do
2498 // not need to do this for every property.
2499 if (property->is_static() && property->is_computed_name()) {
2500 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2504 VisitForStackValue(value);
// Methods that reference 'super' need [[HomeObject]] set; this may
// consume a store-IC slot.
2505 EmitSetHomeObjectIfNeeded(value, 2,
2506 lit->SlotForHomeObject(value, &store_slot_index));
2508 switch (property->kind()) {
2509 case ObjectLiteral::Property::CONSTANT:
2510 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2511 case ObjectLiteral::Property::PROTOTYPE:
2513 case ObjectLiteral::Property::COMPUTED:
2514 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2517 case ObjectLiteral::Property::GETTER:
2518 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2519 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2522 case ObjectLiteral::Property::SETTER:
2523 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2524 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
// Normalize object layout after the dynamic definitions (called for both
// the prototype and the constructor).
2530 __ CallRuntime(Runtime::kToFastProperties, 1);
2533 __ CallRuntime(Runtime::kToFastProperties, 1);
2535 // Verify that compilation exactly consumed the number of store ic slots that
2536 // the ClassLiteral node had to offer.
2537 DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
// Emits a generic binary operation through the BinaryOpIC stub (no inlined
// smi fast path). The unbound patch site signals "no inlined smi code" to
// the IC patching machinery. Result is plugged into the context in eax.
2541 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2543 Handle<Code> code = CodeFactory::BinaryOpIC(
2544 isolate(), op, language_mode()).code();
2545 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2546 CallIC(code, expr->BinaryOperationFeedbackId());
2547 patch_site.EmitPatchInfo();
2548 context()->Plug(eax);
// Stores the value currently in eax into the reference denoted by 'expr'
// (variable, named/keyed property, or their 'super' variants), preserving
// the value so it can be plugged back into the context afterwards.
2552 void FullCodeGenerator::EmitAssignment(Expression* expr,
2553 FeedbackVectorICSlot slot) {
2554 DCHECK(expr->IsValidReferenceExpression());
2556 Property* prop = expr->AsProperty();
2557 LhsKind assign_type = Property::GetAssignType(prop);
2559 switch (assign_type) {
// Plain variable: delegate to EmitVariableAssignment in an effect context
// (the value in eax is not consumed by context plumbing).
2561 Variable* var = expr->AsVariableProxy()->var();
2562 EffectContext context(this);
2563 EmitVariableAssignment(var, Token::ASSIGN, slot);
2566 case NAMED_PROPERTY: {
2567 __ push(eax); // Preserve value.
2568 VisitForAccumulatorValue(prop->obj());
2569 __ Move(StoreDescriptor::ReceiverRegister(), eax);
2570 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2571 __ mov(StoreDescriptor::NameRegister(),
2572 prop->key()->AsLiteral()->value());
2573 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2577 case NAMED_SUPER_PROPERTY: {
2579 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2580 VisitForAccumulatorValue(
2581 prop->obj()->AsSuperPropertyReference()->home_object());
2582 // stack: value, this; eax: home_object
// Rearrange so the stack holds [this, home_object] with the value back in
// eax, as EmitNamedSuperPropertyStore expects.
2583 Register scratch = ecx;
2584 Register scratch2 = edx;
2585 __ mov(scratch, result_register()); // home_object
2586 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2587 __ mov(scratch2, MemOperand(esp, 0)); // this
2588 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2589 __ mov(MemOperand(esp, 0), scratch); // home_object
2590 // stack: this, home_object. eax: value
2591 EmitNamedSuperPropertyStore(prop);
2594 case KEYED_SUPER_PROPERTY: {
2596 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2598 prop->obj()->AsSuperPropertyReference()->home_object());
2599 VisitForAccumulatorValue(prop->key());
// Rotate the three stack slots and eax so the layout becomes
// [this, home_object, key] with the value in eax.
2600 Register scratch = ecx;
2601 Register scratch2 = edx;
2602 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2603 // stack: value, this, home_object; eax: key, edx: value
2604 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2605 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2606 __ mov(scratch, MemOperand(esp, 0)); // home_object
2607 __ mov(MemOperand(esp, kPointerSize), scratch);
2608 __ mov(MemOperand(esp, 0), eax);
2609 __ mov(eax, scratch2);
2610 // stack: this, home_object, key; eax: value.
2611 EmitKeyedSuperPropertyStore(prop);
2614 case KEYED_PROPERTY: {
2615 __ push(eax); // Preserve value.
2616 VisitForStackValue(prop->obj());
2617 VisitForAccumulatorValue(prop->key());
2618 __ Move(StoreDescriptor::NameRegister(), eax);
2619 __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2620 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2621 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2623 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2628 context()->Plug(eax);
// Stores eax into a stack-allocated local or a context slot. Context slots
// hold heap pointers, so those stores additionally emit a write barrier.
2632 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2633 Variable* var, MemOperand location) {
2634 __ mov(location, eax);
2635 if (var->IsContextSlot()) {
2637 int offset = Context::SlotOffset(var->index());
2638 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
// Stores eax into the given variable, dispatching on the variable's
// allocation (global, stack/context slot, lookup slot) and binding mode
// (let/const/legacy const), including the hole checks and errors required
// by the language semantics for each case.
2643 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2644 FeedbackVectorICSlot slot) {
2645 if (var->IsUnallocated()) {
2646 // Global var, const, or let.
2647 __ mov(StoreDescriptor::NameRegister(), var->name());
2648 __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2649 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2652 } else if (var->mode() == LET && op != Token::INIT_LET) {
2653 // Non-initializing assignment to let variable needs a write barrier.
2654 DCHECK(!var->IsLookupSlot());
2655 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2657 MemOperand location = VarOperand(var, ecx);
// TDZ check: assigning to a 'let' still holding the hole throws a
// ReferenceError.
2658 __ mov(edx, location);
2659 __ cmp(edx, isolate()->factory()->the_hole_value());
2660 __ j(not_equal, &assign, Label::kNear);
2661 __ push(Immediate(var->name()));
2662 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2664 EmitStoreToStackLocalOrContextSlot(var, location);
2666 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2667 // Assignment to const variable needs a write barrier.
2668 DCHECK(!var->IsLookupSlot());
2669 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2671 MemOperand location = VarOperand(var, ecx);
// Uninitialized const -> ReferenceError; initialized const -> const
// assignment error. Either way the assignment itself never happens.
2672 __ mov(edx, location);
2673 __ cmp(edx, isolate()->factory()->the_hole_value());
2674 __ j(not_equal, &const_error, Label::kNear);
2675 __ push(Immediate(var->name()));
2676 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2677 __ bind(&const_error);
2678 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2680 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2681 if (var->IsLookupSlot()) {
2682 // Assignment to var.
2683 __ push(eax); // Value.
2684 __ push(esi); // Context.
2685 __ push(Immediate(var->name()));
2686 __ push(Immediate(Smi::FromInt(language_mode())));
2687 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2689 // Assignment to var or initializing assignment to let/const in harmony
2691 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2692 MemOperand location = VarOperand(var, ecx);
2693 if (generate_debug_code_ && op == Token::INIT_LET) {
2694 // Check for an uninitialized let binding.
2695 __ mov(edx, location);
2696 __ cmp(edx, isolate()->factory()->the_hole_value());
2697 __ Check(equal, kLetBindingReInitialization);
2699 EmitStoreToStackLocalOrContextSlot(var, location);
2702 } else if (op == Token::INIT_CONST_LEGACY) {
2703 // Const initializers need a write barrier.
2704 DCHECK(var->mode() == CONST_LEGACY);
2705 DCHECK(!var->IsParameter()); // No const parameters.
2706 if (var->IsLookupSlot()) {
2709 __ push(Immediate(var->name()));
2710 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2712 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2714 MemOperand location = VarOperand(var, ecx);
// Legacy const is only initialized once: skip the store if the slot
// already holds a value.
2715 __ mov(edx, location);
2716 __ cmp(edx, isolate()->factory()->the_hole_value());
2717 __ j(not_equal, &skip, Label::kNear);
2718 EmitStoreToStackLocalOrContextSlot(var, location);
2723 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2724 if (is_strict(language_mode())) {
2725 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2727 // Silently ignore store in sloppy mode.
// Stores eax into a named (non-super) property using a store IC; the
// receiver was left on the stack by the caller. Records a bailout point
// after the store and plugs eax into the context.
2732 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2733 // Assignment to a property, using a named store IC.
2735 // esp[0] : receiver
2737 Property* prop = expr->target()->AsProperty();
2738 DCHECK(prop != NULL);
2739 DCHECK(prop->key()->IsLiteral());
2741 // Record source code position before IC call.
2742 SetSourcePosition(expr->position());
2743 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2744 __ pop(StoreDescriptor::ReceiverRegister());
2745 if (FLAG_vector_stores) {
2746 EmitLoadStoreICSlot(expr->AssignmentSlot());
2749 CallStoreIC(expr->AssignmentFeedbackId());
2751 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2752 context()->Plug(eax);
// Stores eax into a named property of 'super' via the runtime; 'this' and
// home_object are already on the stack, the literal key is pushed here.
// Strict vs sloppy mode selects the runtime entry.
2756 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2757 // Assignment to named property of super.
2759 // stack : receiver ('this'), home_object
2760 DCHECK(prop != NULL);
2761 Literal* key = prop->key()->AsLiteral();
2762 DCHECK(key != NULL);
2764 __ push(Immediate(key->value()));
2766 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2767 : Runtime::kStoreToSuper_Sloppy),
// Stores eax into a keyed property of 'super' via the runtime; 'this',
// home_object, and the key are already on the stack. Strict vs sloppy
// mode selects the runtime entry.
2772 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2773 // Assignment to named property of super.
2775 // stack : receiver ('this'), home_object, key
2779 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2780 : Runtime::kStoreKeyedToSuper_Sloppy),
// Stores eax into a keyed (non-super) property using the keyed store IC;
// key and receiver were left on the stack by the caller. Records a bailout
// point after the store and plugs eax into the context.
2785 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2786 // Assignment to a property, using a keyed store IC.
2789 // esp[kPointerSize] : receiver
2791 __ pop(StoreDescriptor::NameRegister()); // Key.
2792 __ pop(StoreDescriptor::ReceiverRegister());
2793 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2794 // Record source code position before IC call.
2795 SetSourcePosition(expr->position());
2797 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2798 if (FLAG_vector_stores) {
2799 EmitLoadStoreICSlot(expr->AssignmentSlot());
2802 CallIC(ic, expr->AssignmentFeedbackId());
2805 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2806 context()->Plug(eax);
// Emits code for a property load expression, dispatching on named vs keyed
// key and ordinary vs 'super' access to the four Emit*PropertyLoad
// helpers, then records a bailout point and plugs the result (eax).
2810 void FullCodeGenerator::VisitProperty(Property* expr) {
2811 Comment cmnt(masm_, "[ Property");
2812 Expression* key = expr->key();
2814 if (key->IsPropertyName()) {
2815 if (!expr->IsSuperAccess()) {
// Named load: receiver in the load descriptor's receiver register.
2816 VisitForAccumulatorValue(expr->obj());
2817 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2818 EmitNamedPropertyLoad(expr);
// Named super load: this and home_object go on the stack for the runtime.
2820 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2822 expr->obj()->AsSuperPropertyReference()->home_object());
2823 EmitNamedSuperPropertyLoad(expr);
2826 if (!expr->IsSuperAccess()) {
// Keyed load: receiver from the stack, key in the name register.
2827 VisitForStackValue(expr->obj());
2828 VisitForAccumulatorValue(expr->key());
2829 __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2830 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2831 EmitKeyedPropertyLoad(expr);
// Keyed super load: this, home_object, and key go on the stack.
2833 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2835 expr->obj()->AsSuperPropertyReference()->home_object());
2836 VisitForStackValue(expr->key());
2837 EmitKeyedSuperPropertyLoad(expr);
2840 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2841 context()->Plug(eax);
// Calls an inline cache stub, attaching the given AST id to the call's
// relocation info so type feedback can be associated with the AST node.
2845 void FullCodeGenerator::CallIC(Handle<Code> code,
2846 TypeFeedbackId ast_id) {
2848 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2852 // Code common for calls using the IC.
2853 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2854 Expression* callee = expr->expression();
2856 CallICState::CallType call_type =
2857 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2858 // Get the target function.
2859 if (call_type == CallICState::FUNCTION) {
2860 { StackValueContext context(this);
2861 EmitVariableLoad(callee->AsVariableProxy());
2862 PrepareForBailout(callee, NO_REGISTERS);
2864 // Push undefined as receiver. This is patched in the method prologue if it
2865 // is a sloppy mode method.
2866 __ push(Immediate(isolate()->factory()->undefined_value()));
2868 // Load the function from the receiver.
2869 DCHECK(callee->IsProperty());
2870 DCHECK(!callee->AsProperty()->IsSuperAccess());
2871 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2872 EmitNamedPropertyLoad(callee->AsProperty());
2873 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2874 // Push the target function under the receiver.
2875 __ push(Operand(esp, 0));
2876 __ mov(Operand(esp, kPointerSize), eax);
2879 EmitCall(expr, call_type);
// Emits a call of the form super.name(...): loads the target function via
// Runtime::kLoadFromSuper, leaves [target function, this] on the stack,
// and delegates to EmitCall as a method call.
2883 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2884 Expression* callee = expr->expression();
2885 DCHECK(callee->IsProperty());
2886 Property* prop = callee->AsProperty();
2887 DCHECK(prop->IsSuperAccess());
2889 SetSourcePosition(prop->position());
2890 Literal* key = prop->key()->AsLiteral();
2891 DCHECK(!key->value()->IsSmi());
2892 // Load the function from the receiver.
2893 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2894 VisitForStackValue(super_ref->home_object());
2895 VisitForAccumulatorValue(super_ref->this_var());
// Duplicate the receiver below the runtime arguments so a copy of 'this'
// survives the LoadFromSuper call for use as the call receiver.
2898 __ push(Operand(esp, kPointerSize * 2));
2899 __ push(Immediate(key->value()));
2902 // - this (receiver)
2903 // - this (receiver) <-- LoadFromSuper will pop here and below.
2906 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2908 // Replace home_object with target function.
2909 __ mov(Operand(esp, kPointerSize), eax);
2912 // - target function
2913 // - this (receiver)
2914 EmitCall(expr, CallICState::METHOD);
2918 // Code common for calls using the IC.
2919 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2922 VisitForAccumulatorValue(key);
2924 Expression* callee = expr->expression();
2926 // Load the function from the receiver.
2927 DCHECK(callee->IsProperty());
2928 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2929 __ mov(LoadDescriptor::NameRegister(), eax);
2930 EmitKeyedPropertyLoad(callee->AsProperty());
2931 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2933 // Push the target function under the receiver.
2934 __ push(Operand(esp, 0));
2935 __ mov(Operand(esp, kPointerSize), eax);
2937 EmitCall(expr, CallICState::METHOD);
// Emits a call of the form super[key](...): loads the target function via
// Runtime::kLoadKeyedFromSuper, leaves [target function, this] on the
// stack, and delegates to EmitCall as a method call.
2941 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2942 Expression* callee = expr->expression();
2943 DCHECK(callee->IsProperty());
2944 Property* prop = callee->AsProperty();
2945 DCHECK(prop->IsSuperAccess());
2947 SetSourcePosition(prop->position());
2948 // Load the function from the receiver.
2949 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2950 VisitForStackValue(super_ref->home_object());
2951 VisitForAccumulatorValue(super_ref->this_var());
// Duplicate the receiver below the runtime arguments so a copy of 'this'
// survives the LoadKeyedFromSuper call for use as the call receiver.
2954 __ push(Operand(esp, kPointerSize * 2));
2955 VisitForStackValue(prop->key());
2958 // - this (receiver)
2959 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2962 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2964 // Replace home_object with target function.
2965 __ mov(Operand(esp, kPointerSize), eax);
2968 // - target function
2969 // - this (receiver)
2970 EmitCall(expr, CallICState::METHOD);
// Common tail for IC-based calls: evaluates the arguments onto the stack,
// loads the feedback slot (edx) and the target function (edi), invokes the
// CallIC, then restores the context and plugs the result, dropping the
// receiver.
2974 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2975 // Load the arguments.
2976 ZoneList<Expression*>* args = expr->arguments();
2977 int arg_count = args->length();
2978 { PreservePositionScope scope(masm()->positions_recorder());
2979 for (int i = 0; i < arg_count; i++) {
2980 VisitForStackValue(args->at(i));
2984 // Record source position of the IC call.
2985 SetSourcePosition(expr->position());
2986 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2987 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
// The function was pushed below the receiver and the arguments.
2988 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2989 // Don't assign a type feedback id to the IC, since type feedback is provided
2990 // by the vector above.
2993 RecordJSReturnSite(expr);
2995 // Restore context register.
2996 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2998 context()->DropAndPlug(1, eax);
// Pushes the six arguments for Runtime::kResolvePossiblyDirectEval (eval
// code, enclosing function, receiver, language mode, scope start position)
// and calls it; the runtime resolves whether this is a direct eval and
// returns the function/receiver pair to call.
3002 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3003 // Push copy of the first argument or undefined if it doesn't exist.
3004 if (arg_count > 0) {
3005 __ push(Operand(esp, arg_count * kPointerSize));
3007 __ push(Immediate(isolate()->factory()->undefined_value()));
3010 // Push the enclosing function.
3011 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3012 // Push the receiver of the enclosing function.
3013 Variable* this_var = scope()->LookupThis();
3014 DCHECK_NOT_NULL(this_var);
3015 __ push(VarOperand(this_var, ecx));
3016 // Push the language mode.
3017 __ push(Immediate(Smi::FromInt(language_mode())));
3019 // Push the start position of the scope the calls resides in.
3020 __ push(Immediate(Smi::FromInt(scope()->start_position())));
3022 // Do the runtime call.
3023 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
// Initializes 'this' after a super() call: throws a ReferenceError if
// 'this' was already initialized (not the hole), otherwise assigns it via
// EmitVariableAssignment with INIT_CONST semantics.
3027 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3028 SuperCallReference* super_call_ref, FeedbackVectorICSlot slot) {
3029 Variable* this_var = super_call_ref->this_var()->var();
3030 GetVar(ecx, this_var);
// The hole marks an uninitialized 'this'; anything else means super() was
// already called.
3031 __ cmp(ecx, isolate()->factory()->the_hole_value());
3032 Label uninitialized_this;
3033 __ j(equal, &uninitialized_this);
3034 __ push(Immediate(this_var->name()));
3035 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3036 __ bind(&uninitialized_this);
3038 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
// Generates code for a JS call expression. Dispatches on the statically
// determined call type: possibly-direct eval, global, lookup slot, property
// (named/keyed, with or without super), super constructor, or an arbitrary
// expression. NOTE(review): this excerpt is missing several interior lines
// (closing braces, else keywords, some stub calls) relative to the original
// file line numbering — do not treat it as complete.
3042 void FullCodeGenerator::VisitCall(Call* expr) {
3044 // We want to verify that RecordJSReturnSite gets called on all paths
3045 // through this function. Avoid early returns.
3046 expr->return_is_recorded_ = false;
3049 Comment cmnt(masm_, "[ Call");
3050 Expression* callee = expr->expression();
3051 Call::CallType call_type = expr->GetCallType(isolate());
3053 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3054 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3055 // to resolve the function we need to call and the receiver of the call.
3056 // Then we call the resolved function using the given arguments.
3057 ZoneList<Expression*>* args = expr->arguments();
3058 int arg_count = args->length();
3059 { PreservePositionScope pos_scope(masm()->positions_recorder());
3060 VisitForStackValue(callee);
3061 // Reserved receiver slot.
3062 __ push(Immediate(isolate()->factory()->undefined_value()));
3063 // Push the arguments.
3064 for (int i = 0; i < arg_count; i++) {
3065 VisitForStackValue(args->at(i));
3068 // Push a copy of the function (found below the arguments) and
3070 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
3071 EmitResolvePossiblyDirectEval(arg_count);
3073 // The runtime call returns a pair of values in eax (function) and
3074 // edx (receiver). Touch up the stack with the right values.
3075 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
3076 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
3078 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3080 // Record source position for debugger.
3081 SetSourcePosition(expr->position());
3082 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3083 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3085 RecordJSReturnSite(expr);
3086 // Restore context register.
3087 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Drop the extra stack slot and leave the call result (eax) in the context.
3088 context()->DropAndPlug(1, eax);
3090 } else if (call_type == Call::GLOBAL_CALL) {
3091 EmitCallWithLoadIC(expr);
3092 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3093 // Call to a lookup slot (dynamically introduced variable).
3094 VariableProxy* proxy = callee->AsVariableProxy();
3096 { PreservePositionScope scope(masm()->positions_recorder());
3097 // Generate code for loading from variables potentially shadowed by
3098 // eval-introduced variables.
3099 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3102 // Call the runtime to find the function to call (returned in eax) and
3103 // the object holding it (returned in edx).
3104 __ push(context_register());
3105 __ push(Immediate(proxy->name()));
3106 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3107 __ push(eax); // Function.
3108 __ push(edx); // Receiver.
3109 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3111 // If fast case code has been generated, emit code to push the function
3112 // and receiver and have the slow path jump around this code.
3113 if (done.is_linked()) {
3115 __ jmp(&call, Label::kNear);
3119 // The receiver is implicitly the global receiver. Indicate this by
3120 // passing the hole to the call function stub.
3121 __ push(Immediate(isolate()->factory()->undefined_value()));
3125 // The receiver is either the global receiver or an object found by
3129 } else if (call_type == Call::PROPERTY_CALL) {
3130 Property* property = callee->AsProperty();
3131 bool is_named_call = property->key()->IsPropertyName();
// Super property calls and ordinary property calls use different IC paths.
3132 if (property->IsSuperAccess()) {
3133 if (is_named_call) {
3134 EmitSuperCallWithLoadIC(expr);
3136 EmitKeyedSuperCallWithLoadIC(expr);
3140 PreservePositionScope scope(masm()->positions_recorder());
3141 VisitForStackValue(property->obj());
3143 if (is_named_call) {
3144 EmitCallWithLoadIC(expr);
3146 EmitKeyedCallWithLoadIC(expr, property->key());
3149 } else if (call_type == Call::SUPER_CALL) {
3150 EmitSuperConstructorCall(expr);
3152 DCHECK(call_type == Call::OTHER_CALL);
3153 // Call to an arbitrary expression not handled specially above.
3154 { PreservePositionScope scope(masm()->positions_recorder());
3155 VisitForStackValue(callee);
// Undefined receiver for a call to an arbitrary expression.
3157 __ push(Immediate(isolate()->factory()->undefined_value()));
3158 // Emit function call.
3163 // RecordJSReturnSite should have been called.
3164 DCHECK(expr->return_is_recorded_);
// Generates code for a 'new' expression: evaluates the constructor and the
// arguments left-to-right onto the stack, then invokes the construct stub
// which handles allocation and constructor invocation. Result lands in eax.
3169 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3170 Comment cmnt(masm_, "[ CallNew");
3171 // According to ECMA-262, section 11.2.2, page 44, the function
3172 // expression in new calls must be evaluated before the
3175 // Push constructor on the stack. If it's not a function it's used as
3176 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3178 DCHECK(!expr->expression()->IsSuperPropertyReference());
3179 VisitForStackValue(expr->expression());
3181 // Push the arguments ("left-to-right") on the stack.
3182 ZoneList<Expression*>* args = expr->arguments();
3183 int arg_count = args->length();
3184 for (int i = 0; i < arg_count; i++) {
3185 VisitForStackValue(args->at(i));
3188 // Call the construct call builtin that handles allocation and
3189 // constructor invocation.
3190 SetSourcePosition(expr->position());
3192 // Load function and argument count into edi and eax.
3193 __ Move(eax, Immediate(arg_count))
3194 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3196 // Record call targets in unoptimized code.
3197 if (FLAG_pretenuring_call_new) {
3198 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
// The allocation-site slot is expected immediately after the call slot.
3199 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3200 expr->CallNewFeedbackSlot().ToInt() + 1);
// ebx = feedback vector, edx = slot index (as a Smi) for target recording.
3203 __ LoadHeapObject(ebx, FeedbackVector());
3204 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
3206 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3207 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3208 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3209 context()->Plug(eax);
// Generates code for a super(...) constructor call: pushes new.target and
// the super constructor, evaluates arguments, invokes the construct stub
// with SUPER_CALL_RECORD_TARGET, then initializes 'this'. Result in eax.
3213 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3214 SuperCallReference* super_call_ref =
3215 expr->expression()->AsSuperCallReference();
3216 DCHECK_NOT_NULL(super_call_ref);
3218 VariableProxy* new_target_proxy = super_call_ref->new_target_var();
3219 VisitForStackValue(new_target_proxy);
// Load the [[Prototype]] of the active function (the super constructor).
3221 EmitLoadSuperConstructor(super_call_ref);
3222 __ push(result_register());
3224 // Push the arguments ("left-to-right") on the stack.
3225 ZoneList<Expression*>* args = expr->arguments();
3226 int arg_count = args->length();
3227 for (int i = 0; i < arg_count; i++) {
3228 VisitForStackValue(args->at(i));
3231 // Call the construct call builtin that handles allocation and
3232 // constructor invocation.
3233 SetSourcePosition(expr->position());
3235 // Load function and argument count into edi and eax.
3236 __ Move(eax, Immediate(arg_count));
3237 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3239 // Record call targets in unoptimized code.
3240 if (FLAG_pretenuring_call_new) {
3242 /* TODO(dslomov): support pretenuring.
3243 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3244 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3245 expr->CallNewFeedbackSlot().ToInt() + 1);
3249 __ LoadHeapObject(ebx, FeedbackVector());
3250 __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
3252 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3253 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3257 RecordJSReturnSite(expr);
// Bind the returned object to 'this' (throws if already initialized).
3259 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3260 context()->Plug(eax);
// Inline intrinsic %_IsSmi: tests whether the single argument is a Smi by
// checking the tag bit and splits control flow on the result.
3264 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3265 ZoneList<Expression*>* args = expr->arguments();
3266 DCHECK(args->length() == 1);
3268 VisitForAccumulatorValue(args->at(0));
3270 Label materialize_true, materialize_false;
3271 Label* if_true = NULL;
3272 Label* if_false = NULL;
3273 Label* fall_through = NULL;
3274 context()->PrepareTest(&materialize_true, &materialize_false,
3275 &if_true, &if_false, &fall_through);
3277 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// A Smi has the low tag bit clear, so a zero test result means "is Smi".
3278 __ test(eax, Immediate(kSmiTagMask));
3279 Split(zero, if_true, if_false, fall_through);
3281 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsNonNegativeSmi: true iff the argument is a Smi with
// the sign bit clear (tag bit and bit 31 both zero).
3285 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3286 ZoneList<Expression*>* args = expr->arguments();
3287 DCHECK(args->length() == 1);
3289 VisitForAccumulatorValue(args->at(0));
3291 Label materialize_true, materialize_false;
3292 Label* if_true = NULL;
3293 Label* if_false = NULL;
3294 Label* fall_through = NULL;
3295 context()->PrepareTest(&materialize_true, &materialize_false,
3296 &if_true, &if_false, &fall_through);
3298 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// Test both the Smi tag bit and the sign bit in one instruction.
3299 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
3300 Split(zero, if_true, if_false, fall_through);
3302 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsObject: true for null, and for non-undetectable heap
// objects whose instance type is in the non-callable spec-object range.
3306 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3307 ZoneList<Expression*>* args = expr->arguments();
3308 DCHECK(args->length() == 1);
3310 VisitForAccumulatorValue(args->at(0));
3312 Label materialize_true, materialize_false;
3313 Label* if_true = NULL;
3314 Label* if_false = NULL;
3315 Label* fall_through = NULL;
3316 context()->PrepareTest(&materialize_true, &materialize_false,
3317 &if_true, &if_false, &fall_through);
// Smis are never objects.
3319 __ JumpIfSmi(eax, if_false);
// null counts as an object for this intrinsic.
3320 __ cmp(eax, isolate()->factory()->null_value());
3321 __ j(equal, if_true);
3322 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3323 // Undetectable objects behave like undefined when tested with typeof.
3324 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset))
3325 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3326 __ j(not_zero, if_false);
// Range check on the instance type: [FIRST, LAST]_NONCALLABLE_SPEC_OBJECT_TYPE.
3327 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3328 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3329 __ j(below, if_false);
3330 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3331 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3332 Split(below_equal, if_true, if_false, fall_through);
3334 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsSpecObject: true iff the argument is a heap object
// whose instance type is at or above FIRST_SPEC_OBJECT_TYPE.
3338 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3339 ZoneList<Expression*>* args = expr->arguments();
3340 DCHECK(args->length() == 1);
3342 VisitForAccumulatorValue(args->at(0));
3344 Label materialize_true, materialize_false;
3345 Label* if_true = NULL;
3346 Label* if_false = NULL;
3347 Label* fall_through = NULL;
3348 context()->PrepareTest(&materialize_true, &materialize_false,
3349 &if_true, &if_false, &fall_through);
3351 __ JumpIfSmi(eax, if_false);
// CmpObjectType loads the map into ebx and compares the instance type.
3352 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3353 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3354 Split(above_equal, if_true, if_false, fall_through);
3356 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsUndetectableObject: true iff the argument is a heap
// object whose map has the kIsUndetectable bit set.
3360 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3361 ZoneList<Expression*>* args = expr->arguments();
3362 DCHECK(args->length() == 1);
3364 VisitForAccumulatorValue(args->at(0));
3366 Label materialize_true, materialize_false;
3367 Label* if_true = NULL;
3368 Label* if_false = NULL;
3369 Label* fall_through = NULL;
3370 context()->PrepareTest(&materialize_true, &materialize_false,
3371 &if_true, &if_false, &fall_through);
3373 __ JumpIfSmi(eax, if_false);
// Load the map, then its bit field, and test the undetectable bit.
3374 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3375 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3376 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3377 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3378 Split(not_zero, if_true, if_false, fall_through);
3380 context()->Plug(if_true, if_false);
// Inline intrinsic: checks whether a String wrapper object is safe for the
// default valueOf behavior — i.e. it has no own 'valueOf' property and its
// prototype is the unmodified String prototype. Caches a positive result in
// a map bit so the descriptor scan is skipped next time.
// NOTE(review): some interior lines (loop labels, a bind for 'done') are
// missing from this excerpt relative to the original line numbering.
3384 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3385 CallRuntime* expr) {
3386 ZoneList<Expression*>* args = expr->arguments();
3387 DCHECK(args->length() == 1);
3389 VisitForAccumulatorValue(args->at(0));
3391 Label materialize_true, materialize_false, skip_lookup;
3392 Label* if_true = NULL;
3393 Label* if_false = NULL;
3394 Label* fall_through = NULL;
3395 context()->PrepareTest(&materialize_true, &materialize_false,
3396 &if_true, &if_false, &fall_through);
3398 __ AssertNotSmi(eax);
3400 // Check whether this map has already been checked to be safe for default
// valueOf; if the cached bit is set, skip the descriptor-array lookup.
3402 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3403 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3404 1 << Map::kStringWrapperSafeForDefaultValueOf);
3405 __ j(not_zero, &skip_lookup);
3407 // Check for fast case object. Return false for slow case objects.
3408 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3409 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3410 __ cmp(ecx, isolate()->factory()->hash_table_map());
3411 __ j(equal, if_false);
3413 // Look for valueOf string in the descriptor array, and indicate false if
3414 // found. Since we omit an enumeration index check, if it is added via a
3415 // transition that shares its descriptor array, this is a false positive.
3416 Label entry, loop, done;
3418 // Skip loop if no descriptors are valid.
3419 __ NumberOfOwnDescriptors(ecx, ebx);
3423 __ LoadInstanceDescriptors(ebx, ebx);
3424 // ebx: descriptor array.
3425 // ecx: valid entries in the descriptor array.
3426 // Calculate the end of the descriptor array.
3427 STATIC_ASSERT(kSmiTag == 0);
3428 STATIC_ASSERT(kSmiTagSize == 1);
3429 STATIC_ASSERT(kPointerSize == 4);
3430 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3431 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3432 // Calculate location of the first key name.
3433 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3434 // Loop through all the keys in the descriptor array. If one of these is the
3435 // internalized string "valueOf" the result is false.
// Compare the current key against the internalized "valueOf" string.
3438 __ mov(edx, FieldOperand(ebx, 0));
3439 __ cmp(edx, isolate()->factory()->value_of_string());
3440 __ j(equal, if_false);
// Advance to the next descriptor entry.
3441 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3444 __ j(not_equal, &loop);
3448 // Reload map as register ebx was used as temporary above.
3449 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3451 // Set the bit in the map to indicate that there is no local valueOf field.
3452 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3453 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3455 __ bind(&skip_lookup);
3457 // If a valueOf property is not found on the object check that its
3458 // prototype is the un-modified String prototype. If not result is false.
3459 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3460 __ JumpIfSmi(ecx, if_false);
// Compare the prototype's map against the cached String prototype map
// stored in the native context.
3461 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3462 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3464 FieldOperand(edx, GlobalObject::kNativeContextOffset));
3467 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3468 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3469 Split(equal, if_true, if_false, fall_through);
3471 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsFunction: true iff the argument is a heap object
// with instance type JS_FUNCTION_TYPE.
3475 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3476 ZoneList<Expression*>* args = expr->arguments();
3477 DCHECK(args->length() == 1);
3479 VisitForAccumulatorValue(args->at(0));
3481 Label materialize_true, materialize_false;
3482 Label* if_true = NULL;
3483 Label* if_false = NULL;
3484 Label* fall_through = NULL;
3485 context()->PrepareTest(&materialize_true, &materialize_false,
3486 &if_true, &if_false, &fall_through);
3488 __ JumpIfSmi(eax, if_false);
3489 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3490 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3491 Split(equal, if_true, if_false, fall_through);
3493 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsMinusZero: true iff the argument is a HeapNumber
// whose bit pattern is exactly -0.0 (exponent half 0x80000000, mantissa 0).
3497 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3498 ZoneList<Expression*>* args = expr->arguments();
3499 DCHECK(args->length() == 1);
3501 VisitForAccumulatorValue(args->at(0));
3503 Label materialize_true, materialize_false;
3504 Label* if_true = NULL;
3505 Label* if_false = NULL;
3506 Label* fall_through = NULL;
3507 context()->PrepareTest(&materialize_true, &materialize_false,
3508 &if_true, &if_false, &fall_through);
// Smis and non-HeapNumber objects can never be -0.
3510 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3511 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3512 // Check if the exponent half is 0x80000000. Comparing against 1 and
3513 // checking for overflow is the shortest possible encoding.
3514 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3515 __ j(no_overflow, if_false);
3516 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3517 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3518 Split(equal, if_true, if_false, fall_through);
3520 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsArray: true iff the argument is a heap object with
// instance type JS_ARRAY_TYPE.
3525 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3526 ZoneList<Expression*>* args = expr->arguments();
3527 DCHECK(args->length() == 1);
3529 VisitForAccumulatorValue(args->at(0));
3531 Label materialize_true, materialize_false;
3532 Label* if_true = NULL;
3533 Label* if_false = NULL;
3534 Label* fall_through = NULL;
3535 context()->PrepareTest(&materialize_true, &materialize_false,
3536 &if_true, &if_false, &fall_through);
3538 __ JumpIfSmi(eax, if_false);
3539 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3540 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3541 Split(equal, if_true, if_false, fall_through);
3543 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsRegExp: true iff the argument is a heap object with
// instance type JS_REGEXP_TYPE.
3547 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3548 ZoneList<Expression*>* args = expr->arguments();
3549 DCHECK(args->length() == 1);
3551 VisitForAccumulatorValue(args->at(0));
3553 Label materialize_true, materialize_false;
3554 Label* if_true = NULL;
3555 Label* if_false = NULL;
3556 Label* fall_through = NULL;
3557 context()->PrepareTest(&materialize_true, &materialize_false,
3558 &if_true, &if_false, &fall_through);
3560 __ JumpIfSmi(eax, if_false);
3561 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3562 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3563 Split(equal, if_true, if_false, fall_through);
3565 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsJSProxy: true iff the argument's instance type lies
// in the [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE] range.
// NOTE(review): the declaration of the 'map' register used below is missing
// from this excerpt (original line numbering skips 3583).
3569 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3570 ZoneList<Expression*>* args = expr->arguments();
3571 DCHECK(args->length() == 1);
3573 VisitForAccumulatorValue(args->at(0));
3575 Label materialize_true, materialize_false;
3576 Label* if_true = NULL;
3577 Label* if_false = NULL;
3578 Label* fall_through = NULL;
3579 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3580 &if_false, &fall_through);
3582 __ JumpIfSmi(eax, if_false);
3584 __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
// Range check: below FIRST_JS_PROXY_TYPE fails; <= LAST_JS_PROXY_TYPE passes.
3585 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3586 __ j(less, if_false);
3587 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3588 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3589 Split(less_equal, if_true, if_false, fall_through);
3591 context()->Plug(if_true, if_false);
// Inline intrinsic %_IsConstructCall: true iff the calling frame (skipping
// an arguments-adaptor frame if present) carries the CONSTRUCT marker.
3595 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3596 DCHECK(expr->arguments()->length() == 0);
3598 Label materialize_true, materialize_false;
3599 Label* if_true = NULL;
3600 Label* if_false = NULL;
3601 Label* fall_through = NULL;
3602 context()->PrepareTest(&materialize_true, &materialize_false,
3603 &if_true, &if_false, &fall_through);
3605 // Get the frame pointer for the calling frame.
3606 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3608 // Skip the arguments adaptor frame if it exists.
3609 Label check_frame_marker;
3610 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3611 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3612 __ j(not_equal, &check_frame_marker);
3613 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3615 // Check the marker in the calling frame.
3616 __ bind(&check_frame_marker);
3617 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3618 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3619 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3620 Split(equal, if_true, if_false, fall_through);
3622 context()->Plug(if_true, if_false);
// Inline intrinsic %_ObjectEquals: identity comparison of two values.
// NOTE(review): the cmp instruction between the two operands is missing
// from this excerpt (original line numbering skips 3641-3642).
3626 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3627 ZoneList<Expression*>* args = expr->arguments();
3628 DCHECK(args->length() == 2);
3630 // Load the two objects into registers and perform the comparison.
3631 VisitForStackValue(args->at(0));
3632 VisitForAccumulatorValue(args->at(1));
3634 Label materialize_true, materialize_false;
3635 Label* if_true = NULL;
3636 Label* if_false = NULL;
3637 Label* fall_through = NULL;
3638 context()->PrepareTest(&materialize_true, &materialize_false,
3639 &if_true, &if_false, &fall_through);
3643 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3644 Split(equal, if_true, if_false, fall_through);
3646 context()->Plug(if_true, if_false);
// Inline intrinsic %_Arguments(index): reads one element of the current
// function's arguments via ArgumentsAccessStub (key in edx per the stub's
// contract; formal parameter count in eax).
3650 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3651 ZoneList<Expression*>* args = expr->arguments();
3652 DCHECK(args->length() == 1);
3654 // ArgumentsAccessStub expects the key in edx and the formal
3655 // parameter count in eax.
3656 VisitForAccumulatorValue(args->at(0));
3658 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3659 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3661 context()->Plug(eax);
// Inline intrinsic %_ArgumentsLength: yields the actual argument count —
// the formal parameter count unless the caller went through an arguments
// adaptor frame, in which case the adaptor's recorded length is used.
3665 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3666 DCHECK(expr->arguments()->length() == 0);
3669 // Get the number of formal parameters.
3670 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3672 // Check if the calling frame is an arguments adaptor frame.
3673 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3674 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3675 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3676 __ j(not_equal, &exit);
3678 // Arguments adaptor case: Read the arguments length from the
// adaptor frame instead of the formal parameter count.
3680 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3684 context()->Plug(eax);
// Inline intrinsic %_ClassOf: returns the class name of an object — null
// for non-objects/Smis, "Function" for callables, "Object" when the map's
// constructor is not a JS function, otherwise the constructor's instance
// class name from its SharedFunctionInfo.
3688 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 DCHECK(args->length() == 1);
3691 Label done, null, function, non_function_constructor;
3693 VisitForAccumulatorValue(args->at(0));
3695 // If the object is a smi, we return null.
3696 __ JumpIfSmi(eax, &null);
3698 // Check that the object is a JS object but take special care of JS
3699 // functions to make sure they have 'Function' as their class.
3700 // Assume that there are only two callable types, and one of them is at
3701 // either end of the type range for JS object types. Saves extra comparisons.
3702 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3703 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3704 // Map is now in eax.
3706 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3707 FIRST_SPEC_OBJECT_TYPE + 1);
3708 __ j(equal, &function);
3710 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3711 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3712 LAST_SPEC_OBJECT_TYPE - 1);
3713 __ j(equal, &function);
3714 // Assume that there is no larger type.
3715 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3717 // Check if the constructor in the map is a JS function.
3718 __ GetMapConstructor(eax, eax, ebx);
3719 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3720 __ j(not_equal, &non_function_constructor);
3722 // eax now contains the constructor function. Grab the
3723 // instance class name from there.
3724 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3725 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3728 // Functions have class 'Function'.
3730 __ mov(eax, isolate()->factory()->Function_string());
3733 // Objects with a non-function constructor have class 'Object'.
3734 __ bind(&non_function_constructor);
3735 __ mov(eax, isolate()->factory()->Object_string());
3738 // Non-JS objects have class null.
3740 __ mov(eax, isolate()->factory()->null_value());
3745 context()->Plug(eax);
// Inline intrinsic %_SubString: pushes (string, from, to) on the stack and
// delegates to SubStringStub. Result is plugged from eax.
3749 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3750 // Load the arguments on the stack and call the stub.
3751 SubStringStub stub(isolate());
3752 ZoneList<Expression*>* args = expr->arguments();
3753 DCHECK(args->length() == 3);
3754 VisitForStackValue(args->at(0));
3755 VisitForStackValue(args->at(1));
3756 VisitForStackValue(args->at(2));
3758 context()->Plug(eax);
// Inline intrinsic %_RegExpExec: pushes the four arguments on the stack and
// delegates to RegExpExecStub. Result is plugged from eax.
3762 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3763 // Load the arguments on the stack and call the stub.
3764 RegExpExecStub stub(isolate());
3765 ZoneList<Expression*>* args = expr->arguments();
3766 DCHECK(args->length() == 4);
3767 VisitForStackValue(args->at(0));
3768 VisitForStackValue(args->at(1));
3769 VisitForStackValue(args->at(2));
3770 VisitForStackValue(args->at(3));
3772 context()->Plug(eax);
// Inline intrinsic %_ValueOf: for a JSValue wrapper returns the wrapped
// primitive; for anything else (including Smis) returns the argument as-is.
3776 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3777 ZoneList<Expression*>* args = expr->arguments();
3778 DCHECK(args->length() == 1);
3780 VisitForAccumulatorValue(args->at(0)); // Load the object.
3783 // If the object is a smi return the object.
3784 __ JumpIfSmi(eax, &done, Label::kNear);
3785 // If the object is not a value type, return the object.
3786 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3787 __ j(not_equal, &done, Label::kNear);
// JSValue: unwrap the stored primitive value.
3788 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3791 context()->Plug(eax);
// Inline intrinsic %_ThrowIfNotADate: verifies the argument is a JSDate and
// falls through with it in eax; otherwise calls the runtime to throw a
// "not a date" error. The object, result, and scratch registers are eax,
// eax, and ecx respectively.
// FIX(review): the label references had been corrupted by an encoding
// round-trip — '&not_date_object' had become '¬_date_object' (HTML
// entity '&not;' rendered as U+00AC), which does not compile. Restored
// the intended '&not_date_object'.
3795 void FullCodeGenerator::EmitThrowIfNotADate(CallRuntime* expr) {
3796 ZoneList<Expression*>* args = expr->arguments();
3797 DCHECK_EQ(1, args->length());
3799 VisitForAccumulatorValue(args->at(0)); // Load the object.
3801 Label done, not_date_object;
3802 Register object = eax;
3803 Register result = eax;
3804 Register scratch = ecx;
// Smis are never dates; heap objects must have instance type JS_DATE_TYPE.
3806 __ JumpIfSmi(object, &not_date_object, Label::kNear);
3807 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3808 __ j(equal, &done, Label::kNear);
3809 __ bind(&not_date_object);
3810 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3813 context()->Plug(result);
// Inline intrinsic %_DateField(date, index): reads a cached date field.
// Index 0 is the raw time value; other cached fields are read inline when
// the date-cache stamp is current, otherwise a C function recomputes them.
3817 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3818 ZoneList<Expression*>* args = expr->arguments();
3819 DCHECK(args->length() == 2);
3820 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
// The field index must be a compile-time Smi literal.
3821 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3823 VisitForAccumulatorValue(args->at(0)); // Load the object.
3825 Register object = eax;
3826 Register result = eax;
3827 Register scratch = ecx;
3829 if (index->value() == 0) {
// Field 0 is the time value, stored directly in the JSDate object.
3830 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3832 Label runtime, done;
3833 if (index->value() < JSDate::kFirstUncachedField) {
// Fast path: the per-isolate date cache stamp must match the object's.
3834 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3835 __ mov(scratch, Operand::StaticVariable(stamp));
3836 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3837 __ j(not_equal, &runtime, Label::kNear);
3838 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3839 kPointerSize * index->value()));
3840 __ jmp(&done, Label::kNear);
// Slow path: call the C date-field function with (object, index).
3843 __ PrepareCallCFunction(2, scratch);
3844 __ mov(Operand(esp, 0), object);
3845 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3846 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3850 context()->Plug(result);
// Inline intrinsic %_OneByteSeqStringSetChar(index, value, string): stores
// a character into a sequential one-byte string in place. In debug mode
// verifies index and value are Smis and the string has the expected type.
// NOTE(review): the Smi-untagging of index/value and the pops that load
// them are missing from this excerpt (original lines 3866-3867, 3876-3877).
3854 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3855 ZoneList<Expression*>* args = expr->arguments();
3856 DCHECK_EQ(3, args->length());
3858 Register string = eax;
3859 Register index = ebx;
3860 Register value = ecx;
3862 VisitForStackValue(args->at(0)); // index
3863 VisitForStackValue(args->at(1)); // value
3864 VisitForAccumulatorValue(args->at(2)); // string
3869 if (FLAG_debug_code) {
3870 __ test(value, Immediate(kSmiTagMask));
3871 __ Check(zero, kNonSmiValue);
3872 __ test(index, Immediate(kSmiTagMask));
3873 __ Check(zero, kNonSmiValue);
3879 if (FLAG_debug_code) {
3880 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3881 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
// Byte store at header + index (one byte per character).
3884 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3886 context()->Plug(string);
// Inline intrinsic %_TwoByteSeqStringSetChar(index, value, string): stores
// a character into a sequential two-byte string in place. The Smi-tagged
// index doubles as the byte offset, so no untagging is needed.
3890 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3891 ZoneList<Expression*>* args = expr->arguments();
3892 DCHECK_EQ(3, args->length());
3894 Register string = eax;
3895 Register index = ebx;
3896 Register value = ecx;
3898 VisitForStackValue(args->at(0)); // index
3899 VisitForStackValue(args->at(1)); // value
3900 VisitForAccumulatorValue(args->at(2)); // string
3904 if (FLAG_debug_code) {
3905 __ test(value, Immediate(kSmiTagMask));
3906 __ Check(zero, kNonSmiValue);
3907 __ test(index, Immediate(kSmiTagMask));
3908 __ Check(zero, kNonSmiValue);
3910 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3911 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3916 // No need to untag a smi for two-byte addressing.
3917 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3919 context()->Plug(string);
// Inline intrinsic %_MathPow: pushes base and exponent on the stack and
// delegates to MathPowStub (ON_STACK variant). Result is plugged from eax.
3923 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3924 // Load the arguments on the stack and call the runtime function.
3925 ZoneList<Expression*>* args = expr->arguments();
3926 DCHECK(args->length() == 2);
3927 VisitForStackValue(args->at(0));
3928 VisitForStackValue(args->at(1));
3930 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3932 context()->Plug(eax);
// Inline intrinsic %_SetValueOf(object, value): if object is a JSValue
// wrapper, stores value into it (with a write barrier) and returns value;
// otherwise just returns value.
3936 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3937 ZoneList<Expression*>* args = expr->arguments();
3938 DCHECK(args->length() == 2);
3940 VisitForStackValue(args->at(0)); // Load the object.
3941 VisitForAccumulatorValue(args->at(1)); // Load the value.
3942 __ pop(ebx); // eax = value. ebx = object.
3945 // If the object is a smi, return the value.
3946 __ JumpIfSmi(ebx, &done, Label::kNear);
3948 // If the object is not a value type, return the value.
3949 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3950 __ j(not_equal, &done, Label::kNear);
// Store the new wrapped value into the JSValue.
3953 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3955 // Update the write barrier. Save the value as it will be
3956 // overwritten by the write barrier code and is needed afterward.
3958 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3961 context()->Plug(eax);
// Inline intrinsic %_NumberToString: loads the argument into eax and
// delegates to NumberToStringStub. Result is plugged from eax.
3965 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3966 ZoneList<Expression*>* args = expr->arguments();
3967 DCHECK_EQ(args->length(), 1);
3969 // Load the argument into eax and call the stub.
3970 VisitForAccumulatorValue(args->at(0));
3972 NumberToStringStub stub(isolate());
3974 context()->Plug(eax);
// Inline intrinsic %_StringCharFromCode: converts a character code (eax)
// to a one-character string (ebx) via StringCharFromCodeGenerator, with a
// no-op runtime helper for the slow path.
3978 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3979 ZoneList<Expression*>* args = expr->arguments();
3980 DCHECK(args->length() == 1);
3982 VisitForAccumulatorValue(args->at(0));
3985 StringCharFromCodeGenerator generator(eax, ebx);
3986 generator.GenerateFast(masm_);
3989 NopRuntimeCallHelper call_helper;
3990 generator.GenerateSlow(masm_, call_helper);
3993 context()->Plug(ebx);
// Inline intrinsic %_StringCharCodeAt(string, index): yields the character
// code at the given index. Out-of-range indices produce NaN; a non-smi
// index is replaced by undefined in the result register to trigger
// conversion on the slow path.
3997 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3998 ZoneList<Expression*>* args = expr->arguments();
3999 DCHECK(args->length() == 2);
4001 VisitForStackValue(args->at(0));
4002 VisitForAccumulatorValue(args->at(1));
4004 Register object = ebx;
4005 Register index = eax;
4006 Register result = edx;
4010 Label need_conversion;
4011 Label index_out_of_range;
4013 StringCharCodeAtGenerator generator(object,
4018 &index_out_of_range,
4019 STRING_INDEX_IS_NUMBER);
4020 generator.GenerateFast(masm_);
4023 __ bind(&index_out_of_range);
4024 // When the index is out of range, the spec requires us to return
// NaN rather than raise an error.
4026 __ Move(result, Immediate(isolate()->factory()->nan_value()));
4029 __ bind(&need_conversion);
4030 // Move the undefined value into the result register, which will
4031 // trigger conversion.
4032 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
4035 NopRuntimeCallHelper call_helper;
4036 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4039 context()->Plug(result);
// Inline intrinsic %_StringCharAt(string, index): yields a one-character
// string. Out-of-range indices produce the empty string; a non-smi index
// is replaced by Smi zero to trigger conversion on the slow path.
4043 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4044 ZoneList<Expression*>* args = expr->arguments();
4045 DCHECK(args->length() == 2);
4047 VisitForStackValue(args->at(0));
4048 VisitForAccumulatorValue(args->at(1));
4050 Register object = ebx;
4051 Register index = eax;
4052 Register scratch = edx;
4053 Register result = eax;
4057 Label need_conversion;
4058 Label index_out_of_range;
4060 StringCharAtGenerator generator(object,
4066 &index_out_of_range,
4067 STRING_INDEX_IS_NUMBER);
4068 generator.GenerateFast(masm_);
4071 __ bind(&index_out_of_range);
4072 // When the index is out of range, the spec requires us to return
4073 // the empty string.
4074 __ Move(result, Immediate(isolate()->factory()->empty_string()));
4077 __ bind(&need_conversion);
4078 // Move smi zero into the result register, which will trigger
// conversion on the generator's slow path.
4080 __ Move(result, Immediate(Smi::FromInt(0)));
4083 NopRuntimeCallHelper call_helper;
4084 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4087 context()->Plug(result);
// Inline intrinsic %_StringAdd: first operand on the stack, second in eax;
// delegates to StringAddStub with full type checks and no pretenuring.
4091 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4092 ZoneList<Expression*>* args = expr->arguments();
4093 DCHECK_EQ(2, args->length());
4094 VisitForStackValue(args->at(0));
4095 VisitForAccumulatorValue(args->at(1));
4098 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4100 context()->Plug(eax);
// Inline intrinsic for %_StringCompare(a, b): pushes both operands and
// calls StringCompareStub; the comparison result is left in eax.
4104 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4105 ZoneList<Expression*>* args = expr->arguments();
4106 DCHECK_EQ(2, args->length());
4108 VisitForStackValue(args->at(0));
4109 VisitForStackValue(args->at(1));
4111 StringCompareStub stub(isolate());
4113 context()->Plug(eax);
// Inline intrinsic for %_CallFunction(receiver, args..., function).
// Fast path invokes the function directly when it is a JSFunction;
// otherwise falls back to Runtime::kCall (handles proxies etc.).
4117 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4118 ZoneList<Expression*>* args = expr->arguments();
4119 DCHECK(args->length() >= 2);
4121 int arg_count = args->length() - 2; // 2 ~ receiver and function.
// Push receiver plus the arg_count call arguments.
4122 for (int i = 0; i < arg_count + 1; ++i) {
4123 VisitForStackValue(args->at(i));
4125 VisitForAccumulatorValue(args->last()); // Function.
4127 Label runtime, done;
4128 // Check for non-function argument (including proxy).
4129 __ JumpIfSmi(eax, &runtime);
4130 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4131 __ j(not_equal, &runtime);
4133 // InvokeFunction requires the function in edi. Move it in there.
4134 __ mov(edi, result_register());
4135 ParameterCount count(arg_count);
4136 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
// Restore the context register after the call returns.
4137 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Slow path: let the runtime perform the call.
4142 __ CallRuntime(Runtime::kCall, args->length());
4145 context()->Plug(eax);
// Inline intrinsic used by implicit (default) class constructors to call
// the super constructor, forwarding any arguments found in the caller's
// arguments adaptor frame.
4149 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4150 ZoneList<Expression*>* args = expr->arguments();
4151 DCHECK(args->length() == 2);
4154 VisitForStackValue(args->at(0));
4157 VisitForStackValue(args->at(1));
// Resolve the super constructor as the prototype of the active function.
4158 __ CallRuntime(Runtime::kGetPrototype, 1);
4159 __ push(result_register());
4161 // Check if the calling frame is an arguments adaptor frame.
4162 Label adaptor_frame, args_set_up, runtime;
4163 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4164 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
4165 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4166 __ j(equal, &adaptor_frame);
4167 // default constructor has no arguments, so no adaptor frame means no args.
4168 __ mov(eax, Immediate(0));
4169 __ jmp(&args_set_up);
4171 // Copy arguments from adaptor frame.
4173 __ bind(&adaptor_frame);
4174 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4177 // Subtract 1 from arguments count, for new.target.
4178 __ sub(ecx, Immediate(1));
// edx now points just past the last forwarded argument in the caller frame.
4180 __ lea(edx, Operand(edx, ecx, times_pointer_size,
4181 StandardFrameConstants::kCallerSPOffset));
// Copy loop: push arguments from the adaptor frame, walking downward.
4184 __ push(Operand(edx, -1 * kPointerSize));
4185 __ sub(edx, Immediate(kPointerSize));
4187 __ j(not_zero, &loop);
4190 __ bind(&args_set_up);
// edi <- constructor function (below the pushed args); ebx <- no feedback.
4192 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4193 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4194 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4195 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4199 context()->Plug(eax);
// Inline intrinsic for %_RegExpConstructResult(length, index, input):
// builds a JSArray regexp-match result object via the dedicated stub.
4203 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4204 // Load the arguments on the stack and call the stub.
4205 RegExpConstructResultStub stub(isolate());
4206 ZoneList<Expression*>* args = expr->arguments();
4207 DCHECK(args->length() == 3);
4208 VisitForStackValue(args->at(0));
4209 VisitForStackValue(args->at(1));
4210 VisitForAccumulatorValue(args->at(2));
4214 context()->Plug(eax);
// Inline intrinsic for %_GetFromCache(cache_id, key): probes the
// JSFunctionResultCache at the cache's current "finger" position; on a
// hit returns the cached value, on a miss calls the runtime to do the
// full lookup and update the cache.
4218 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4219 ZoneList<Expression*>* args = expr->arguments();
4220 DCHECK_EQ(2, args->length());
// The cache id must be a compile-time Smi literal.
4222 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4223 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4225 Handle<FixedArray> jsfunction_result_caches(
4226 isolate()->native_context()->jsfunction_result_caches());
4227 if (jsfunction_result_caches->length() <= cache_id) {
// An unknown cache id is a compiler bug; abort in debug-style fashion.
4228 __ Abort(kAttemptToUseUndefinedCache);
4229 __ mov(eax, isolate()->factory()->undefined_value());
4230 context()->Plug(eax);
4234 VisitForAccumulatorValue(args->at(1));
4237 Register cache = ebx;
// Load the cache FixedArray out of the native context.
4239 __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
4241 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4242 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4244 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4246 Label done, not_found;
4247 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4248 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4249 // tmp now holds finger offset as a smi.
// Fast hit: the key at the finger matches; the value sits one slot later.
4250 __ cmp(key, FixedArrayElementOperand(cache, tmp));
4251 __ j(not_equal, &not_found);
4253 __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
4256 __ bind(&not_found);
4257 // Call runtime to perform the lookup.
4260 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4263 context()->Plug(eax);
// Inline intrinsic for %_HasCachedArrayIndex(string): tests whether the
// string's hash field contains a cached array index, producing a boolean
// in the current test/value context.
4267 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4268 ZoneList<Expression*>* args = expr->arguments();
4269 DCHECK(args->length() == 1);
4271 VisitForAccumulatorValue(args->at(0));
4273 __ AssertString(eax);
4275 Label materialize_true, materialize_false;
4276 Label* if_true = NULL;
4277 Label* if_false = NULL;
4278 Label* fall_through = NULL;
4279 context()->PrepareTest(&materialize_true, &materialize_false,
4280 &if_true, &if_false, &fall_through);
// The mask bits are zero exactly when a cached array index is present.
4282 __ test(FieldOperand(eax, String::kHashFieldOffset),
4283 Immediate(String::kContainsCachedArrayIndexMask));
4284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4285 Split(zero, if_true, if_false, fall_through);
4287 context()->Plug(if_true, if_false);
// Inline intrinsic for %_GetCachedArrayIndex(string): extracts the array
// index cached in the string's hash field (caller must have verified it
// exists, e.g. via %_HasCachedArrayIndex).
4291 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4292 ZoneList<Expression*>* args = expr->arguments();
4293 DCHECK(args->length() == 1);
4294 VisitForAccumulatorValue(args->at(0));
4296 __ AssertString(eax);
4298 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
// Decode the index bits of the hash field into a Smi.
4299 __ IndexFromHash(eax, eax);
4301 context()->Plug(eax);
// Inline fast path for Array.prototype.join on arrays of sequential
// one-byte strings with a flat one-byte separator. Three specialized copy
// loops are emitted depending on separator length (empty / one char /
// longer). Any precondition failure jumps to &bailout, which produces
// undefined so the caller falls back to the generic JS implementation.
// Register assignments are aliased carefully; note the explicit register
// lifetime hand-offs (array->elements, array_length->result_pos) below.
4305 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4306 Label bailout, done, one_char_separator, long_separator,
4307 non_trivial_array, not_size_one_array, loop,
4308 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4310 ZoneList<Expression*>* args = expr->arguments();
4311 DCHECK(args->length() == 2);
4312 // We will leave the separator on the stack until the end of the function.
4313 VisitForStackValue(args->at(1));
4314 // Load this to eax (= array)
4315 VisitForAccumulatorValue(args->at(0));
4316 // All aliases of the same register have disjoint lifetimes.
4317 Register array = eax;
4318 Register elements = no_reg; // Will be eax.
4320 Register index = edx;
4322 Register string_length = ecx;
4324 Register string = esi;
4326 Register scratch = ebx;
4328 Register array_length = edi;
4329 Register result_pos = no_reg; // Will be edi.
4331 // Separator operand is already pushed.
// Reserve two stack slots (result, array length) below the separator.
4332 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4333 Operand result_operand = Operand(esp, 1 * kPointerSize);
4334 Operand array_length_operand = Operand(esp, 0);
4335 __ sub(esp, Immediate(2 * kPointerSize));
4337 // Check that the array is a JSArray
4338 __ JumpIfSmi(array, &bailout);
4339 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4340 __ j(not_equal, &bailout);
4342 // Check that the array has fast elements.
4343 __ CheckFastElements(scratch, &bailout);
4345 // If the array has length zero, return the empty string.
4346 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4347 __ SmiUntag(array_length);
4348 __ j(not_zero, &non_trivial_array);
4349 __ mov(result_operand, isolate()->factory()->empty_string());
4352 // Save the array length.
4353 __ bind(&non_trivial_array);
4354 __ mov(array_length_operand, array_length);
4356 // Save the FixedArray containing array's elements.
4357 // End of array's live range.
4359 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4363 // Check that all array elements are sequential one-byte strings, and
4364 // accumulate the sum of their lengths, as a smi-encoded value.
4365 __ Move(index, Immediate(0));
4366 __ Move(string_length, Immediate(0));
4367 // Loop condition: while (index < length).
4368 // Live loop registers: index, array_length, string,
4369 // scratch, string_length, elements.
4370 if (generate_debug_code_) {
4371 __ cmp(index, array_length);
4372 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4375 __ mov(string, FieldOperand(elements,
4378 FixedArray::kHeaderSize));
// Element must be a sequential one-byte string or we bail out.
4379 __ JumpIfSmi(string, &bailout);
4380 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4381 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4382 __ and_(scratch, Immediate(
4383 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4384 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4385 __ j(not_equal, &bailout);
// Accumulate smi-encoded lengths; overflow means the result is too long.
4386 __ add(string_length,
4387 FieldOperand(string, SeqOneByteString::kLengthOffset));
4388 __ j(overflow, &bailout);
4389 __ add(index, Immediate(1));
4390 __ cmp(index, array_length);
4393 // If array_length is 1, return elements[0], a string.
4394 __ cmp(array_length, 1);
4395 __ j(not_equal, &not_size_one_array);
4396 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4397 __ mov(result_operand, scratch);
4400 __ bind(&not_size_one_array);
4402 // End of array_length live range.
// Hand the edi register over from array_length to result_pos.
4403 result_pos = array_length;
4404 array_length = no_reg;
4407 // string_length: Sum of string lengths, as a smi.
4408 // elements: FixedArray of strings.
4410 // Check that the separator is a flat one-byte string.
4411 __ mov(string, separator_operand);
4412 __ JumpIfSmi(string, &bailout);
4413 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4414 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4415 __ and_(scratch, Immediate(
4416 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4417 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4418 __ j(not_equal, &bailout);
4420 // Add (separator length times array_length) - separator length
4421 // to string_length.
4422 __ mov(scratch, separator_operand);
4423 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4424 __ sub(string_length, scratch); // May be negative, temporarily.
4425 __ imul(scratch, array_length_operand);
4426 __ j(overflow, &bailout);
4427 __ add(string_length, scratch);
4428 __ j(overflow, &bailout);
// Untag the smi total length (smi tag is one bit on ia32).
4430 __ shr(string_length, 1);
4431 // Live registers and stack values:
// Allocate the result string and remember its payload start.
4434 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4436 __ mov(result_operand, result_pos);
4437 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
// Dispatch on separator length: 0, 1, or longer.
4440 __ mov(string, separator_operand);
4441 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4442 Immediate(Smi::FromInt(1)));
4443 __ j(equal, &one_char_separator);
4444 __ j(greater, &long_separator);
4447 // Empty separator case
4448 __ mov(index, Immediate(0));
4449 __ jmp(&loop_1_condition);
4450 // Loop condition: while (index < length).
4452 // Each iteration of the loop concatenates one string to the result.
4453 // Live values in registers:
4454 // index: which element of the elements array we are adding to the result.
4455 // result_pos: the position to which we are currently copying characters.
4456 // elements: the FixedArray of strings we are joining.
4458 // Get string = array[index].
4459 __ mov(string, FieldOperand(elements, index,
4461 FixedArray::kHeaderSize));
4462 __ mov(string_length,
4463 FieldOperand(string, String::kLengthOffset));
4464 __ shr(string_length, 1);
4466 FieldOperand(string, SeqOneByteString::kHeaderSize));
4467 __ CopyBytes(string, result_pos, string_length, scratch);
4468 __ add(index, Immediate(1));
4469 __ bind(&loop_1_condition);
4470 __ cmp(index, array_length_operand);
4471 __ j(less, &loop_1); // End while (index < length).
4476 // One-character separator case
4477 __ bind(&one_char_separator);
4478 // Replace separator with its one-byte character value.
4479 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4480 __ mov_b(separator_operand, scratch);
4482 __ Move(index, Immediate(0));
4483 // Jump into the loop after the code that copies the separator, so the first
4484 // element is not preceded by a separator
4485 __ jmp(&loop_2_entry);
4486 // Loop condition: while (index < length).
4488 // Each iteration of the loop concatenates one string to the result.
4489 // Live values in registers:
4490 // index: which element of the elements array we are adding to the result.
4491 // result_pos: the position to which we are currently copying characters.
4493 // Copy the separator character to the result.
4494 __ mov_b(scratch, separator_operand);
4495 __ mov_b(Operand(result_pos, 0), scratch);
4498 __ bind(&loop_2_entry);
4499 // Get string = array[index].
4500 __ mov(string, FieldOperand(elements, index,
4502 FixedArray::kHeaderSize));
4503 __ mov(string_length,
4504 FieldOperand(string, String::kLengthOffset));
4505 __ shr(string_length, 1);
4507 FieldOperand(string, SeqOneByteString::kHeaderSize));
4508 __ CopyBytes(string, result_pos, string_length, scratch);
4509 __ add(index, Immediate(1));
4511 __ cmp(index, array_length_operand);
4512 __ j(less, &loop_2); // End while (index < length).
4516 // Long separator case (separator is more than one character).
4517 __ bind(&long_separator);
4519 __ Move(index, Immediate(0));
4520 // Jump into the loop after the code that copies the separator, so the first
4521 // element is not preceded by a separator
4522 __ jmp(&loop_3_entry);
4523 // Loop condition: while (index < length).
4525 // Each iteration of the loop concatenates one string to the result.
4526 // Live values in registers:
4527 // index: which element of the elements array we are adding to the result.
4528 // result_pos: the position to which we are currently copying characters.
4530 // Copy the separator to the result.
4531 __ mov(string, separator_operand);
4532 __ mov(string_length,
4533 FieldOperand(string, String::kLengthOffset));
4534 __ shr(string_length, 1);
4536 FieldOperand(string, SeqOneByteString::kHeaderSize));
4537 __ CopyBytes(string, result_pos, string_length, scratch);
4539 __ bind(&loop_3_entry);
4540 // Get string = array[index].
4541 __ mov(string, FieldOperand(elements, index,
4543 FixedArray::kHeaderSize));
4544 __ mov(string_length,
4545 FieldOperand(string, String::kLengthOffset));
4546 __ shr(string_length, 1);
4548 FieldOperand(string, SeqOneByteString::kHeaderSize));
4549 __ CopyBytes(string, result_pos, string_length, scratch);
4550 __ add(index, Immediate(1));
4552 __ cmp(index, array_length_operand);
4553 __ j(less, &loop_3); // End while (index < length).
// Bailout path: produce undefined so the caller takes the generic path.
4558 __ mov(result_operand, isolate()->factory()->undefined_value());
4560 __ mov(eax, result_operand);
4561 // Drop temp values from the stack, and restore context register.
4562 __ add(esp, Immediate(3 * kPointerSize));
4564 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4565 context()->Plug(eax);
// Inline intrinsic for %_DebugIsActive(): loads the debugger-active byte
// from an external reference into eax.
4569 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4570 DCHECK(expr->arguments()->length() == 0);
4571 ExternalReference debug_is_active =
4572 ExternalReference::debug_is_active_address(isolate());
4573 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4575 context()->Plug(eax);
// Emits super(...spread) lowered as a call to the ReflectConstruct JS
// builtin: pushes the builtin, the super constructor, the arguments
// array and new.target, performs the call, then initializes 'this'.
4579 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4580 // Assert: expr == CallRuntime("ReflectConstruct")
4581 DCHECK_EQ(1, expr->arguments()->length());
4582 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4584 ZoneList<Expression*>* args = call->arguments();
4585 DCHECK_EQ(3, args->length());
4587 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4588 DCHECK_NOT_NULL(super_call_ref);
4590 // Load ReflectConstruct function
4591 EmitLoadJSRuntimeFunction(call);
4593 // Push the target function under the receiver
4594 __ push(Operand(esp, 0));
4595 __ mov(Operand(esp, kPointerSize), eax);
4597 // Push super constructor
4598 EmitLoadSuperConstructor(super_call_ref);
4599 __ Push(result_register());
4601 // Push arguments array
4602 VisitForStackValue(args->at(1));
// Third argument is new.target, always a VariableProxy here.
4605 DCHECK(args->at(2)->IsVariableProxy());
4606 VisitForStackValue(args->at(2));
4608 EmitCallJSRuntimeFunction(call);
4610 // Restore context register.
4611 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4612 context()->DropAndPlug(1, eax);
4614 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4615 EmitInitializeThisAfterSuper(super_call_ref);
// Loads a JS-implemented runtime function by name from the builtins
// object: pushes the builtins object as receiver and leaves the function
// in eax via a load IC.
4619 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4620 // Push the builtins object as receiver.
4621 __ mov(eax, GlobalObjectOperand());
4622 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4624 // Load the function from the receiver.
4625 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4626 __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
// Feedback slot for the load IC comes from the CallRuntime node.
4627 __ mov(LoadDescriptor::SlotRegister(),
4628 Immediate(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4629 CallLoadIC(NOT_CONTEXTUAL);
// Calls a JS-implemented runtime function whose target and arguments are
// already on the stack (target below receiver below args), using
// CallFunctionStub with the function loaded into edi.
4633 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4634 ZoneList<Expression*>* args = expr->arguments();
4635 int arg_count = args->length();
4637 // Record source position of the IC call.
4638 SetSourcePosition(expr->position());
4639 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
// Fetch the function (pushed beneath receiver + args) into edi.
4640 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
// Compiles a CallRuntime AST node. JS-runtime calls go through the
// builtins object; inline intrinsics dispatch to their Emit* generator
// via the FOR_EACH_FULL_CODE_INTRINSIC table; everything else becomes a
// C++ runtime call.
4645 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4646 ZoneList<Expression*>* args = expr->arguments();
4647 int arg_count = args->length();
4649 if (expr->is_jsruntime()) {
4650 Comment cmnt(masm_, "[ CallRuntime");
4651 EmitLoadJSRuntimeFunction(expr);
4653 // Push the target function under the receiver.
4654 __ push(Operand(esp, 0));
4655 __ mov(Operand(esp, kPointerSize), eax);
4657 // Push the arguments ("left-to-right").
4658 for (int i = 0; i < arg_count; i++) {
4659 VisitForStackValue(args->at(i));
4662 EmitCallJSRuntimeFunction(expr);
4664 // Restore context register.
4665 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Drop the receiver, leave the result (eax) plugged into the context.
4666 context()->DropAndPlug(1, eax);
4669 const Runtime::Function* function = expr->function();
4670 switch (function->function_id) {
4671 #define CALL_INTRINSIC_GENERATOR(Name) \
4672 case Runtime::kInline##Name: { \
4673 Comment cmnt(masm_, "[ Inline" #Name); \
4674 return Emit##Name(expr); \
4676 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4677 #undef CALL_INTRINSIC_GENERATOR
4679 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4680 // Push the arguments ("left-to-right").
4681 for (int i = 0; i < arg_count; i++) {
4682 VisitForStackValue(args->at(i));
4685 // Call the C runtime function.
4686 __ CallRuntime(expr->function(), arg_count);
4687 context()->Plug(eax);
// Compiles unary operations: delete (property / variable / other), void,
// logical not (effect, test, and value contexts handled separately), and
// typeof via TypeofStub.
4694 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4695 switch (expr->op()) {
4696 case Token::DELETE: {
4697 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4698 Property* property = expr->expression()->AsProperty();
4699 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4701 if (property != NULL) {
// delete obj[key]: evaluate both, call the DELETE builtin with the
// current language mode.
4702 VisitForStackValue(property->obj());
4703 VisitForStackValue(property->key());
4704 __ push(Immediate(Smi::FromInt(language_mode())));
4705 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4706 context()->Plug(eax);
4707 } else if (proxy != NULL) {
4708 Variable* var = proxy->var();
4709 // Delete of an unqualified identifier is disallowed in strict mode
4710 // but "delete this" is allowed.
4711 DCHECK(is_sloppy(language_mode()) || var->is_this());
4712 if (var->IsUnallocated()) {
// Global variable: delete on the global object, always sloppy.
4713 __ push(GlobalObjectOperand());
4714 __ push(Immediate(var->name()));
4715 __ push(Immediate(Smi::FromInt(SLOPPY)));
4716 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4717 context()->Plug(eax);
4718 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4719 // Result of deleting non-global variables is false. 'this' is
4720 // not really a variable, though we implement it as one. The
4721 // subexpression does not have side effects.
4722 context()->Plug(var->is_this());
4724 // Non-global variable. Call the runtime to try to delete from the
4725 // context where the variable was introduced.
4726 __ push(context_register());
4727 __ push(Immediate(var->name()));
4728 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4729 context()->Plug(eax);
4732 // Result of deleting non-property, non-variable reference is true.
4733 // The subexpression may have side effects.
4734 VisitForEffect(expr->expression());
4735 context()->Plug(true);
4741 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
// void expr: evaluate for effect only, result is undefined.
4742 VisitForEffect(expr->expression());
4743 context()->Plug(isolate()->factory()->undefined_value());
4748 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4749 if (context()->IsEffect()) {
4750 // Unary NOT has no side effects so it's only necessary to visit the
4751 // subexpression. Match the optimizing compiler by not branching.
4752 VisitForEffect(expr->expression());
4753 } else if (context()->IsTest()) {
4754 const TestContext* test = TestContext::cast(context());
4755 // The labels are swapped for the recursive call.
4756 VisitForControl(expr->expression(),
4757 test->false_label(),
4759 test->fall_through());
4760 context()->Plug(test->true_label(), test->false_label());
4762 // We handle value contexts explicitly rather than simply visiting
4763 // for control and plugging the control flow into the context,
4764 // because we need to prepare a pair of extra administrative AST ids
4765 // for the optimizing compiler.
4766 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4767 Label materialize_true, materialize_false, done;
4768 VisitForControl(expr->expression(),
// NOT inverts: subexpression-true materializes false and vice versa,
// with bailout ids recorded for the optimizing compiler.
4772 __ bind(&materialize_true);
4773 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4774 if (context()->IsAccumulatorValue()) {
4775 __ mov(eax, isolate()->factory()->true_value());
4777 __ Push(isolate()->factory()->true_value());
4779 __ jmp(&done, Label::kNear);
4780 __ bind(&materialize_false);
4781 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4782 if (context()->IsAccumulatorValue()) {
4783 __ mov(eax, isolate()->factory()->false_value());
4785 __ Push(isolate()->factory()->false_value());
4792 case Token::TYPEOF: {
4793 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4795 AccumulatorValueContext context(this);
4796 VisitForTypeofValue(expr->expression());
4799 TypeofStub typeof_stub(isolate());
4800 __ CallStub(&typeof_stub);
4801 context()->Plug(eax);
// Compiles ++/-- (prefix and postfix) on variables and named/keyed
// (super) properties. Loads the old value, optionally saves it for
// postfix results, performs an inline smi add/sub with a patchable smi
// check (falling back to ToNumber + BinaryOpIC), then stores the new
// value back through the appropriate assignment path.
4811 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4812 DCHECK(expr->expression()->IsValidReferenceExpression());
4814 Comment cmnt(masm_, "[ CountOperation");
4815 SetSourcePosition(expr->position());
4817 Property* prop = expr->expression()->AsProperty();
4818 LhsKind assign_type = Property::GetAssignType(prop);
4820 // Evaluate expression and get value.
4821 if (assign_type == VARIABLE) {
4822 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4823 AccumulatorValueContext context(this);
4824 EmitVariableLoad(expr->expression()->AsVariableProxy());
4826 // Reserve space for result of postfix operation.
4827 if (expr->is_postfix() && !context()->IsEffect()) {
4828 __ push(Immediate(Smi::FromInt(0)));
4830 switch (assign_type) {
4831 case NAMED_PROPERTY: {
4832 // Put the object both on the stack and in the register.
4833 VisitForStackValue(prop->obj());
4834 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4835 EmitNamedPropertyLoad(prop);
4839 case NAMED_SUPER_PROPERTY: {
// Stack layout for super loads: this, home_object (duplicated for the
// later store).
4840 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4841 VisitForAccumulatorValue(
4842 prop->obj()->AsSuperPropertyReference()->home_object());
4843 __ push(result_register());
4844 __ push(MemOperand(esp, kPointerSize));
4845 __ push(result_register());
4846 EmitNamedSuperPropertyLoad(prop);
4850 case KEYED_SUPER_PROPERTY: {
4851 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4853 prop->obj()->AsSuperPropertyReference()->home_object());
4854 VisitForAccumulatorValue(prop->key());
4855 __ push(result_register());
4856 __ push(MemOperand(esp, 2 * kPointerSize));
4857 __ push(MemOperand(esp, 2 * kPointerSize));
4858 __ push(result_register());
4859 EmitKeyedSuperPropertyLoad(prop);
4863 case KEYED_PROPERTY: {
4864 VisitForStackValue(prop->obj());
4865 VisitForStackValue(prop->key());
4866 __ mov(LoadDescriptor::ReceiverRegister(),
4867 Operand(esp, kPointerSize)); // Object.
4868 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4869 EmitKeyedPropertyLoad(prop);
4878 // We need a second deoptimization point after loading the value
4879 // in case evaluating the property load my have a side effect.
4880 if (assign_type == VARIABLE) {
4881 PrepareForBailout(expr->expression(), TOS_REG);
4883 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4886 // Inline smi case if we are in a loop.
4887 Label done, stub_call;
4888 JumpPatchSite patch_site(masm_);
4889 if (ShouldInlineSmiCase(expr->op())) {
4891 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4893 // Save result for postfix expressions.
4894 if (expr->is_postfix()) {
4895 if (!context()->IsEffect()) {
4896 // Save the result on the stack. If we have a named or keyed property
4897 // we store the result under the receiver that is currently on top
// of the stack; the slot depth depends on the assignment type.
4899 switch (assign_type) {
4903 case NAMED_PROPERTY:
4904 __ mov(Operand(esp, kPointerSize), eax);
4906 case NAMED_SUPER_PROPERTY:
4907 __ mov(Operand(esp, 2 * kPointerSize), eax);
4909 case KEYED_PROPERTY:
4910 __ mov(Operand(esp, 2 * kPointerSize), eax);
4912 case KEYED_SUPER_PROPERTY:
4913 __ mov(Operand(esp, 3 * kPointerSize), eax);
// Inline smi increment/decrement; on overflow undo and use the stub.
4919 if (expr->op() == Token::INC) {
4920 __ add(eax, Immediate(Smi::FromInt(1)));
4922 __ sub(eax, Immediate(Smi::FromInt(1)));
4924 __ j(no_overflow, &done, Label::kNear);
4925 // Call stub. Undo operation first.
4926 if (expr->op() == Token::INC) {
4927 __ sub(eax, Immediate(Smi::FromInt(1)));
4929 __ add(eax, Immediate(Smi::FromInt(1)));
4931 __ jmp(&stub_call, Label::kNear);
// Non-smi operand: convert to a number first.
4934 ToNumberStub convert_stub(isolate());
4935 __ CallStub(&convert_stub);
4936 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4938 // Save result for postfix expressions.
4939 if (expr->is_postfix()) {
4940 if (!context()->IsEffect()) {
4941 // Save the result on the stack. If we have a named or keyed property
4942 // we store the result under the receiver that is currently on top
// of the stack (same slot layout as the smi fast path above).
4944 switch (assign_type) {
4948 case NAMED_PROPERTY:
4949 __ mov(Operand(esp, kPointerSize), eax);
4951 case NAMED_SUPER_PROPERTY:
4952 __ mov(Operand(esp, 2 * kPointerSize), eax);
4954 case KEYED_PROPERTY:
4955 __ mov(Operand(esp, 2 * kPointerSize), eax);
4957 case KEYED_SUPER_PROPERTY:
4958 __ mov(Operand(esp, 3 * kPointerSize), eax);
4964 // Record position before stub call.
4965 SetSourcePosition(expr->position());
4967 // Call stub for +1/-1.
4968 __ bind(&stub_call);
4970 __ mov(eax, Immediate(Smi::FromInt(1)));
4972 CodeFactory::BinaryOpIC(
4973 isolate(), expr->binary_op(), language_mode()).code();
4974 CallIC(code, expr->CountBinOpFeedbackId());
4975 patch_site.EmitPatchInfo();
4978 // Store the value returned in eax.
4979 switch (assign_type) {
4981 if (expr->is_postfix()) {
4982 // Perform the assignment as if via '='.
4983 { EffectContext context(this);
4984 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4985 Token::ASSIGN, expr->CountSlot());
4986 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4989 // For all contexts except EffectContext We have the result on
4990 // top of the stack.
4991 if (!context()->IsEffect()) {
4992 context()->PlugTOS();
4995 // Perform the assignment as if via '='.
4996 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4997 Token::ASSIGN, expr->CountSlot());
4998 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4999 context()->Plug(eax);
5002 case NAMED_PROPERTY: {
5003 __ mov(StoreDescriptor::NameRegister(),
5004 prop->key()->AsLiteral()->value());
5005 __ pop(StoreDescriptor::ReceiverRegister());
5006 if (FLAG_vector_stores) {
5007 EmitLoadStoreICSlot(expr->CountSlot());
5010 CallStoreIC(expr->CountStoreFeedbackId());
5012 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5013 if (expr->is_postfix()) {
5014 if (!context()->IsEffect()) {
5015 context()->PlugTOS();
5018 context()->Plug(eax);
5022 case NAMED_SUPER_PROPERTY: {
5023 EmitNamedSuperPropertyStore(prop);
5024 if (expr->is_postfix()) {
5025 if (!context()->IsEffect()) {
5026 context()->PlugTOS();
5029 context()->Plug(eax);
5033 case KEYED_SUPER_PROPERTY: {
5034 EmitKeyedSuperPropertyStore(prop);
5035 if (expr->is_postfix()) {
5036 if (!context()->IsEffect()) {
5037 context()->PlugTOS();
5040 context()->Plug(eax);
5044 case KEYED_PROPERTY: {
5045 __ pop(StoreDescriptor::NameRegister());
5046 __ pop(StoreDescriptor::ReceiverRegister());
5048 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5049 if (FLAG_vector_stores) {
5050 EmitLoadStoreICSlot(expr->CountSlot());
5053 CallIC(ic, expr->CountStoreFeedbackId());
5055 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5056 if (expr->is_postfix()) {
5057 // Result is on the stack
5058 if (!context()->IsEffect()) {
5059 context()->PlugTOS();
5062 context()->Plug(eax);
// Loads the operand of a typeof expression into eax without throwing a
// reference error for unresolved globals/lookup slots (typeof semantics).
5070 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
5071 VariableProxy* proxy = expr->AsVariableProxy();
5072 DCHECK(!context()->IsEffect());
5073 DCHECK(!context()->IsTest());
5075 if (proxy != NULL && proxy->var()->IsUnallocated()) {
5076 Comment cmnt(masm_, "[ Global variable");
5077 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
5078 __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
5079 __ mov(LoadDescriptor::SlotRegister(),
5080 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
5081 // Use a regular load, not a contextual load, to avoid a reference
// error for undeclared globals.
5083 CallLoadIC(NOT_CONTEXTUAL);
5084 PrepareForBailout(expr, TOS_REG);
5085 context()->Plug(eax);
5086 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
5087 Comment cmnt(masm_, "[ Lookup slot");
5090 // Generate code for loading from variables potentially shadowed
5091 // by eval-introduced variables.
5092 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
// Slow path: runtime lookup that does not throw on a missing binding.
5096 __ push(Immediate(proxy->name()));
5097 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
5098 PrepareForBailout(expr, TOS_REG);
5101 context()->Plug(eax);
5103 // This expression cannot throw a reference error at the top level.
5104 VisitInDuplicateContext(expr);
// Emits an optimized comparison of (typeof sub_expr) against a literal
// string, branching directly instead of materializing the typeof string.
// One specialized check per recognized type name; any other literal
// always compares false.
5109 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5110 Expression* sub_expr,
5111 Handle<String> check) {
5112 Label materialize_true, materialize_false;
5113 Label* if_true = NULL;
5114 Label* if_false = NULL;
5115 Label* fall_through = NULL;
5116 context()->PrepareTest(&materialize_true, &materialize_false,
5117 &if_true, &if_false, &fall_through);
5119 { AccumulatorValueContext context(this);
5120 VisitForTypeofValue(sub_expr);
5122 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5124 Factory* factory = isolate()->factory();
5125 if (String::Equals(check, factory->number_string())) {
// "number": smi or heap number.
5126 __ JumpIfSmi(eax, if_true);
5127 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
5128 isolate()->factory()->heap_number_map());
5129 Split(equal, if_true, if_false, fall_through);
5130 } else if (String::Equals(check, factory->string_string())) {
5131 __ JumpIfSmi(eax, if_false);
5132 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
5133 __ j(above_equal, if_false);
5134 // Check for undetectable objects => false.
5135 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5136 1 << Map::kIsUndetectable);
5137 Split(zero, if_true, if_false, fall_through);
5138 } else if (String::Equals(check, factory->symbol_string())) {
5139 __ JumpIfSmi(eax, if_false);
5140 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
5141 Split(equal, if_true, if_false, fall_through);
5142 } else if (String::Equals(check, factory->boolean_string())) {
// "boolean": only the two oddball values compare true.
5143 __ cmp(eax, isolate()->factory()->true_value());
5144 __ j(equal, if_true);
5145 __ cmp(eax, isolate()->factory()->false_value());
5146 Split(equal, if_true, if_false, fall_through);
5147 } else if (String::Equals(check, factory->undefined_string())) {
5148 __ cmp(eax, isolate()->factory()->undefined_value());
5149 __ j(equal, if_true);
5150 __ JumpIfSmi(eax, if_false);
5151 // Check for undetectable objects => true.
5152 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
5153 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
5154 __ test(ecx, Immediate(1 << Map::kIsUndetectable))
5155 Split(not_zero, if_true, if_false, fall_through);
5156 } else if (String::Equals(check, factory->function_string())) {
5157 __ JumpIfSmi(eax, if_false);
// "function": JSFunction or function proxy.
5158 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5159 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
5160 __ j(equal, if_true);
5161 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
5162 Split(equal, if_true, if_false, fall_through);
5163 } else if (String::Equals(check, factory->object_string())) {
5164 __ JumpIfSmi(eax, if_false);
// "object": null, or a non-callable spec object that is not undetectable.
5165 __ cmp(eax, isolate()->factory()->null_value());
5166 __ j(equal, if_true);
5167 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
5168 __ j(below, if_false);
5169 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5170 __ j(above, if_false);
5171 // Check for undetectable objects => false.
5172 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5173 1 << Map::kIsUndetectable);
5174 Split(zero, if_true, if_false, fall_through);
5176 if (if_false != fall_through) __ jmp(if_false);
5178 context()->Plug(if_true, if_false);
// Generates code for a comparison expression (==, ===, <, in, instanceof, ...).
// Tries an inlined literal compare first; otherwise evaluates both operands
// and dispatches on the operator.
// NOTE(review): the `switch (op)` opener and the `case Token::IN:` label are
// not visible in this chunk (original lines 5201-5202 and the case label are
// missing), as are parts of the default case — comments describe only the
// visible code.
5182 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5183 Comment cmnt(masm_, "[ CompareOperation");
5184 SetSourcePosition(expr->position());
5186 // First we try a fast inlined version of the compare when one of
5187 // the operands is a literal.
5188 if (TryLiteralCompare(expr)) return;
5190 // Always perform the comparison for its control flow. Pack the result
5191 // into the expression's context after the comparison is performed.
5192 Label materialize_true, materialize_false;
5193 Label* if_true = NULL;
5194 Label* if_false = NULL;
5195 Label* fall_through = NULL;
5196 context()->PrepareTest(&materialize_true, &materialize_false,
5197 &if_true, &if_false, &fall_through);
5199 Token::Value op = expr->op();
5200 VisitForStackValue(expr->left());
// Token::IN: push the right operand and call the IN builtin; it returns a
// boolean in eax that is compared against the canonical true value.
5203 VisitForStackValue(expr->right());
5204 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5205 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5206 __ cmp(eax, isolate()->factory()->true_value());
5207 Split(equal, if_true, if_false, fall_through);
5210 case Token::INSTANCEOF: {
5211 VisitForStackValue(expr->right());
5212 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5214 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5216 // The stub returns 0 for true.
5217 Split(zero, if_true, if_false, fall_through);
// Default case: generic comparison. Right operand goes to the accumulator;
// left operand is on the stack (pop into edx is not visible in this chunk).
5222 VisitForAccumulatorValue(expr->right());
5223 Condition cc = CompareIC::ComputeCondition(op);
// Optional inlined smi fast path, patched later via the JumpPatchSite.
5226 bool inline_smi_code = ShouldInlineSmiCase(op);
5227 JumpPatchSite patch_site(masm_);
5228 if (inline_smi_code) {
5232 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5234 Split(cc, if_true, if_false, NULL);
5235 __ bind(&slow_case);
5238 // Record position and call the compare IC.
5239 SetSourcePosition(expr->position());
5241 CodeFactory::CompareIC(isolate(), op, language_mode()).code();
5242 CallIC(ic, expr->CompareOperationFeedbackId());
5243 patch_site.EmitPatchInfo();
5245 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The IC leaves its result in eax; the flags from the IC's compare drive cc.
5247 Split(cc, if_true, if_false, fall_through);
5251 // Convert the result of the comparison into one expected for this
5252 // expression's context.
5253 context()->Plug(if_true, if_false);
// Emits an inlined comparison of <sub_expr> against null or undefined
// (selected by the NilValue parameter `nil`; its declaration line is not
// visible in this chunk). For === a direct pointer compare against the
// canonical oddball suffices; otherwise a CompareNilIC is called.
5257 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5258 Expression* sub_expr,
5260 Label materialize_true, materialize_false;
5261 Label* if_true = NULL;
5262 Label* if_false = NULL;
5263 Label* fall_through = NULL;
5264 context()->PrepareTest(&materialize_true, &materialize_false,
5265 &if_true, &if_false, &fall_through);
// Evaluate the operand into eax.
5267 VisitForAccumulatorValue(sub_expr);
5268 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// Pick the canonical oddball to compare against.
5270 Handle<Object> nil_value = nil == kNullValue
5271 ? isolate()->factory()->null_value()
5272 : isolate()->factory()->undefined_value();
5273 if (expr->op() == Token::EQ_STRICT) {
// Strict equality: identity compare against the single canonical value.
5274 __ cmp(eax, nil_value);
5275 Split(equal, if_true, if_false, fall_through);
// Non-strict path (the `} else {` line is not visible in this chunk):
// the CompareNilIC returns its result in eax, tested for non-zero.
5277 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5278 CallIC(ic, expr->CompareOperationFeedbackId());
5280 Split(not_zero, if_true, if_false, fall_through);
5282 context()->Plug(if_true, if_false);
// Loads the current JSFunction (the closure stored in the standard frame
// slot) into eax and plugs it into the expression context.
5286 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5287 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5288 context()->Plug(eax);
5292 Register FullCodeGenerator::result_register() {
5297 Register FullCodeGenerator::context_register() {
// Stores `value` into the stack frame at ebp + frame_offset. The offset must
// be pointer-size aligned (checked by the DCHECK).
5302 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5303 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5304 __ mov(Operand(ebp, frame_offset), value);
// Loads the given slot of the current context (held in esi) into dst.
5308 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5309 __ mov(dst, ContextOperand(esi, context_index));
// Pushes the closure argument used when allocating a new context, choosing
// the value based on the kind of the declaration scope:
//   - script/module scope: smi 0 sentinel (runtime substitutes the canonical
//     empty function),
//   - eval scope: the closure of the current context,
//   - function scope: the function in the current frame.
5313 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5314 Scope* declaration_scope = scope()->DeclarationScope();
5315 if (declaration_scope->is_script_scope() ||
5316 declaration_scope->is_module_scope()) {
5317 // Contexts nested in the native context have a canonical empty function
5318 // as their closure, not the anonymous closure containing the global
5319 // code. Pass a smi sentinel and let the runtime look up the empty
5321 __ push(Immediate(Smi::FromInt(0)));
5322 } else if (declaration_scope->is_eval_scope()) {
5323 // Contexts nested inside eval code have the same closure as the context
5324 // calling eval, not the anonymous closure containing the eval code.
5325 // Fetch it from the context.
5326 __ push(ContextOperand(esi, Context::CLOSURE_INDEX))
5327 // (the `} else {` line is not visible in this chunk)
5328 DCHECK(declaration_scope->is_function_scope());
5329 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5334 // ----------------------------------------------------------------------------
5335 // Non-local control flow support.
// Prologue for a finally block: "cooks" the return address on top of the
// stack into a code-relative smi delta (so it survives GC code moves), then
// saves the result register and the pending message, and clears the latter.
// NOTE(review): several lines are missing from this chunk (e.g. the pops/
// pushes around the edx arithmetic) — comments describe only the visible code.
5337 void FullCodeGenerator::EnterFinallyBlock() {
5338 // Cook return address on top of stack (smi encoded Code* delta)
5339 DCHECK(!result_register().is(edx));
// Convert the absolute return address into an offset from the code object.
5341 __ sub(edx, Immediate(masm_->CodeObject()));
// The asserts guarantee that shifting left by 1 produces a valid smi tag.
5342 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5343 STATIC_ASSERT(kSmiTag == 0);
5347 // Store result register while executing finally block.
5348 __ push(result_register());
5350 // Store pending message while executing finally block.
5351 ExternalReference pending_message_obj =
5352 ExternalReference::address_of_pending_message_obj(isolate());
5353 __ mov(edx, Operand::StaticVariable(pending_message_obj));
// Reset the isolate's pending message now that it has been saved.
5356 ClearPendingMessage();
// Epilogue for a finally block: mirrors EnterFinallyBlock. Restores the
// pending message and the result register from the stack, then "uncooks"
// the smi-encoded return-address delta back into an absolute address.
// NOTE(review): the pops into edx are not visible in this chunk.
5360 void FullCodeGenerator::ExitFinallyBlock() {
5361 DCHECK(!result_register().is(edx));
5362 // Restore pending message from stack.
5364 ExternalReference pending_message_obj =
5365 ExternalReference::address_of_pending_message_obj(isolate());
5366 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5368 // Restore result register from stack.
5369 __ pop(result_register());
5371 // Uncook return address.
// Add the code object base back to the delta to get the absolute address.
5374 __ add(edx, Immediate(masm_->CodeObject()));
// Resets the isolate's pending-message slot to the hole value, using edx as
// a scratch register (hence the DCHECK that it is not the result register).
5379 void FullCodeGenerator::ClearPendingMessage() {
5380 DCHECK(!result_register().is(edx));
5381 ExternalReference pending_message_obj =
5382 ExternalReference::address_of_pending_message_obj(isolate());
5383 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
5384 __ mov(Operand::StaticVariable(pending_message_obj), edx);
// Loads the smi-encoded feedback-vector slot index into the vector-store IC's
// slot register. Only valid when vector stores are enabled and the slot is
// valid (checked by the DCHECK).
5388 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5389 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5390 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5391 Immediate(SmiFromSlot(slot)));
// Opcode bytes used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// toggle the back-edge sequence between the interrupt check and OSR forms.
5398 static const byte kJnsInstruction = 0x79;  // ia32 `jns rel8` opcode.
5399 static const byte kJnsOffset = 0x11;       // rel8 displacement of the jns.
5400 static const byte kNopByteOne = 0x66;      // first byte of the 2-byte nop
5401 static const byte kNopByteTwo = 0x90;      // (0x66 0x90) replacing the jns.
5403 static const byte kCallInstruction = 0xe8;  // `call rel32` opcode.
// Patches the back-edge sequence ending at `pc` in unoptimized code. The
// sequence is `sub <counter>, <delta>; jns <skip>; call <stub>`: for the
// interrupt state the jns is (re)installed so the call is normally skipped;
// for OSR states the jns is overwritten with a 2-byte nop so the call always
// executes. The call target is then redirected to `replacement_code`.
// NOTE(review): some lines (e.g. the `pc` parameter declaration, case labels,
// and break statements) are not visible in this chunk.
5407 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5409 BackEdgeState target_state,
5410 Code* replacement_code) {
// Layout relative to pc: [jns][rel8][call][rel32]; the rel32 operand is the
// last kIntSize bytes, the jns starts 3 bytes before it.
5411 Address call_target_address = pc - kIntSize;
5412 Address jns_instr_address = call_target_address - 3;
5413 Address jns_offset_address = call_target_address - 2;
5415 switch (target_state) {
5417 // sub <profiling_counter>, <delta> ;; Not changed
5419 // call <interrupt stub>
// Interrupt state: restore the jns so the call is skipped when the
// counter stays non-negative.
5421 *jns_instr_address = kJnsInstruction;
5422 *jns_offset_address = kJnsOffset;
5424 case ON_STACK_REPLACEMENT:
5425 case OSR_AFTER_STACK_CHECK:
5426 // sub <profiling_counter>, <delta> ;; Not changed
5429 // call <on-stack replacment>
// OSR states: replace the jns with a 2-byte nop so the call always runs.
5431 *jns_instr_address = kNopByteOne;
5432 *jns_offset_address = kNopByteTwo;
// Redirect the call to the replacement stub and tell the incremental
// marker about the patched code target.
5436 Assembler::set_target_address_at(call_target_address,
5438 replacement_code->entry());
5439 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5440 unoptimized_code, call_target_address, replacement_code);
// Inverse of PatchAt: inspects the bytes of the back-edge sequence ending at
// `pc` and reports which state it is currently patched to. A live jns means
// INTERRUPT; a nopped-out jns means one of the OSR states, distinguished by
// the call target.
// NOTE(review): some lines (the Isolate*/pc parameters, the INTERRUPT return,
// and closing braces) are not visible in this chunk.
5444 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5446 Code* unoptimized_code,
// Same layout as in PatchAt: rel32 call operand at pc - kIntSize, the jns
// (or nop) byte 3 bytes before that.
5448 Address call_target_address = pc - kIntSize;
5449 Address jns_instr_address = call_target_address - 3;
// Sanity check: the byte before the operand must be the call opcode.
5450 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5452 if (*jns_instr_address == kJnsInstruction) {
// jns present => interrupt-check form; verify offset and call target.
5453 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5454 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5455 Assembler::target_address_at(call_target_address,
// Otherwise the jns must have been replaced by the 2-byte nop.
5460 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5461 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
// Distinguish the two OSR states by the patched call target.
5463 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5464 isolate->builtins()->OnStackReplacement()->entry()) {
5465 return ON_STACK_REPLACEMENT;
5468 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5469 Assembler::target_address_at(call_target_address,
5471 return OSR_AFTER_STACK_CHECK;
5475 } // namespace internal
5478 #endif // V8_TARGET_ARCH_IA32