// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
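
// How the patch-site protocol above plays out (as the comments in the class
// indicate): the initial jc/jnc always takes its branch, so every comparison
// goes through the IC first. Once the IC has collected smi feedback it
// patches the jump in place (jc becomes jz, jnc becomes jnz), turning the
// test against kSmiTagMask into a real inline smi check. EmitPatchInfo tells
// the patcher where the patchable jump is by encoding the byte distance back
// to it in the imm8 operand of a side-effect-free `test eax, imm8`; a bare
// nop instead signals that no inlined smi code exists at this call site.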

// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
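//
// Concretely, for a call f(a, b) the top of the stack at entry looks like
// this (lower addresses on top):
//
//   esp+0:  return address
//   esp+4:  b            (last argument)
//   esp+8:  a            (first argument)
//   esp+12: receiver
//
// which is why receiver_offset in Generate() below is computed as
// (num_parameters + 1) * kPointerSize.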
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }
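
  // For example, with locals_count == 70 the counted loop above runs
  // 70 / 32 == 2 iterations (64 pushes of undefined) and the tail loop emits
  // the remaining 70 % 32 == 6 pushes inline, bounding code size for large
  // frames while keeping small frames loop-free.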

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
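
  // The barrier elision above is safe because FastNewContextStub allocates
  // in new space: a store into a new-space context can never create an
  // old-to-new reference that the store buffer would need to record. The
  // FLAG_debug_code path asserts exactly that assumption.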

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, edx);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    Label non_adaptor_frame;
    __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
           Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ j(not_equal, &non_adaptor_frame);
    __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

    __ bind(&non_adaptor_frame);
    __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
           Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));

    Label non_construct_frame, done;
    __ j(not_equal, &non_construct_frame);

    // Construct frame
    __ mov(eax,
           Operand(eax, ConstructFrameConstants::kOriginalConstructorOffset));
    __ jmp(&done);

    // Non-construct frame
    __ bind(&non_construct_frame);
    __ mov(eax, Immediate(isolate()->factory()->undefined_value()));

    __ bind(&done);
    SetVar(new_target_var, eax, ebx, edx);
  }
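
  // The frame walk above first unwraps a possible arguments adaptor frame
  // (recognized by the ARGUMENTS_ADAPTOR marker in its context slot), then
  // tests the marker slot of the real caller frame: only a CONSTRUCT frame
  // stores the original constructor, which is what new.target evaluates to;
  // for ordinary calls the result is undefined.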

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    __ push(Immediate(Smi::FromInt(rest_index)));
    __ push(Immediate(Smi::FromInt(language_mode())));

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(!function->proxy()->var()->IsUnallocatedOrGlobalSlot());
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}

void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
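
// The decrement weight above scales with the size of the loop body:
// weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)).
// If, say, the back edge sits 10 * kCodeSizeMultiplier bytes after its
// target, every iteration costs 10 units of interrupt budget, so large loop
// bodies drain the profiling counter (and reach InterruptCheck) sooner.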

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);

    SetReturnPosition(function());
    int no_frame_start = masm_->pc_offset();
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
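
// Note on the epilogue above: __ Ret(arguments_bytes, ecx) returns while
// dropping the receiver and all formal parameters in one step. With two
// declared parameters, arg_count == 3 and 3 * kPointerSize == 12 bytes of
// caller-pushed values are removed; ecx serves only as a scratch register
// for the return address on this path.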

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}
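
// Worked example for the offsets above, with kPointerSize == 4 and a
// function of two parameters: parameter 0 maps to
// Operand(ebp, -0 + (2 + 1) * 4) == ebp + 12 and parameter 1 to ebp + 8,
// just above the return address; local 0 sits at ebp + kLocal0Offset and
// each further local lands one word below the previous one.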

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
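
// Note on hole_init above: LET, CONST, and CONST_LEGACY bindings are
// initialized with the_hole_value so that a read that happens before the
// initializer runs can be detected; the read barrier in EmitVariableLoad
// below throws a ReferenceError for LET/CONST when it loads the hole.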

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // TODO(rossberg)
      break;

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
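
// A subtlety in the case comparisons above: the jmp(&skip) jumps over a
// small block that normal execution never reaches. It exists so that
// PrepareForBailout(clause, TOS_REG) has a landing site when optimized code
// deoptimizes into the middle of a case comparison; the re-materialized IC
// result is then re-tested against true_value before dispatching.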

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
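
  // At this point the for-in state occupies five stack slots, from the top:
  //   esp[0]: current index (smi)
  //   esp[1]: array length (smi)
  //   esp[2]: fixed array of keys (or the enum cache contents)
  //   esp[3]: expected map in the fast case, or a smi flag
  //           (1 = slow check, 0 = proxy) in the slow case
  //   esp[4]: the enumerable object itself
  // The loop below reads these slots at exactly those offsets, and the
  // break path pops all five words at once.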

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(isolate()->factory()->home_object_symbol()));
    __ mov(StoreDescriptor::ValueRegister(),
           Operand(esp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}

void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  if (var->IsGlobalSlot()) {
    DCHECK(var->index() > 0);
    DCHECK(var->IsStaticGlobalObjectProperty());
    // Each var occupies two slots in the context: for reads and writes.
    int slot_index = var->index();
    int depth = scope()->ContextChainLength(var->scope());
    __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
           Immediate(Smi::FromInt(depth)));
    __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
           Immediate(Smi::FromInt(slot_index)));
    __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
    LoadGlobalViaContextStub stub(isolate(), depth);
    __ CallStub(&stub);

  } else {
    __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), var->name());
    __ mov(LoadDescriptor::SlotRegister(),
           Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
    CallLoadIC(typeof_mode);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else if (var->is_this()) {
          CHECK(info_->function() != nullptr &&
                (info_->function()->kind() & kSubclassConstructor) != 0);
          // TODO(dslomov): implement 'this' hole check elimination.
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
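
// Example of the read barrier firing: in
//   { f(); let x = 1; function f() { return x; } }
// the call to f() reads x before its initializer has run, so GetVar loads
// the hole and the code above throws a ReferenceError rather than silently
// producing undefined.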

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
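
// The unrolled copy above moves two words per iteration, so when size is an
// odd number of words (size % (2 * kPointerSize) != 0) the trailing if-block
// copies the final word separately. Since Allocate was called with
// TAG_OBJECT, both eax and ebx hold tagged pointers, which is why
// FieldOperand (untagging) addressing is used throughout the copy.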

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // store_slot_index points to the vector IC slot for the next store IC used.
  // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
  // and must be updated if the number of store ICs emitted here changes.
  int store_slot_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            if (FLAG_vector_stores) {
              EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              CallStoreIC();
            } else {
              CallStoreIC(key->LiteralFeedbackId());
            }
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              __ mov(StoreDescriptor::ReceiverRegister(), eax);
              __ mov(StoreDescriptor::NameRegister(),
                     Immediate(isolate()->factory()->home_object_symbol()));
              __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
              if (FLAG_vector_stores) {
                EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
              }
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(
              value, 2, expr->SlotForHomeObject(value, &store_slot_index));
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = value;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = value;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(
        it->second->getter, 2,
        expr->SlotForHomeObject(it->second->getter, &store_slot_index));

    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(
        it->second->setter, 3,
        expr->SlotForHomeObject(it->second->setter, &store_slot_index));

    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }

    __ push(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype, 2);
    } else {
      EmitPropertyKey(property, expr->GetIdForProperty(property_index));
      VisitForStackValue(value);
      EmitSetHomeObjectIfNeeded(
          value, 2, expr->SlotForHomeObject(value, &store_slot_index));

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ push(Immediate(Smi::FromInt(NONE)));
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
          break;

        case ObjectLiteral::Property::SETTER:
          __ push(Immediate(Smi::FromInt(NONE)));
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }

  // Verify that compilation exactly consumed the number of store ic slots that
  // the ObjectLiteral node had to offer.
  DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
}
1818 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1819 Comment cmnt(masm_, "[ ArrayLiteral");
1821 expr->BuildConstantElements(isolate());
1822 Handle<FixedArray> constant_elements = expr->constant_elements();
1823 bool has_constant_fast_elements =
1824 IsFastObjectElementsKind(expr->constant_elements_kind());
1826 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1827 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
  // If the only consumer of the allocation site is transition tracking, we
  // can turn it off when the elements kind has nowhere left to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }
1833 if (MustCreateArrayLiteralWithRuntime(expr)) {
1834 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1835 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1836 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1837 __ push(Immediate(constant_elements));
1838 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1839 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1842 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1843 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1844 __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
1848 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1850 bool result_saved = false; // Is the result saved to the stack?
1851 ZoneList<Expression*>* subexprs = expr->values();
1852 int length = subexprs->length();
1854 // Emit code to evaluate all the non-constant subexpressions and to store
1855 // them into the newly cloned array.
1856 int array_index = 0;
1857 for (; array_index < length; array_index++) {
1858 Expression* subexpr = subexprs->at(array_index);
1859 if (subexpr->IsSpread()) break;
1861 // If the subexpression is a literal or a simple materialized literal it
1862 // is already set in the cloned array.
1863 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1865 if (!result_saved) {
1866 __ push(eax); // array literal.
1867 __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
1870 VisitForAccumulatorValue(subexpr);
1872 if (has_constant_fast_elements) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: such
      // elements cannot transition, so no runtime stub call is needed.
1875 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1876 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1877 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1878 // Store the subexpression value in the array's elements.
1879 __ mov(FieldOperand(ebx, offset), result_register());
1880 // Update the write barrier for the array store.
1881 __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
1886 // Store the subexpression value in the array's elements.
1887 __ mov(ecx, Immediate(Smi::FromInt(array_index)));
1888 StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }
  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array, which has a literal index and is handled
  // above. The second part starts at the first spread expression (inclusive);
  // these elements get appended to the array. Note that the number of elements
  // an iterable produces is unknown ahead of time.
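  // For example, in [1, 2, ...iter, 3] the elements 1 and 2 are stored by the
  // loop above, while ...iter and 3 are appended one by one below.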
1900 if (array_index < length && result_saved) {
1901 __ Drop(1); // literal index
    __ Pop(eax);
    result_saved = false;
  }
1905 for (; array_index < length; array_index++) {
1906 Expression* subexpr = subexprs->at(array_index);
    __ Push(eax);
    if (subexpr->IsSpread()) {
1910 VisitForStackValue(subexpr->AsSpread()->expression());
1911 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement, 2);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }
  if (result_saved) {
    __ Drop(1);  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
1929 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1930 DCHECK(expr->target()->IsValidReferenceExpression());
1932 Comment cmnt(masm_, "[ Assignment");
1933 SetExpressionPosition(expr, INSERT_BREAK);
1935 Property* property = expr->target()->AsProperty();
1936 LhsKind assign_type = Property::GetAssignType(property);
1938 // Evaluate LHS expression.
1939 switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
1943 case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
1946 VisitForAccumulatorValue(
1947 property->obj()->AsSuperPropertyReference()->home_object());
1948 __ push(result_register());
1949 if (expr->is_compound()) {
1950 __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
      }
      break;
1954 case NAMED_PROPERTY:
1955 if (expr->is_compound()) {
1956 // We need the receiver both on the stack and in the register.
1957 VisitForStackValue(property->obj());
1958 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
1963 case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
1968 VisitForAccumulatorValue(property->key());
1969 __ Push(result_register());
1970 if (expr->is_compound()) {
1971 __ push(MemOperand(esp, 2 * kPointerSize));
1972 __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(result_register());
      }
      break;
1976 case KEYED_PROPERTY: {
1977 if (expr->is_compound()) {
1978 VisitForStackValue(property->obj());
1979 VisitForStackValue(property->key());
1980 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1981 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }
1990 // For compound assignments we need another deoptimization point after the
1991 // variable/property load.
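  // For example, o.x += 1 must be able to deoptimize right after loading o.x,
  // before the addition and the store have happened.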
1992 if (expr->is_compound()) {
1993 AccumulatorValueContext result_context(this);
1994 { AccumulatorValueContext left_operand_context(this);
1995 switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
2000 case NAMED_SUPER_PROPERTY:
2001 EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
2004 case NAMED_PROPERTY:
2005 EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
2008 case KEYED_SUPER_PROPERTY:
2009 EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
2012 case KEYED_PROPERTY:
2013 EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }
2019 Token::Value op = expr->binary_op();
2020 __ push(eax); // Left operand goes on the stack.
2021 VisitForAccumulatorValue(expr->value());
2023 if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }
2032 // Deoptimization point in case the binary operation may have side effects.
2033 PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
2038 SetExpressionPosition(expr);
  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2044 expr->op(), expr->AssignmentSlot());
2045 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
2048 case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
2051 case NAMED_SUPER_PROPERTY:
2052 EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
2055 case KEYED_SUPER_PROPERTY:
2056 EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
2059 case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
2066 void FullCodeGenerator::VisitYield(Yield* expr) {
2067 Comment cmnt(masm_, "[ Yield");
2068 SetExpressionPosition(expr);
2070 // Evaluate yielded value first; the initial iterator definition depends on
2071 // this. It stays on the stack while we update the iterator.
2072 VisitForStackValue(expr->expression());
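  // Roughly, the yield kinds map to: kInitial - the implicit suspension when
  // the generator starts, kSuspend - an explicit `yield expr`, kFinal - the
  // closing yield when the generator completes, kDelegating - `yield* expr`.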
2074 switch (expr->yield_kind()) {
2075 case Yield::kSuspend:
2076 // Pop value from top-of-stack slot; box result into result register.
2077 EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
2080 case Yield::kInitial: {
2081 Label suspend, continuation, post_runtime, resume;
      __ jmp(&suspend);
      __ bind(&continuation);
2085 __ RecordGeneratorContinuation();
      __ jmp(&resume);
      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
2090 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2091 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2092 Immediate(Smi::FromInt(continuation.pos())));
2093 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
2100 __ push(eax); // generator object
2101 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2102 __ mov(context_register(),
2103 Operand(ebp, StandardFrameConstants::kContextOffset));
2104 __ bind(&post_runtime);
2105 __ pop(result_register());
2106 EmitReturnSequence();
      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }
2113 case Yield::kFinal: {
2114 VisitForAccumulatorValue(expr->generator_object());
2115 __ mov(FieldOperand(result_register(),
2116 JSGeneratorObject::kContinuationOffset),
2117 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2118 // Pop value from top-of-stack slot, box result into result register.
2119 EmitCreateIteratorResult(true);
2120 EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
2125 case Yield::kDelegating: {
2126 VisitForStackValue(expr->generator_object());
2128 // Initial stack layout is as follows:
2129 // [sp + 1 * kPointerSize] iter
2130 // [sp + 0 * kPointerSize] g
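      // The delegating yield below is compiled as an explicit loop, roughly:
      //   let received = undefined;
      //   while (true) {
      //     result = iter[f](received);     // f is "next" or "throw"
      //     if (result.done) break;
      //     received = yield result.value;  // suspends without re-boxing
      //   }
      // where an exception thrown into the yield switches f to "throw".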
2132 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2133 Label l_next, l_call, l_loop;
2134 Register load_receiver = LoadDescriptor::ReceiverRegister();
2135 Register load_name = LoadDescriptor::NameRegister();
2137 // Initial send value is undefined.
2138 __ mov(eax, isolate()->factory()->undefined_value());
2141 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
2144 __ push(load_name); // "throw"
2145 __ push(Operand(esp, 2 * kPointerSize)); // iter
      __ push(eax);  // exception
      __ jmp(&l_call);
2149 // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);  // result
2154 int handler_index = NewHandlerTableEntry();
2155 EnterTryBlock(handler_index, &l_catch);
2156 const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ push(eax);  // result
      __ jmp(&l_suspend);
2160 __ bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&l_resume);
2164 __ bind(&l_suspend);
2165 const int generator_object_depth = kPointerSize + try_block_size;
2166 __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                     // g
      __ push(Immediate(Smi::FromInt(handler_index)));  // handler-index
2169 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2170 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2171 Immediate(Smi::FromInt(l_continuation.pos())));
2172 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
2176 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2177 __ mov(context_register(),
2178 Operand(ebp, StandardFrameConstants::kContextOffset));
2179 __ pop(eax); // result
2180 EmitReturnSequence();
2181 __ bind(&l_resume); // received in eax
2182 ExitTryBlock(handler_index);
2184 // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);

      __ mov(load_name, isolate()->factory()->next_string());  // "next"
2188 __ push(load_name); // "next"
2189 __ push(Operand(esp, 2 * kPointerSize)); // iter
2190 __ push(eax); // received
2192 // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(load_receiver, Operand(esp, kPointerSize));
2195 __ mov(LoadDescriptor::SlotRegister(),
2196 Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2197 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2198 CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
2201 SetCallPosition(expr, 1);
2202 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2206 __ Drop(1); // The function is still on the stack; drop it.
2208 // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);  // save result
2211 __ Move(load_receiver, eax); // result
      __ mov(load_name,
             isolate()->factory()->done_string());  // "done"
2214 __ mov(LoadDescriptor::SlotRegister(),
2215 Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
2216 CallLoadIC(NOT_INSIDE_TYPEOF); // result.done in eax
2217 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(load_receiver);  // result
      __ mov(load_name,
             isolate()->factory()->value_string());  // "value"
2226 __ mov(LoadDescriptor::SlotRegister(),
2227 Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
2228 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in eax
      context()->DropAndPlug(2, eax);  // drop iter and g
      break;
    }
  }
}
2236 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
                                            Expression *value,
                                            JSGeneratorObject::ResumeMode resume_mode) {
2239 // The value stays in eax, and is ultimately read by the resumed generator, as
2240 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2241 // is read to throw the value when the resumed generator is already closed.
2242 // ebx will hold the generator object until the activation has been resumed.
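  // The code below rebuilds the suspended activation by hand: it pushes hole
  // values for the formal parameters, re-enters the frame with a call so a
  // valid return address is on the stack, and then either jumps directly to
  // the saved continuation offset (fast path for NEXT with an empty operand
  // stack) or lets the runtime restore the operand stack and handlers first.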
2243 VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);
2247 // Load suspended function and context.
2248 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2249 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2254 // Push holes for arguments to generator function.
2255 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2258 __ mov(ecx, isolate()->factory()->the_hole_value());
2259 Label push_argument_holes, push_frame;
2260 __ bind(&push_argument_holes);
2261 __ sub(edx, Immediate(Smi::FromInt(1)));
2262 __ j(carry, &push_frame);
  __ push(ecx);  // Push a hole for each argument slot.
  __ jmp(&push_argument_holes);
2266 // Enter a new JavaScript frame, and initialize its slots as they were when
2267 // the generator was suspended.
2268 Label resume_frame, done;
2269 __ bind(&push_frame);
2270 __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
2273 __ push(ebp); // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
2276 __ push(edi); // Callee's JS Function.
2278 // Load the operand stack size.
2279 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);
  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
2285 if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
2288 __ j(not_zero, &slow_resume);
2289 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
2293 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2294 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }
2299 // Otherwise, we push holes for the operand stack and call the runtime to fix
2300 // up the stack and the handlers.
2301 Label push_operand_holes, call_resume;
2302 __ bind(&push_operand_holes);
2303 __ sub(edx, Immediate(1));
2304 __ j(carry, &call_resume);
  __ push(ecx);  // Push a hole for each operand slot.
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);  // generator object
  __ push(result_register());
2310 __ Push(Smi::FromInt(resume_mode));
2311 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2312 // Not reached: the runtime call returns elsewhere.
2313 __ Abort(kGeneratorFailedToResume);
  __ bind(&done);
  context()->Plug(result_register());
2320 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);
2331 __ bind(&gc_required);
2332 __ Push(Smi::FromInt(instance_size));
2333 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2334 __ mov(context_register(),
2335 Operand(ebp, StandardFrameConstants::kContextOffset));
2337 __ bind(&allocated);
2338 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2339 __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2340 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(ecx);  // Pop the boxed result value into ecx.
  __ mov(edx, isolate()->factory()->ToBoolean(done));
2343 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2344 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2345 isolate()->factory()->empty_fixed_array());
2346 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2347 isolate()->factory()->empty_fixed_array());
2348 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2349 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
  // Only the value field needs a write barrier, as the other values are in the
  // root set.
2353 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2354 ecx, edx, kDontSaveFPRegs);
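  // The allocated object behaves like the JS literal { value: v, done: d },
  // built with the canonical iterator result map so its layout is fixed.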
2358 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2359 SetExpressionPosition(prop);
2360 Literal* key = prop->key()->AsLiteral();
2361 DCHECK(!key->value()->IsSmi());
2362 DCHECK(!prop->IsSuperAccess());
2364 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2365 __ mov(LoadDescriptor::SlotRegister(),
2366 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2367 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2371 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2372 // Stack: receiver, home_object.
2373 SetExpressionPosition(prop);
2374 Literal* key = prop->key()->AsLiteral();
2375 DCHECK(!key->value()->IsSmi());
2376 DCHECK(prop->IsSuperAccess());
2378 __ push(Immediate(key->value()));
2379 __ push(Immediate(Smi::FromInt(language_mode())));
2380 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2384 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2385 SetExpressionPosition(prop);
2386 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2387 __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallIC(ic);
2393 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2394 // Stack: receiver, home_object, key.
2395 SetExpressionPosition(prop);
2396 __ push(Immediate(Smi::FromInt(language_mode())));
2397 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2401 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
2405 // Do combined smi check of the operands. Left operand is on the
2406 // stack. Right operand is in eax.
2407 Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
2412 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
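  // Smis carry a zero tag bit, so or-ing both operands and testing
  // kSmiTagMask checks them for smi-ness in one instruction; the patch site
  // rewrites the jump condition once type feedback is available.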
2414 __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2418 CallIC(code, expr->BinaryOperationFeedbackId());
2419 patch_site.EmitPatchInfo();
2420 __ jmp(&done, Label::kNear);
  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.
  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
2438 __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(eax);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Check that the *unsigned* result fits in a smi.
      __ test(eax, Immediate(0xc0000000));
2452 __ j(zero, &result_ok);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
2461 __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
2472 __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
2492 context()->Plug(eax);
2496 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2497 int* used_store_slots) {
2498 // Constructor is in eax.
2499 DCHECK(lit != NULL);
  __ push(eax);

  // No access check is needed here since the constructor is created by the
  // class literal.
2504 Register scratch = ebx;
  __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);
2508 for (int i = 0; i < lit->properties()->length(); i++) {
2509 ObjectLiteral::Property* property = lit->properties()->at(i);
2510 Expression* value = property->value();
2512 if (property->is_static()) {
2513 __ push(Operand(esp, kPointerSize)); // constructor
    } else {
      __ push(Operand(esp, 0));  // prototype
    }
2517 EmitPropertyKey(property, lit->GetIdForProperty(i));
    // The static prototype property is read-only. The non-computed
    // property-name case is handled in the parser. Since this is the only
    // situation where an own read-only property must be checked for, it is
    // special-cased here rather than checked for every property.
2523 if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
      __ push(eax);
    }
2528 VisitForStackValue(value);
2529 EmitSetHomeObjectIfNeeded(value, 2,
2530 lit->SlotForHomeObject(value, used_store_slots));
2532 switch (property->kind()) {
2533 case ObjectLiteral::Property::CONSTANT:
2534 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
2537 case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;
2541 case ObjectLiteral::Property::GETTER:
2542 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
        break;
2546 case ObjectLiteral::Property::SETTER:
2547 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
        break;
    }
  }

  // prototype
2554 __ CallRuntime(Runtime::kToFastProperties, 1);
  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);
2559 if (is_strong(language_mode())) {
    __ mov(scratch,
           FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
    __ push(eax);
    __ push(scratch);
2564 // TODO(conradw): It would be more efficient to define the properties with
2565 // the right attributes the first time round.
2566 // Freeze the prototype.
2567 __ CallRuntime(Runtime::kObjectFreeze, 1);
2568 // Freeze the constructor.
    __ CallRuntime(Runtime::kObjectFreeze, 1);
  }
}
2574 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ pop(edx);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2578 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2579 CallIC(code, expr->BinaryOperationFeedbackId());
2580 patch_site.EmitPatchInfo();
2581 context()->Plug(eax);
2585 void FullCodeGenerator::EmitAssignment(Expression* expr,
2586 FeedbackVectorICSlot slot) {
2587 DCHECK(expr->IsValidReferenceExpression());
2589 Property* prop = expr->AsProperty();
2590 LhsKind assign_type = Property::GetAssignType(prop);
2592 switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
2595 EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
2599 case NAMED_PROPERTY: {
2600 __ push(eax); // Preserve value.
2601 VisitForAccumulatorValue(prop->obj());
2602 __ Move(StoreDescriptor::ReceiverRegister(), eax);
2603 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2604 __ mov(StoreDescriptor::NameRegister(),
2605 prop->key()->AsLiteral()->value());
      if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
2610 case NAMED_SUPER_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2613 VisitForAccumulatorValue(
2614 prop->obj()->AsSuperPropertyReference()->home_object());
2615 // stack: value, this; eax: home_object
2616 Register scratch = ecx;
2617 Register scratch2 = edx;
2618 __ mov(scratch, result_register()); // home_object
2619 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2620 __ mov(scratch2, MemOperand(esp, 0)); // this
2621 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2622 __ mov(MemOperand(esp, 0), scratch); // home_object
2623 // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
2627 case KEYED_SUPER_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
2632 VisitForAccumulatorValue(prop->key());
2633 Register scratch = ecx;
2634 Register scratch2 = edx;
2635 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2636 // stack: value, this, home_object; eax: key, edx: value
2637 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2638 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2639 __ mov(scratch, MemOperand(esp, 0)); // home_object
2640 __ mov(MemOperand(esp, kPointerSize), scratch);
2641 __ mov(MemOperand(esp, 0), eax);
2642 __ mov(eax, scratch2);
2643 // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
2647 case KEYED_PROPERTY: {
2648 __ push(eax); // Preserve value.
2649 VisitForStackValue(prop->obj());
2650 VisitForAccumulatorValue(prop->key());
2651 __ Move(StoreDescriptor::NameRegister(), eax);
2652 __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2653 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2654 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
2661 context()->Plug(eax);
2665 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2666 Variable* var, MemOperand location) {
2667 __ mov(location, eax);
2668 if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
2676 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2677 FeedbackVectorICSlot slot) {
2678 if (var->IsUnallocated()) {
2679 // Global var, const, or let.
2680 __ mov(StoreDescriptor::NameRegister(), var->name());
2681 __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
2685 } else if (var->IsGlobalSlot()) {
2686 // Global var, const, or let.
2687 DCHECK(var->index() > 0);
2688 DCHECK(var->IsStaticGlobalObjectProperty());
2689 // Each var occupies two slots in the context: for reads and writes.
2690 int slot_index = var->index() + 1;
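    // Reads of such a global go through the slot at var->index(); writes use
    // the slot immediately after it, hence the + 1 here.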
2691 int depth = scope()->ContextChainLength(var->scope());
2692 __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
2693 Immediate(Smi::FromInt(depth)));
2694 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
2695 Immediate(Smi::FromInt(slot_index)));
2696 __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
2697 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
    StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
    __ CallStub(&stub);
2701 } else if (var->mode() == LET && op != Token::INIT_LET) {
2702 // Non-initializing assignment to let variable needs a write barrier.
2703 DCHECK(!var->IsLookupSlot());
2704 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
2707 __ mov(edx, location);
2708 __ cmp(edx, isolate()->factory()->the_hole_value());
2709 __ j(not_equal, &assign, Label::kNear);
2710 __ push(Immediate(var->name()));
2711 __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
2715 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2716 // Assignment to const variable needs a write barrier.
2717 DCHECK(!var->IsLookupSlot());
2718 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
2721 __ mov(edx, location);
2722 __ cmp(edx, isolate()->factory()->the_hole_value());
2723 __ j(not_equal, &const_error, Label::kNear);
2724 __ push(Immediate(var->name()));
2725 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2726 __ bind(&const_error);
2727 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2729 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2730 if (var->IsLookupSlot()) {
2731 // Assignment to var.
2732 __ push(eax); // Value.
2733 __ push(esi); // Context.
2734 __ push(Immediate(var->name()));
2735 __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
2740 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2741 MemOperand location = VarOperand(var, ecx);
2742 if (generate_debug_code_ && op == Token::INIT_LET) {
2743 // Check for an uninitialized let binding.
2744 __ mov(edx, location);
2745 __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
2751 } else if (op == Token::INIT_CONST_LEGACY) {
2752 // Const initializers need a write barrier.
2753 DCHECK(var->mode() == CONST_LEGACY);
2754 DCHECK(!var->IsParameter()); // No const parameters.
2755 if (var->IsLookupSlot()) {
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
2759 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
2764 __ mov(edx, location);
2765 __ cmp(edx, isolate()->factory()->the_hole_value());
2766 __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
2772 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2773 if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError, 0);
    }
    // Silently ignore the store in sloppy mode.
  }
}
2781 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2782 // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
2785 Property* prop = expr->target()->AsProperty();
2786 DCHECK(prop != NULL);
2787 DCHECK(prop->key()->IsLiteral());
2789 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2790 __ pop(StoreDescriptor::ReceiverRegister());
2791 if (FLAG_vector_stores) {
2792 EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallStoreIC();
  } else {
    CallStoreIC(expr->AssignmentFeedbackId());
  }
2797 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2798 context()->Plug(eax);
2802 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2803 // Assignment to named property of super.
2805 // stack : receiver ('this'), home_object
2806 DCHECK(prop != NULL);
2807 Literal* key = prop->key()->AsLiteral();
2808 DCHECK(key != NULL);
  __ push(Immediate(key->value()));
  __ push(eax);
2812 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                              : Runtime::kStoreToSuper_Sloppy),
                 4);
2818 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to a keyed property of super.
2821 // stack : receiver ('this'), home_object, key
  __ push(eax);
  __ CallRuntime(
      (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                  : Runtime::kStoreKeyedToSuper_Sloppy),
      4);
2831 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2832 // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver
2837 __ pop(StoreDescriptor::NameRegister()); // Key.
2838 __ pop(StoreDescriptor::ReceiverRegister());
2839 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2841 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2842 if (FLAG_vector_stores) {
2843 EmitLoadStoreICSlot(expr->AssignmentSlot());
    CallIC(ic);
  } else {
    CallIC(ic, expr->AssignmentFeedbackId());
  }
2849 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2850 context()->Plug(eax);
2854 void FullCodeGenerator::VisitProperty(Property* expr) {
2855 Comment cmnt(masm_, "[ Property");
2856 SetExpressionPosition(expr);
2858 Expression* key = expr->key();
2860 if (key->IsPropertyName()) {
2861 if (!expr->IsSuperAccess()) {
2862 VisitForAccumulatorValue(expr->obj());
2863 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2864 EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
2872 if (!expr->IsSuperAccess()) {
2873 VisitForStackValue(expr->obj());
2874 VisitForAccumulatorValue(expr->key());
2875 __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2876 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2877 EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
2882 VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
2886 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2887 context()->Plug(eax);
2891 void FullCodeGenerator::CallIC(Handle<Code> code,
2892 TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2898 // Code common for calls using the IC.
2899 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2900 Expression* callee = expr->expression();
2902 CallICState::CallType call_type =
2903 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
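  // For example, a bare f() is a FUNCTION call and gets undefined pushed as
  // its receiver below, while o.f() takes the METHOD path and keeps o.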
2904 // Get the target function.
2905 if (call_type == CallICState::FUNCTION) {
2906 { StackValueContext context(this);
2907 EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
2910 // Push undefined as receiver. This is patched in the method prologue if it
2911 // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
2914 // Load the function from the receiver.
2915 DCHECK(callee->IsProperty());
2916 DCHECK(!callee->AsProperty()->IsSuperAccess());
2917 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2918 EmitNamedPropertyLoad(callee->AsProperty());
2919 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2920 // Push the target function under the receiver.
2921 __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
  }
2925 EmitCall(expr, call_type);
2929 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2930 SetExpressionPosition(expr);
2931 Expression* callee = expr->expression();
2932 DCHECK(callee->IsProperty());
2933 Property* prop = callee->AsProperty();
2934 DCHECK(prop->IsSuperAccess());
2936 Literal* key = prop->key()->AsLiteral();
2937 DCHECK(!key->value()->IsSmi());
2938 // Load the function from the receiver.
2939 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2940 VisitForStackValue(super_ref->home_object());
2941 VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
2945 __ push(Immediate(key->value()));
2946 __ push(Immediate(Smi::FromInt(language_mode())));
2949 // - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
2954 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2956 // Replace home_object with target function.
2957 __ mov(Operand(esp, kPointerSize), eax);
2960 // - target function
2961 // - this (receiver)
2962 EmitCall(expr, CallICState::METHOD);
2966 // Code common for calls using the IC.
2967 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);
2972 Expression* callee = expr->expression();
2974 // Load the function from the receiver.
2975 DCHECK(callee->IsProperty());
2976 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2977 __ mov(LoadDescriptor::NameRegister(), eax);
2978 EmitKeyedPropertyLoad(callee->AsProperty());
2979 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2981 // Push the target function under the receiver.
2982 __ push(Operand(esp, 0));
2983 __ mov(Operand(esp, kPointerSize), eax);
2985 EmitCall(expr, CallICState::METHOD);
2989 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2990 Expression* callee = expr->expression();
2991 DCHECK(callee->IsProperty());
2992 Property* prop = callee->AsProperty();
2993 DCHECK(prop->IsSuperAccess());
2995 SetExpressionPosition(prop);
2996 // Load the function from the receiver.
2997 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2998 VisitForStackValue(super_ref->home_object());
2999 VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
3003 VisitForStackValue(prop->key());
3004 __ push(Immediate(Smi::FromInt(language_mode())));
3007 // - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
3012 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3014 // Replace home_object with target function.
3015 __ mov(Operand(esp, kPointerSize), eax);
3018 // - target function
3019 // - this (receiver)
3020 EmitCall(expr, CallICState::METHOD);
3024 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3025 // Load the arguments.
3026 ZoneList<Expression*>* args = expr->arguments();
3027 int arg_count = args->length();
3028 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
3032 SetCallPosition(expr, arg_count);
3033 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3034 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
3035 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3036 // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
3040 RecordJSReturnSite(expr);
3042 // Restore context register.
3043 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3045 context()->DropAndPlug(1, eax);
3049 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3050 // Push copy of the first argument or undefined if it doesn't exist.
3051 if (arg_count > 0) {
3052 __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
3057 // Push the enclosing function.
3058 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3060 // Push the language mode.
3061 __ push(Immediate(Smi::FromInt(language_mode())));
  // Push the start position of the scope the call resides in.
3064 __ push(Immediate(Smi::FromInt(scope()->start_position())));
3066 // Do the runtime call.
3067 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
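  // For a direct `eval(src)` the runtime checks whether the callee really is
  // the global eval and, if so, compiles src in the current scope using the
  // language mode and scope position pushed above.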
3071 void FullCodeGenerator::EmitInitializeThisAfterSuper(
3072 SuperCallReference* super_call_ref, FeedbackVectorICSlot slot) {
3073 Variable* this_var = super_call_ref->this_var()->var();
3074 GetVar(ecx, this_var);
3075 __ cmp(ecx, isolate()->factory()->the_hole_value());
3077 Label uninitialized_this;
3078 __ j(equal, &uninitialized_this);
3079 __ push(Immediate(this_var->name()));
3080 __ CallRuntime(Runtime::kThrowReferenceError, 1);
3081 __ bind(&uninitialized_this);
3083 EmitVariableAssignment(this_var, Token::INIT_CONST, slot);
3087 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3088 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3089 VariableProxy* callee = expr->expression()->AsVariableProxy();
3090 if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
3093 // Generate code for loading from variables potentially shadowed by
3094 // eval-introduced variables.
3095 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
3099 // the object holding it (returned in edx).
3100 __ push(context_register());
3101 __ push(Immediate(callee->name()));
3102 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3103 __ push(eax); // Function.
3104 __ push(edx); // Receiver.
3105 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3107 // If fast case code has been generated, emit code to push the function
3108 // and receiver and have the slow path jump around this code.
3109 if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
3115 // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
3121 VisitForStackValue(callee);
3122 // refEnv.WithBaseObject()
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
}
3128 void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
3131 // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
3135 Comment cmnt(masm_, "[ Call");
3136 Expression* callee = expr->expression();
3137 Call::CallType call_type = expr->GetCallType(isolate());
3139 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3140 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
3141 // to resolve the function we need to call. Then we call the resolved
3142 // function using the given arguments.
3143 ZoneList<Expression*>* args = expr->arguments();
3144 int arg_count = args->length();
3146 PushCalleeAndWithBaseObject(expr);
3148 // Push the arguments.
3149 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    // Push a copy of the function (found below the arguments) and
    // resolve eval.
3155 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
3156 EmitResolvePossiblyDirectEval(arg_count);
3158 // Touch up the stack with the resolved function.
3159 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
3161 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3163 SetCallPosition(expr, arg_count);
3164 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
3167 RecordJSReturnSite(expr);
3168 // Restore context register.
3169 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3170 context()->DropAndPlug(1, eax);
3172 } else if (call_type == Call::GLOBAL_CALL) {
3173 EmitCallWithLoadIC(expr);
3174 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3175 // Call to a lookup slot (dynamically introduced variable).
    PushCalleeAndWithBaseObject(expr);
    EmitCall(expr);
3178 } else if (call_type == Call::PROPERTY_CALL) {
3179 Property* property = callee->AsProperty();
3180 bool is_named_call = property->key()->IsPropertyName();
3181 if (property->IsSuperAccess()) {
3182 if (is_named_call) {
3183 EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
3188 VisitForStackValue(property->obj());
3189 if (is_named_call) {
3190 EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
3195 } else if (call_type == Call::SUPER_CALL) {
3196 EmitSuperConstructorCall(expr);
3198 DCHECK(call_type == Call::OTHER_CALL);
3199 // Call to an arbitrary expression not handled specially above.
3200 VisitForStackValue(callee);
3201 __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCall(expr);
  }
#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
3213 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3214 Comment cmnt(masm_, "[ CallNew");
3215 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
3219 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // discarded.
3222 DCHECK(!expr->expression()->IsSuperPropertyReference());
3223 VisitForStackValue(expr->expression());
3225 // Push the arguments ("left-to-right") on the stack.
3226 ZoneList<Expression*>* args = expr->arguments();
3227 int arg_count = args->length();
3228 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
3232 // Call the construct call builtin that handles allocation and
3233 // constructor invocation.
3234 SetConstructCallPosition(expr);
3236 // Load function and argument count into edi and eax.
3237 __ Move(eax, Immediate(arg_count));
3238 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3240 // Record call targets in unoptimized code.
3241 if (FLAG_pretenuring_call_new) {
3242 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3243 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }
3247 __ LoadHeapObject(ebx, FeedbackVector());
3248 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
3250 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3251 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3252 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3253 context()->Plug(eax);
3257 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3258 SuperCallReference* super_call_ref =
3259 expr->expression()->AsSuperCallReference();
3260 DCHECK_NOT_NULL(super_call_ref);
3262 EmitLoadSuperConstructor(super_call_ref);
3263 __ push(result_register());
3265 // Push the arguments ("left-to-right") on the stack.
3266 ZoneList<Expression*>* args = expr->arguments();
3267 int arg_count = args->length();
3268 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
3272 // Call the construct call builtin that handles allocation and
3273 // constructor invocation.
3274 SetConstructCallPosition(expr);
3276 // Load original constructor into ecx.
3277 VisitForAccumulatorValue(super_call_ref->new_target_var());
3278 __ mov(ecx, result_register());
3280 // Load function and argument count into edi and eax.
3281 __ Move(eax, Immediate(arg_count));
3282 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3284 // Record call targets in unoptimized code.
3285 if (FLAG_pretenuring_call_new) {
3287 /* TODO(dslomov): support pretenuring.
3288 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3289 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
    expr->CallNewFeedbackSlot().ToInt() + 1);
    */
  }
3294 __ LoadHeapObject(ebx, FeedbackVector());
3295 __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
3297 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3298 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3300 RecordJSReturnSite(expr);
3302 EmitInitializeThisAfterSuper(super_call_ref, expr->CallFeedbackICSlot());
3303 context()->Plug(eax);
3307 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3308 ZoneList<Expression*>* args = expr->arguments();
3309 DCHECK(args->length() == 1);
3311 VisitForAccumulatorValue(args->at(0));
3313 Label materialize_true, materialize_false;
3314 Label* if_true = NULL;
3315 Label* if_false = NULL;
3316 Label* fall_through = NULL;
3317 context()->PrepareTest(&materialize_true, &materialize_false,
3318 &if_true, &if_false, &fall_through);
3320 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3321 __ test(eax, Immediate(kSmiTagMask));
3322 Split(zero, if_true, if_false, fall_through);
3324 context()->Plug(if_true, if_false);
3328 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3329 ZoneList<Expression*>* args = expr->arguments();
3330 DCHECK(args->length() == 1);
3332 VisitForAccumulatorValue(args->at(0));
3334 Label materialize_true, materialize_false;
3335 Label* if_true = NULL;
3336 Label* if_false = NULL;
3337 Label* fall_through = NULL;
3338 context()->PrepareTest(&materialize_true, &materialize_false,
3339 &if_true, &if_false, &fall_through);
3341 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3342 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
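  // A non-negative smi has both the tag bit and the sign bit clear, so a
  // single test against (kSmiTagMask | 0x80000000) covers both conditions.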
3343 Split(zero, if_true, if_false, fall_through);
3345 context()->Plug(if_true, if_false);
3349 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3350 ZoneList<Expression*>* args = expr->arguments();
3351 DCHECK(args->length() == 1);
3353 VisitForAccumulatorValue(args->at(0));
3355 Label materialize_true, materialize_false;
3356 Label* if_true = NULL;
3357 Label* if_false = NULL;
3358 Label* fall_through = NULL;
3359 context()->PrepareTest(&materialize_true, &materialize_false,
3360 &if_true, &if_false, &fall_through);
3362 __ JumpIfSmi(eax, if_false);
3363 __ cmp(eax, isolate()->factory()->null_value());
3364 __ j(equal, if_true);
3365 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3366 // Undetectable objects behave like undefined when tested with typeof.
3367 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
3368 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3369 __ j(not_zero, if_false);
3370 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3371 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3372 __ j(below, if_false);
3373 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3374 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3375 Split(below_equal, if_true, if_false, fall_through);
3377 context()->Plug(if_true, if_false);
3381 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3382 ZoneList<Expression*>* args = expr->arguments();
3383 DCHECK(args->length() == 1);
3385 VisitForAccumulatorValue(args->at(0));
3387 Label materialize_true, materialize_false;
3388 Label* if_true = NULL;
3389 Label* if_false = NULL;
3390 Label* fall_through = NULL;
3391 context()->PrepareTest(&materialize_true, &materialize_false,
3392 &if_true, &if_false, &fall_through);
3394 __ JumpIfSmi(eax, if_false);
3395 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3396 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3397 Split(above_equal, if_true, if_false, fall_through);
3399 context()->Plug(if_true, if_false);
3403 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3404 ZoneList<Expression*>* args = expr->arguments();
3405 DCHECK(args->length() == 1);
3407 VisitForAccumulatorValue(args->at(0));
3409 Label materialize_true, materialize_false;
3410 Label* if_true = NULL;
3411 Label* if_false = NULL;
3412 Label* fall_through = NULL;
3413 context()->PrepareTest(&materialize_true, &materialize_false,
3414 &if_true, &if_false, &fall_through);
3416 __ JumpIfSmi(eax, if_false);
3417 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3418 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3419 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3420 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3421 Split(not_zero, if_true, if_false, fall_through);
3423 context()->Plug(if_true, if_false);
3427 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3428 CallRuntime* expr) {
3429 ZoneList<Expression*>* args = expr->arguments();
3430 DCHECK(args->length() == 1);
3432 VisitForAccumulatorValue(args->at(0));
3434 Label materialize_true, materialize_false, skip_lookup;
3435 Label* if_true = NULL;
3436 Label* if_false = NULL;
3437 Label* fall_through = NULL;
3438 context()->PrepareTest(&materialize_true, &materialize_false,
3439 &if_true, &if_false, &fall_through);
3441 __ AssertNotSmi(eax);
  // Check whether this map has already been checked to be safe for default
  // valueOf.
3445 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3446 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3447 1 << Map::kStringWrapperSafeForDefaultValueOf);
3448 __ j(not_zero, &skip_lookup);
3450 // Check for fast case object. Return false for slow case objects.
3451 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3452 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3453 __ cmp(ecx, isolate()->factory()->hash_table_map());
3454 __ j(equal, if_false);
3456 // Look for valueOf string in the descriptor array, and indicate false if
3457 // found. Since we omit an enumeration index check, if it is added via a
3458 // transition that shares its descriptor array, this is a false positive.
3459 Label entry, loop, done;
3461 // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);
3466 __ LoadInstanceDescriptors(ebx, ebx);
3467 // ebx: descriptor array.
3468 // ecx: valid entries in the descriptor array.
3469 // Calculate the end of the descriptor array.
3470 STATIC_ASSERT(kSmiTag == 0);
3471 STATIC_ASSERT(kSmiTagSize == 1);
3472 STATIC_ASSERT(kPointerSize == 4);
3473 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3474 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3475 // Calculate location of the first key name.
3476 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf", the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
3482 __ cmp(edx, isolate()->factory()->value_of_string());
3483 __ j(equal, if_false);
3484 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);
3491 // Reload map as register ebx was used as temporary above.
3492 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3494 // Set the bit in the map to indicate that there is no local valueOf field.
3495 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3496 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3498 __ bind(&skip_lookup);
  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
3502 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3503 __ JumpIfSmi(ecx, if_false);
3504 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3505 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3511 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3512 Split(equal, if_true, if_false, fall_through);
3514 context()->Plug(if_true, if_false);
3518 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3519 ZoneList<Expression*>* args = expr->arguments();
3520 DCHECK(args->length() == 1);
3522 VisitForAccumulatorValue(args->at(0));
3524 Label materialize_true, materialize_false;
3525 Label* if_true = NULL;
3526 Label* if_false = NULL;
3527 Label* fall_through = NULL;
3528 context()->PrepareTest(&materialize_true, &materialize_false,
3529 &if_true, &if_false, &fall_through);
3531 __ JumpIfSmi(eax, if_false);
3532 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3533 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3534 Split(equal, if_true, if_false, fall_through);
3536 context()->Plug(if_true, if_false);
3540 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3541 ZoneList<Expression*>* args = expr->arguments();
3542 DCHECK(args->length() == 1);
3544 VisitForAccumulatorValue(args->at(0));
3546 Label materialize_true, materialize_false;
3547 Label* if_true = NULL;
3548 Label* if_false = NULL;
3549 Label* fall_through = NULL;
3550 context()->PrepareTest(&materialize_true, &materialize_false,
3551 &if_true, &if_false, &fall_through);
3553 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3554 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3555 // Check if the exponent half is 0x80000000. Comparing against 1 and
3556 // checking for overflow is the shortest possible encoding.
3557 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3558 __ j(no_overflow, if_false);
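  // 0x80000000 is the only value for which subtracting 1 overflows here (it
  // is the most negative 32-bit integer), so the overflow flag singles out
  // the upper word of -0.0.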
3559 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3560 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3561 Split(equal, if_true, if_false, fall_through);
3563 context()->Plug(if_true, if_false);
3567 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3568 ZoneList<Expression*>* args = expr->arguments();
3569 DCHECK(args->length() == 1);
3571 VisitForAccumulatorValue(args->at(0));
3573 Label materialize_true, materialize_false;
3574 Label* if_true = NULL;
3575 Label* if_false = NULL;
3576 Label* fall_through = NULL;
3577 context()->PrepareTest(&materialize_true, &materialize_false,
3578 &if_true, &if_false, &fall_through);
3580 __ JumpIfSmi(eax, if_false);
3581 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3582 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3583 Split(equal, if_true, if_false, fall_through);
3585 context()->Plug(if_true, if_false);
3589 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3590 ZoneList<Expression*>* args = expr->arguments();
3591 DCHECK(args->length() == 1);
3593 VisitForAccumulatorValue(args->at(0));
3595 Label materialize_true, materialize_false;
3596 Label* if_true = NULL;
3597 Label* if_false = NULL;
3598 Label* fall_through = NULL;
3599 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3600 &if_false, &fall_through);
3602 __ JumpIfSmi(eax, if_false);
3603 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
3604 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3605 Split(equal, if_true, if_false, fall_through);
3607 context()->Plug(if_true, if_false);
3611 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3612 ZoneList<Expression*>* args = expr->arguments();
3613 DCHECK(args->length() == 1);
3615 VisitForAccumulatorValue(args->at(0));
3617 Label materialize_true, materialize_false;
3618 Label* if_true = NULL;
3619 Label* if_false = NULL;
3620 Label* fall_through = NULL;
3621 context()->PrepareTest(&materialize_true, &materialize_false,
3622 &if_true, &if_false, &fall_through);
3624 __ JumpIfSmi(eax, if_false);
3625 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3626 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3627 Split(equal, if_true, if_false, fall_through);
3629 context()->Plug(if_true, if_false);
3633 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3634 ZoneList<Expression*>* args = expr->arguments();
3635 DCHECK(args->length() == 1);
3637 VisitForAccumulatorValue(args->at(0));
3639 Label materialize_true, materialize_false;
3640 Label* if_true = NULL;
3641 Label* if_false = NULL;
3642 Label* fall_through = NULL;
3643 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3644 &if_false, &fall_through);
3646 __ JumpIfSmi(eax, if_false);
3647 Register map = ebx;
3648 __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
3649 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3650 __ j(less, if_false);
3651 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3652 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3653 Split(less_equal, if_true, if_false, fall_through);
3655 context()->Plug(if_true, if_false);
3659 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3660 DCHECK(expr->arguments()->length() == 0);
3662 Label materialize_true, materialize_false;
3663 Label* if_true = NULL;
3664 Label* if_false = NULL;
3665 Label* fall_through = NULL;
3666 context()->PrepareTest(&materialize_true, &materialize_false,
3667 &if_true, &if_false, &fall_through);
3669 // Get the frame pointer for the calling frame.
3670 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
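// Roughly, the frame walk below assumes this layout:
//   eax = [ebp + kCallerFPOffset]          ;; calling frame's fp
//   if [eax + kContextOffset] == Smi(ARGUMENTS_ADAPTOR)
//     eax = [eax + kCallerFPOffset]        ;; skip the adaptor frame
//   answer: [eax + kMarkerOffset] == Smi(CONSTRUCT)
// i.e. adaptor frames are tagged in their context slot and construct
// stub frames in their marker slot.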
3672 // Skip the arguments adaptor frame if it exists.
3673 Label check_frame_marker;
3674 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3675 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3676 __ j(not_equal, &check_frame_marker);
3677 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3679 // Check the marker in the calling frame.
3680 __ bind(&check_frame_marker);
3681 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3682 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3683 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3684 Split(equal, if_true, if_false, fall_through);
3686 context()->Plug(if_true, if_false);
3690 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3691 ZoneList<Expression*>* args = expr->arguments();
3692 DCHECK(args->length() == 2);
3694 // Load the two objects into registers and perform the comparison.
3695 VisitForStackValue(args->at(0));
3696 VisitForAccumulatorValue(args->at(1));
3698 Label materialize_true, materialize_false;
3699 Label* if_true = NULL;
3700 Label* if_false = NULL;
3701 Label* fall_through = NULL;
3702 context()->PrepareTest(&materialize_true, &materialize_false,
3703 &if_true, &if_false, &fall_through);
3707 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3708 Split(equal, if_true, if_false, fall_through);
3710 context()->Plug(if_true, if_false);
3714 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3715 ZoneList<Expression*>* args = expr->arguments();
3716 DCHECK(args->length() == 1);
3718 // ArgumentsAccessStub expects the key in edx and the formal
3719 // parameter count in eax.
3720 VisitForAccumulatorValue(args->at(0));
3721 __ mov(edx, eax);
3722 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3723 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3724 __ CallStub(&stub);
3725 context()->Plug(eax);
3729 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3730 DCHECK(expr->arguments()->length() == 0);
3732 Label exit;
3733 // Get the number of formal parameters.
3734 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3736 // Check if the calling frame is an arguments adaptor frame.
3737 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3738 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3739 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3740 __ j(not_equal, &exit);
3742 // Arguments adaptor case: Read the arguments length from the
3743 // adaptor frame.
3744 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3746 __ bind(&exit);
3747 __ AssertSmi(eax);
3748 context()->Plug(eax);
3752 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3753 ZoneList<Expression*>* args = expr->arguments();
3754 DCHECK(args->length() == 1);
3755 Label done, null, function, non_function_constructor;
3757 VisitForAccumulatorValue(args->at(0));
3759 // If the object is a smi, we return null.
3760 __ JumpIfSmi(eax, &null);
3762 // Check that the object is a JS object but take special care of JS
3763 // functions to make sure they have 'Function' as their class.
3764 // Assume that there are only two callable types, one at each end of the
3765 // type range for JS object types. This saves extra comparisons.
3766 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3767 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3768 // Map is now in eax.
3769 __ j(below, &null);
3770 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3771 FIRST_SPEC_OBJECT_TYPE + 1);
3772 __ j(equal, &function);
3774 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3775 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3776 LAST_SPEC_OBJECT_TYPE - 1);
3777 __ j(equal, &function);
3778 // Assume that there is no larger type.
3779 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
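// In other words, the instance type space is assumed to look like
//   FIRST_SPEC_OBJECT_TYPE                                   (callable)
//   FIRST_SPEC_OBJECT_TYPE + 1 .. LAST_SPEC_OBJECT_TYPE - 1  (non-callable)
//   LAST_SPEC_OBJECT_TYPE == LAST_TYPE                       (callable)
// so one equality check at each end of the range classifies both callable
// types, and anything below the range is not a JS object at all.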
3781 // Check if the constructor in the map is a JS function.
3782 __ GetMapConstructor(eax, eax, ebx);
3783 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3784 __ j(not_equal, &non_function_constructor);
3786 // eax now contains the constructor function. Grab the
3787 // instance class name from there.
3788 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3789 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3790 __ jmp(&done, Label::kNear);
3792 // Functions have class 'Function'.
3793 __ bind(&function);
3794 __ mov(eax, isolate()->factory()->Function_string());
3795 __ jmp(&done, Label::kNear);
3797 // Objects with a non-function constructor have class 'Object'.
3798 __ bind(&non_function_constructor);
3799 __ mov(eax, isolate()->factory()->Object_string());
3800 __ jmp(&done, Label::kNear);
3802 // Non-JS objects have class null.
3803 __ bind(&null);
3804 __ mov(eax, isolate()->factory()->null_value());
3806 // All done.
3807 __ bind(&done);
3809 context()->Plug(eax);
3813 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3814 // Load the arguments on the stack and call the stub.
3815 SubStringStub stub(isolate());
3816 ZoneList<Expression*>* args = expr->arguments();
3817 DCHECK(args->length() == 3);
3818 VisitForStackValue(args->at(0));
3819 VisitForStackValue(args->at(1));
3820 VisitForStackValue(args->at(2));
3822 context()->Plug(eax);
3826 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3827 // Load the arguments on the stack and call the stub.
3828 RegExpExecStub stub(isolate());
3829 ZoneList<Expression*>* args = expr->arguments();
3830 DCHECK(args->length() == 4);
3831 VisitForStackValue(args->at(0));
3832 VisitForStackValue(args->at(1));
3833 VisitForStackValue(args->at(2));
3834 VisitForStackValue(args->at(3));
3836 context()->Plug(eax);
3840 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3841 ZoneList<Expression*>* args = expr->arguments();
3842 DCHECK(args->length() == 1);
3844 VisitForAccumulatorValue(args->at(0)); // Load the object.
3846 Label done;
3847 // If the object is a smi return the object.
3848 __ JumpIfSmi(eax, &done, Label::kNear);
3849 // If the object is not a value type, return the object.
3850 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3851 __ j(not_equal, &done, Label::kNear);
3852 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3854 __ bind(&done);
3855 context()->Plug(eax);
3859 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3860 ZoneList<Expression*>* args = expr->arguments();
3861 DCHECK_EQ(1, args->length());
3863 VisitForAccumulatorValue(args->at(0));
3865 Label materialize_true, materialize_false;
3866 Label* if_true = nullptr;
3867 Label* if_false = nullptr;
3868 Label* fall_through = nullptr;
3869 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3870 &if_false, &fall_through);
3872 __ JumpIfSmi(eax, if_false);
3873 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
3874 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3875 Split(equal, if_true, if_false, fall_through);
3877 context()->Plug(if_true, if_false);
3881 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3882 ZoneList<Expression*>* args = expr->arguments();
3883 DCHECK(args->length() == 2);
3884 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3885 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3887 VisitForAccumulatorValue(args->at(0)); // Load the object.
3889 Register object = eax;
3890 Register result = eax;
3891 Register scratch = ecx;
3893 if (index->value() == 0) {
3894 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3895 } else {
3896 Label runtime, done;
3897 if (index->value() < JSDate::kFirstUncachedField) {
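// Fast-path sketch: a JSDate caches its broken-down fields (year, month,
// day, ...) together with the stamp of the isolate's date cache. The
// global stamp is bumped whenever cached date state is invalidated
// (e.g. a timezone change), so a matching stamp below means the cached
// field can be read directly; otherwise we fall through to the runtime.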
3898 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3899 __ mov(scratch, Operand::StaticVariable(stamp));
3900 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3901 __ j(not_equal, &runtime, Label::kNear);
3902 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3903 kPointerSize * index->value()));
3904 __ jmp(&done, Label::kNear);
3905 }
3906 __ bind(&runtime);
3907 __ PrepareCallCFunction(2, scratch);
3908 __ mov(Operand(esp, 0), object);
3909 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3910 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3911 __ bind(&done);
3912 }
3914 context()->Plug(result);
3918 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3919 ZoneList<Expression*>* args = expr->arguments();
3920 DCHECK_EQ(3, args->length());
3922 Register string = eax;
3923 Register index = ebx;
3924 Register value = ecx;
3926 VisitForStackValue(args->at(0)); // index
3927 VisitForStackValue(args->at(1)); // value
3928 VisitForAccumulatorValue(args->at(2)); // string
3930 __ pop(value);
3931 __ pop(index);
3933 if (FLAG_debug_code) {
3934 __ test(value, Immediate(kSmiTagMask));
3935 __ Check(zero, kNonSmiValue);
3936 __ test(index, Immediate(kSmiTagMask));
3937 __ Check(zero, kNonSmiValue);
3938 }
3940 __ SmiUntag(value);
3941 __ SmiUntag(index);
3943 if (FLAG_debug_code) {
3944 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3945 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3946 }
3948 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3949 value);
3950 context()->Plug(string);
3954 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3955 ZoneList<Expression*>* args = expr->arguments();
3956 DCHECK_EQ(3, args->length());
3958 Register string = eax;
3959 Register index = ebx;
3960 Register value = ecx;
3962 VisitForStackValue(args->at(0)); // index
3963 VisitForStackValue(args->at(1)); // value
3964 VisitForAccumulatorValue(args->at(2)); // string
3966 __ pop(value);
3967 __ pop(index);
3968 if (FLAG_debug_code) {
3969 __ test(value, Immediate(kSmiTagMask));
3970 __ Check(zero, kNonSmiValue);
3971 __ test(index, Immediate(kSmiTagMask));
3972 __ Check(zero, kNonSmiValue);
3973 __ SmiUntag(index);
3974 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3975 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3976 __ SmiTag(index);
3977 }
3979 __ SmiUntag(value);
3980 // No need to untag a smi for two-byte addressing.
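// Sketch of why: a smi is the integer shifted left by one
// (kSmiTagSize == 1), and a two-byte character at index i lives at byte
// offset 2 * i from the first character, so the tagged index already
// equals the byte offset. The one-byte variant above has to SmiUntag the
// index first.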
3981 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3982 value);
3983 context()->Plug(string);
3987 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3988 // Load the arguments on the stack and call the runtime function.
3989 ZoneList<Expression*>* args = expr->arguments();
3990 DCHECK(args->length() == 2);
3991 VisitForStackValue(args->at(0));
3992 VisitForStackValue(args->at(1));
3994 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3995 __ CallStub(&stub);
3996 context()->Plug(eax);
4000 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
4001 ZoneList<Expression*>* args = expr->arguments();
4002 DCHECK(args->length() == 2);
4004 VisitForStackValue(args->at(0)); // Load the object.
4005 VisitForAccumulatorValue(args->at(1)); // Load the value.
4006 __ pop(ebx); // eax = value. ebx = object.
4008 Label done;
4009 // If the object is a smi, return the value.
4010 __ JumpIfSmi(ebx, &done, Label::kNear);
4012 // If the object is not a value type, return the value.
4013 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
4014 __ j(not_equal, &done, Label::kNear);
4016 // Store the value.
4017 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
4019 // Update the write barrier. Save the value as it will be
4020 // overwritten by the write barrier code and is needed afterward.
4021 __ mov(edx, eax);
4022 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
4024 __ bind(&done);
4025 context()->Plug(eax);
4029 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4030 ZoneList<Expression*>* args = expr->arguments();
4031 DCHECK_EQ(args->length(), 1);
4033 // Load the argument into eax and call the stub.
4034 VisitForAccumulatorValue(args->at(0));
4036 NumberToStringStub stub(isolate());
4037 __ CallStub(&stub);
4038 context()->Plug(eax);
4042 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4043 ZoneList<Expression*>* args = expr->arguments();
4044 DCHECK(args->length() == 1);
4046 VisitForAccumulatorValue(args->at(0));
4048 Label done;
4049 StringCharFromCodeGenerator generator(eax, ebx);
4050 generator.GenerateFast(masm_);
4051 __ jmp(&done);
4053 NopRuntimeCallHelper call_helper;
4054 generator.GenerateSlow(masm_, call_helper);
4056 __ bind(&done);
4057 context()->Plug(ebx);
4061 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4062 ZoneList<Expression*>* args = expr->arguments();
4063 DCHECK(args->length() == 2);
4065 VisitForStackValue(args->at(0));
4066 VisitForAccumulatorValue(args->at(1));
4068 Register object = ebx;
4069 Register index = eax;
4070 Register result = edx;
4072 __ pop(object);
4074 Label need_conversion;
4075 Label index_out_of_range;
4076 Label done;
4077 StringCharCodeAtGenerator generator(object,
4078 index,
4079 result,
4080 &need_conversion,
4081 &need_conversion,
4082 &index_out_of_range,
4083 STRING_INDEX_IS_NUMBER);
4084 generator.GenerateFast(masm_);
4085 __ jmp(&done);
4087 __ bind(&index_out_of_range);
4088 // When the index is out of range, the spec requires us to return
4089 // NaN.
4090 __ Move(result, Immediate(isolate()->factory()->nan_value()));
4091 __ jmp(&done);
4093 __ bind(&need_conversion);
4094 // Move the undefined value into the result register, which will
4095 // trigger conversion.
4096 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
4097 __ jmp(&done);
4099 NopRuntimeCallHelper call_helper;
4100 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4102 __ bind(&done);
4103 context()->Plug(result);
4107 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4108 ZoneList<Expression*>* args = expr->arguments();
4109 DCHECK(args->length() == 2);
4111 VisitForStackValue(args->at(0));
4112 VisitForAccumulatorValue(args->at(1));
4114 Register object = ebx;
4115 Register index = eax;
4116 Register scratch = edx;
4117 Register result = eax;
4119 __ pop(object);
4121 Label need_conversion;
4122 Label index_out_of_range;
4123 Label done;
4124 StringCharAtGenerator generator(object,
4125 index,
4126 scratch,
4127 result,
4128 &need_conversion,
4129 &need_conversion,
4130 &index_out_of_range,
4131 STRING_INDEX_IS_NUMBER);
4132 generator.GenerateFast(masm_);
4133 __ jmp(&done);
4135 __ bind(&index_out_of_range);
4136 // When the index is out of range, the spec requires us to return
4137 // the empty string.
4138 __ Move(result, Immediate(isolate()->factory()->empty_string()));
4139 __ jmp(&done);
4141 __ bind(&need_conversion);
4142 // Move smi zero into the result register, which will trigger
4143 // conversion.
4144 __ Move(result, Immediate(Smi::FromInt(0)));
4145 __ jmp(&done);
4147 NopRuntimeCallHelper call_helper;
4148 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4150 __ bind(&done);
4151 context()->Plug(result);
4155 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4156 ZoneList<Expression*>* args = expr->arguments();
4157 DCHECK_EQ(2, args->length());
4158 VisitForStackValue(args->at(0));
4159 VisitForAccumulatorValue(args->at(1));
4161 __ pop(edx);
4162 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4163 __ CallStub(&stub);
4164 context()->Plug(eax);
4168 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4169 ZoneList<Expression*>* args = expr->arguments();
4170 DCHECK_EQ(2, args->length());
4172 VisitForStackValue(args->at(0));
4173 VisitForStackValue(args->at(1));
4175 StringCompareStub stub(isolate());
4176 __ CallStub(&stub);
4177 context()->Plug(eax);
4181 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4182 ZoneList<Expression*>* args = expr->arguments();
4183 DCHECK(args->length() >= 2);
4185 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4186 for (int i = 0; i < arg_count + 1; ++i) {
4187 VisitForStackValue(args->at(i));
4189 VisitForAccumulatorValue(args->last()); // Function.
4191 Label runtime, done;
4192 // Check for non-function argument (including proxy).
4193 __ JumpIfSmi(eax, &runtime);
4194 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4195 __ j(not_equal, &runtime);
4197 // InvokeFunction requires the function in edi. Move it in there.
4198 __ mov(edi, result_register());
4199 ParameterCount count(arg_count);
4200 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
4201 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4202 __ jmp(&done);
4204 __ bind(&runtime);
4205 __ push(eax);
4206 __ CallRuntime(Runtime::kCall, args->length());
4207 __ bind(&done);
4209 context()->Plug(eax);
4213 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4214 ZoneList<Expression*>* args = expr->arguments();
4215 DCHECK(args->length() == 2);
4217 // new.target
4218 VisitForStackValue(args->at(0));
4220 // .this_function
4221 VisitForStackValue(args->at(1));
4222 __ CallRuntime(Runtime::kGetPrototype, 1);
4223 __ push(result_register());
4225 // Load original constructor into ecx.
4226 __ mov(ecx, Operand(esp, 1 * kPointerSize));
4228 // Check if the calling frame is an arguments adaptor frame.
4229 Label adaptor_frame, args_set_up, runtime;
4230 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4231 __ mov(ebx, Operand(edx, StandardFrameConstants::kContextOffset));
4232 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4233 __ j(equal, &adaptor_frame);
4234 // A default constructor has no arguments, so no adaptor frame means no args.
4235 __ mov(eax, Immediate(0));
4236 __ jmp(&args_set_up);
4238 // Copy arguments from adaptor frame.
4240 __ bind(&adaptor_frame);
4241 __ mov(ebx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4242 __ SmiUntag(ebx);
4243 __ mov(eax, ebx);
4245 __ lea(edx, Operand(edx, ebx, times_pointer_size,
4246 StandardFrameConstants::kCallerSPOffset));
4247 Label loop;
4248 __ bind(&loop);
4249 __ push(Operand(edx, -1 * kPointerSize));
4250 __ sub(edx, Immediate(kPointerSize));
4251 __ dec(ebx);
4252 __ j(not_zero, &loop);
4255 __ bind(&args_set_up);
4257 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4258 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4259 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4260 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4262 __ Drop(1);
4264 context()->Plug(eax);
4268 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4269 // Load the arguments on the stack and call the stub.
4270 RegExpConstructResultStub stub(isolate());
4271 ZoneList<Expression*>* args = expr->arguments();
4272 DCHECK(args->length() == 3);
4273 VisitForStackValue(args->at(0));
4274 VisitForStackValue(args->at(1));
4275 VisitForAccumulatorValue(args->at(2));
4276 __ pop(ebx);
4277 __ pop(ecx);
4278 __ CallStub(&stub);
4279 context()->Plug(eax);
4283 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4284 ZoneList<Expression*>* args = expr->arguments();
4285 DCHECK_EQ(2, args->length());
4287 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4288 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4290 Handle<FixedArray> jsfunction_result_caches(
4291 isolate()->native_context()->jsfunction_result_caches());
4292 if (jsfunction_result_caches->length() <= cache_id) {
4293 __ Abort(kAttemptToUseUndefinedCache);
4294 __ mov(eax, isolate()->factory()->undefined_value());
4295 context()->Plug(eax);
4296 return;
4297 }
4299 VisitForAccumulatorValue(args->at(1));
4301 Register key = eax;
4302 Register cache = ebx;
4303 Register tmp = ecx;
4304 __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
4305 __ mov(cache,
4306 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4307 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4308 __ mov(cache,
4309 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4311 Label done, not_found;
4312 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4313 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4314 // tmp now holds finger offset as a smi.
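// Cache layout sketch (a JSFunctionResultCache is a FixedArray): entries
// are (key, value) pairs, and the finger is the smi-encoded offset of the
// most recently hit key. The operand below addresses the key slot at the
// finger; the same operand with one extra element of displacement (the
// '1' further down) reads the matching value slot.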
4315 __ cmp(key, FixedArrayElementOperand(cache, tmp));
4316 __ j(not_equal, &not_found);
4318 __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
4319 __ jmp(&done, Label::kNear);
4321 __ bind(&not_found);
4322 // Call runtime to perform the lookup.
4323 __ push(cache);
4324 __ push(key);
4325 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4327 __ bind(&done);
4328 context()->Plug(eax);
4332 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4333 ZoneList<Expression*>* args = expr->arguments();
4334 DCHECK(args->length() == 1);
4336 VisitForAccumulatorValue(args->at(0));
4338 __ AssertString(eax);
4340 Label materialize_true, materialize_false;
4341 Label* if_true = NULL;
4342 Label* if_false = NULL;
4343 Label* fall_through = NULL;
4344 context()->PrepareTest(&materialize_true, &materialize_false,
4345 &if_true, &if_false, &fall_through);
4347 __ test(FieldOperand(eax, String::kHashFieldOffset),
4348 Immediate(String::kContainsCachedArrayIndexMask));
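// The hash field does double duty: for strings that are valid array
// indices the parsed index is cached in the field, and the bits in
// kContainsCachedArrayIndexMask are all zero in exactly that case, so
// Split(zero, ...) below answers "has a cached index" directly.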
4349 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4350 Split(zero, if_true, if_false, fall_through);
4352 context()->Plug(if_true, if_false);
4356 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4357 ZoneList<Expression*>* args = expr->arguments();
4358 DCHECK(args->length() == 1);
4359 VisitForAccumulatorValue(args->at(0));
4361 __ AssertString(eax);
4363 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
4364 __ IndexFromHash(eax, eax);
4366 context()->Plug(eax);
4370 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4371 Label bailout, done, one_char_separator, long_separator,
4372 non_trivial_array, not_size_one_array, loop,
4373 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4375 ZoneList<Expression*>* args = expr->arguments();
4376 DCHECK(args->length() == 2);
4377 // We will leave the separator on the stack until the end of the function.
4378 VisitForStackValue(args->at(1));
4379 // Load this to eax (= array)
4380 VisitForAccumulatorValue(args->at(0));
4381 // All aliases of the same register have disjoint lifetimes.
4382 Register array = eax;
4383 Register elements = no_reg; // Will be eax.
4385 Register index = edx;
4387 Register string_length = ecx;
4389 Register string = esi;
4391 Register scratch = ebx;
4393 Register array_length = edi;
4394 Register result_pos = no_reg; // Will be edi.
4396 // Separator operand is already pushed.
4397 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4398 Operand result_operand = Operand(esp, 1 * kPointerSize);
4399 Operand array_length_operand = Operand(esp, 0);
4400 __ sub(esp, Immediate(2 * kPointerSize));
4401 __ cld();
4402 // Check that the array is a JSArray
4403 __ JumpIfSmi(array, &bailout);
4404 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4405 __ j(not_equal, &bailout);
4407 // Check that the array has fast elements.
4408 __ CheckFastElements(scratch, &bailout);
4410 // If the array has length zero, return the empty string.
4411 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4412 __ SmiUntag(array_length);
4413 __ j(not_zero, &non_trivial_array);
4414 __ mov(result_operand, isolate()->factory()->empty_string());
4415 __ jmp(&done);
4417 // Save the array length.
4418 __ bind(&non_trivial_array);
4419 __ mov(array_length_operand, array_length);
4421 // Save the FixedArray containing array's elements.
4422 // End of array's live range.
4423 elements = array;
4424 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4425 array = no_reg;
4428 // Check that all array elements are sequential one-byte strings, and
4429 // accumulate the sum of their lengths, as a smi-encoded value.
4430 __ Move(index, Immediate(0));
4431 __ Move(string_length, Immediate(0));
4432 // Loop condition: while (index < length).
4433 // Live loop registers: index, array_length, string,
4434 // scratch, string_length, elements.
4435 if (generate_debug_code_) {
4436 __ cmp(index, array_length);
4437 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4438 }
4439 __ bind(&loop);
4440 __ mov(string, FieldOperand(elements,
4441 index,
4442 times_pointer_size,
4443 FixedArray::kHeaderSize));
4444 __ JumpIfSmi(string, &bailout);
4445 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4446 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4447 __ and_(scratch, Immediate(
4448 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4449 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4450 __ j(not_equal, &bailout);
4451 __ add(string_length,
4452 FieldOperand(string, SeqOneByteString::kLengthOffset));
4453 __ j(overflow, &bailout);
4454 __ add(index, Immediate(1));
4455 __ cmp(index, array_length);
4456 __ j(less, &loop);
4458 // If array_length is 1, return elements[0], a string.
4459 __ cmp(array_length, 1);
4460 __ j(not_equal, &not_size_one_array);
4461 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4462 __ mov(result_operand, scratch);
4463 __ jmp(&done);
4465 __ bind(&not_size_one_array);
4467 // End of array_length live range.
4468 result_pos = array_length;
4469 array_length = no_reg;
4471 // Live registers:
4472 // string_length: Sum of string lengths, as a smi.
4473 // elements: FixedArray of strings.
4475 // Check that the separator is a flat one-byte string.
4476 __ mov(string, separator_operand);
4477 __ JumpIfSmi(string, &bailout);
4478 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4479 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4480 __ and_(scratch, Immediate(
4481 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4482 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4483 __ j(not_equal, &bailout);
4485 // Add (separator length times array_length) - separator length
4486 // to string_length.
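// Worked example (all lengths are smis, i.e. doubled): joining three
// strings of total length 4 with a two-character separator computes
//   string_length = 8 - 4 + 4 * 3 = 16, i.e. smi(8) = 4 + 2 * (3 - 1)
// characters. The subtraction may leave a temporarily negative value,
// which is fine as long as no individual step overflows.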
4487 __ mov(scratch, separator_operand);
4488 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4489 __ sub(string_length, scratch); // May be negative, temporarily.
4490 __ imul(scratch, array_length_operand);
4491 __ j(overflow, &bailout);
4492 __ add(string_length, scratch);
4493 __ j(overflow, &bailout);
4495 __ shr(string_length, 1);
4496 // Live registers and stack values:
4497 // string_length
4498 // elements
4499 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4500 &bailout);
4501 __ mov(result_operand, result_pos);
4502 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4505 __ mov(string, separator_operand);
4506 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4507 Immediate(Smi::FromInt(1)));
4508 __ j(equal, &one_char_separator);
4509 __ j(greater, &long_separator);
4512 // Empty separator case
4513 __ mov(index, Immediate(0));
4514 __ jmp(&loop_1_condition);
4515 // Loop condition: while (index < length).
4516 __ bind(&loop_1);
4517 // Each iteration of the loop concatenates one string to the result.
4518 // Live values in registers:
4519 // index: which element of the elements array we are adding to the result.
4520 // result_pos: the position to which we are currently copying characters.
4521 // elements: the FixedArray of strings we are joining.
4523 // Get string = array[index].
4524 __ mov(string, FieldOperand(elements, index,
4525 times_pointer_size,
4526 FixedArray::kHeaderSize));
4527 __ mov(string_length,
4528 FieldOperand(string, String::kLengthOffset));
4529 __ shr(string_length, 1);
4530 __ lea(string,
4531 FieldOperand(string, SeqOneByteString::kHeaderSize));
4532 __ CopyBytes(string, result_pos, string_length, scratch);
4533 __ add(index, Immediate(1));
4534 __ bind(&loop_1_condition);
4535 __ cmp(index, array_length_operand);
4536 __ j(less, &loop_1); // End while (index < length).
4537 __ jmp(&done);
4541 // One-character separator case
4542 __ bind(&one_char_separator);
4543 // Replace separator with its one-byte character value.
4544 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4545 __ mov_b(separator_operand, scratch);
4547 __ Move(index, Immediate(0));
4548 // Jump into the loop after the code that copies the separator, so the first
4549 // element is not preceded by a separator
4550 __ jmp(&loop_2_entry);
4551 // Loop condition: while (index < length).
4552 __ bind(&loop_2);
4553 // Each iteration of the loop concatenates one string to the result.
4554 // Live values in registers:
4555 // index: which element of the elements array we are adding to the result.
4556 // result_pos: the position to which we are currently copying characters.
4558 // Copy the separator character to the result.
4559 __ mov_b(scratch, separator_operand);
4560 __ mov_b(Operand(result_pos, 0), scratch);
4561 __ inc(result_pos);
4563 __ bind(&loop_2_entry);
4564 // Get string = array[index].
4565 __ mov(string, FieldOperand(elements, index,
4566 times_pointer_size,
4567 FixedArray::kHeaderSize));
4568 __ mov(string_length,
4569 FieldOperand(string, String::kLengthOffset));
4570 __ shr(string_length, 1);
4571 __ lea(string,
4572 FieldOperand(string, SeqOneByteString::kHeaderSize));
4573 __ CopyBytes(string, result_pos, string_length, scratch);
4574 __ add(index, Immediate(1));
4576 __ cmp(index, array_length_operand);
4577 __ j(less, &loop_2); // End while (index < length).
4578 __ jmp(&done);
4581 // Long separator case (separator is more than one character).
4582 __ bind(&long_separator);
4584 __ Move(index, Immediate(0));
4585 // Jump into the loop after the code that copies the separator, so the first
4586 // element is not preceded by a separator
4587 __ jmp(&loop_3_entry);
4588 // Loop condition: while (index < length).
4589 __ bind(&loop_3);
4590 // Each iteration of the loop concatenates one string to the result.
4591 // Live values in registers:
4592 // index: which element of the elements array we are adding to the result.
4593 // result_pos: the position to which we are currently copying characters.
4595 // Copy the separator to the result.
4596 __ mov(string, separator_operand);
4597 __ mov(string_length,
4598 FieldOperand(string, String::kLengthOffset));
4599 __ shr(string_length, 1);
4600 __ lea(string,
4601 FieldOperand(string, SeqOneByteString::kHeaderSize));
4602 __ CopyBytes(string, result_pos, string_length, scratch);
4604 __ bind(&loop_3_entry);
4605 // Get string = array[index].
4606 __ mov(string, FieldOperand(elements, index,
4607 times_pointer_size,
4608 FixedArray::kHeaderSize));
4609 __ mov(string_length,
4610 FieldOperand(string, String::kLengthOffset));
4611 __ shr(string_length, 1);
4612 __ lea(string,
4613 FieldOperand(string, SeqOneByteString::kHeaderSize));
4614 __ CopyBytes(string, result_pos, string_length, scratch);
4615 __ add(index, Immediate(1));
4617 __ cmp(index, array_length_operand);
4618 __ j(less, &loop_3); // End while (index < length).
4619 __ jmp(&done);
4622 __ bind(&bailout);
4623 __ mov(result_operand, isolate()->factory()->undefined_value());
4624 __ bind(&done);
4625 __ mov(eax, result_operand);
4626 // Drop temp values from the stack, and restore context register.
4627 __ add(esp, Immediate(3 * kPointerSize));
4629 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4630 context()->Plug(eax);
4634 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4635 DCHECK(expr->arguments()->length() == 0);
4636 ExternalReference debug_is_active =
4637 ExternalReference::debug_is_active_address(isolate());
4638 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4639 __ SmiTag(eax);
4640 context()->Plug(eax);
4644 void FullCodeGenerator::EmitCallSuperWithSpread(CallRuntime* expr) {
4645 // Assert: expr == CallRuntime("ReflectConstruct")
4646 DCHECK_EQ(1, expr->arguments()->length());
4647 CallRuntime* call = expr->arguments()->at(0)->AsCallRuntime();
4649 ZoneList<Expression*>* args = call->arguments();
4650 DCHECK_EQ(3, args->length());
4652 SuperCallReference* super_call_ref = args->at(0)->AsSuperCallReference();
4653 DCHECK_NOT_NULL(super_call_ref);
4655 // Load ReflectConstruct function
4656 EmitLoadJSRuntimeFunction(call);
4658 // Push the target function under the receiver
4659 __ push(Operand(esp, 0));
4660 __ mov(Operand(esp, kPointerSize), eax);
4662 // Push super constructor
4663 EmitLoadSuperConstructor(super_call_ref);
4664 __ Push(result_register());
4666 // Push arguments array
4667 VisitForStackValue(args->at(1));
4669 // Push NewTarget
4670 DCHECK(args->at(2)->IsVariableProxy());
4671 VisitForStackValue(args->at(2));
4673 EmitCallJSRuntimeFunction(call);
4675 // Restore context register.
4676 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4677 context()->DropAndPlug(1, eax);
4679 // TODO(mvstanton): with FLAG_vector_stores this needs a slot id.
4680 EmitInitializeThisAfterSuper(super_call_ref);
4684 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4685 // Push the builtins object as receiver.
4686 __ mov(eax, GlobalObjectOperand());
4687 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4689 // Load the function from the receiver.
4690 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4691 __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
4692 __ mov(LoadDescriptor::SlotRegister(),
4693 Immediate(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4694 CallLoadIC(NOT_INSIDE_TYPEOF);
4698 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4699 ZoneList<Expression*>* args = expr->arguments();
4700 int arg_count = args->length();
4702 SetCallPosition(expr, arg_count);
4703 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4704 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4705 __ CallStub(&stub);
4709 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4710 ZoneList<Expression*>* args = expr->arguments();
4711 int arg_count = args->length();
4713 if (expr->is_jsruntime()) {
4714 Comment cmnt(masm_, "[ CallRuntime");
4715 EmitLoadJSRuntimeFunction(expr);
4717 // Push the target function under the receiver.
4718 __ push(Operand(esp, 0));
4719 __ mov(Operand(esp, kPointerSize), eax);
4721 // Push the arguments ("left-to-right").
4722 for (int i = 0; i < arg_count; i++) {
4723 VisitForStackValue(args->at(i));
4726 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4727 EmitCallJSRuntimeFunction(expr);
4729 // Restore context register.
4730 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4731 context()->DropAndPlug(1, eax);
4732 } else {
4734 const Runtime::Function* function = expr->function();
4735 switch (function->function_id) {
4736 #define CALL_INTRINSIC_GENERATOR(Name) \
4737 case Runtime::kInline##Name: { \
4738 Comment cmnt(masm_, "[ Inline" #Name); \
4739 return Emit##Name(expr); \
4740 }
4741 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4742 #undef CALL_INTRINSIC_GENERATOR
4743 default: {
4744 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4745 // Push the arguments ("left-to-right").
4746 for (int i = 0; i < arg_count; i++) {
4747 VisitForStackValue(args->at(i));
4750 // Call the C runtime function.
4751 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4752 __ CallRuntime(expr->function(), arg_count);
4753 context()->Plug(eax);
4760 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4761 switch (expr->op()) {
4762 case Token::DELETE: {
4763 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4764 Property* property = expr->expression()->AsProperty();
4765 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4767 if (property != NULL) {
4768 VisitForStackValue(property->obj());
4769 VisitForStackValue(property->key());
4770 __ push(Immediate(Smi::FromInt(language_mode())));
4771 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4772 context()->Plug(eax);
4773 } else if (proxy != NULL) {
4774 Variable* var = proxy->var();
4775 // Delete of an unqualified identifier is disallowed in strict mode but
4776 // "delete this" is allowed.
4777 bool is_this = var->HasThisName(isolate());
4778 DCHECK(is_sloppy(language_mode()) || is_this);
4779 if (var->IsUnallocatedOrGlobalSlot()) {
4780 __ push(GlobalObjectOperand());
4781 __ push(Immediate(var->name()));
4782 __ push(Immediate(Smi::FromInt(SLOPPY)));
4783 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4784 context()->Plug(eax);
4785 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4786 // Result of deleting non-global variables is false. 'this' is
4787 // not really a variable, though we implement it as one. The
4788 // subexpression does not have side effects.
4789 context()->Plug(is_this);
4790 } else {
4791 // Non-global variable. Call the runtime to try to delete from the
4792 // context where the variable was introduced.
4793 __ push(context_register());
4794 __ push(Immediate(var->name()));
4795 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4796 context()->Plug(eax);
4799 // Result of deleting non-property, non-variable reference is true.
4800 // The subexpression may have side effects.
4801 VisitForEffect(expr->expression());
4802 context()->Plug(true);
4803 }
4804 break;
4805 }
4807 case Token::VOID: {
4808 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4809 VisitForEffect(expr->expression());
4810 context()->Plug(isolate()->factory()->undefined_value());
4811 break;
4812 }
4814 case Token::NOT: {
4815 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4816 if (context()->IsEffect()) {
4817 // Unary NOT has no side effects so it's only necessary to visit the
4818 // subexpression. Match the optimizing compiler by not branching.
4819 VisitForEffect(expr->expression());
4820 } else if (context()->IsTest()) {
4821 const TestContext* test = TestContext::cast(context());
4822 // The labels are swapped for the recursive call.
4823 VisitForControl(expr->expression(),
4824 test->false_label(),
4825 test->true_label(),
4826 test->fall_through());
4827 context()->Plug(test->true_label(), test->false_label());
4829 // We handle value contexts explicitly rather than simply visiting
4830 // for control and plugging the control flow into the context,
4831 // because we need to prepare a pair of extra administrative AST ids
4832 // for the optimizing compiler.
4833 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4834 Label materialize_true, materialize_false, done;
4835 VisitForControl(expr->expression(),
4836 &materialize_false,
4837 &materialize_true,
4838 &materialize_true);
4839 __ bind(&materialize_true);
4840 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4841 if (context()->IsAccumulatorValue()) {
4842 __ mov(eax, isolate()->factory()->true_value());
4843 } else {
4844 __ Push(isolate()->factory()->true_value());
4845 }
4846 __ jmp(&done, Label::kNear);
4847 __ bind(&materialize_false);
4848 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4849 if (context()->IsAccumulatorValue()) {
4850 __ mov(eax, isolate()->factory()->false_value());
4851 } else {
4852 __ Push(isolate()->factory()->false_value());
4853 }
4854 __ bind(&done);
4855 }
4856 break;
4857 }
4859 case Token::TYPEOF: {
4860 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4862 AccumulatorValueContext context(this);
4863 VisitForTypeofValue(expr->expression());
4866 TypeofStub typeof_stub(isolate());
4867 __ CallStub(&typeof_stub);
4868 context()->Plug(eax);
4878 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4879 DCHECK(expr->expression()->IsValidReferenceExpression());
4881 Comment cmnt(masm_, "[ CountOperation");
4883 Property* prop = expr->expression()->AsProperty();
4884 LhsKind assign_type = Property::GetAssignType(prop);
4886 // Evaluate expression and get value.
4887 if (assign_type == VARIABLE) {
4888 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4889 AccumulatorValueContext context(this);
4890 EmitVariableLoad(expr->expression()->AsVariableProxy());
4891 } else {
4892 // Reserve space for result of postfix operation.
4893 if (expr->is_postfix() && !context()->IsEffect()) {
4894 __ push(Immediate(Smi::FromInt(0)));
4896 switch (assign_type) {
4897 case NAMED_PROPERTY: {
4898 // Put the object both on the stack and in the register.
4899 VisitForStackValue(prop->obj());
4900 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4901 EmitNamedPropertyLoad(prop);
4905 case NAMED_SUPER_PROPERTY: {
4906 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4907 VisitForAccumulatorValue(
4908 prop->obj()->AsSuperPropertyReference()->home_object());
4909 __ push(result_register());
4910 __ push(MemOperand(esp, kPointerSize));
4911 __ push(result_register());
4912 EmitNamedSuperPropertyLoad(prop);
4916 case KEYED_SUPER_PROPERTY: {
4917 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4918 VisitForStackValue(
4919 prop->obj()->AsSuperPropertyReference()->home_object());
4920 VisitForAccumulatorValue(prop->key());
4921 __ push(result_register());
4922 __ push(MemOperand(esp, 2 * kPointerSize));
4923 __ push(MemOperand(esp, 2 * kPointerSize));
4924 __ push(result_register());
4925 EmitKeyedSuperPropertyLoad(prop);
4929 case KEYED_PROPERTY: {
4930 VisitForStackValue(prop->obj());
4931 VisitForStackValue(prop->key());
4932 __ mov(LoadDescriptor::ReceiverRegister(),
4933 Operand(esp, kPointerSize)); // Object.
4934 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4935 EmitKeyedPropertyLoad(prop);
4944 // We need a second deoptimization point after loading the value
4945 // in case evaluating the property load may have a side effect.
4946 if (assign_type == VARIABLE) {
4947 PrepareForBailout(expr->expression(), TOS_REG);
4949 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4952 // Inline smi case if we are in a loop.
4953 Label done, stub_call;
4954 JumpPatchSite patch_site(masm_);
4955 if (ShouldInlineSmiCase(expr->op())) {
4956 Label slow;
4957 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4959 // Save result for postfix expressions.
4960 if (expr->is_postfix()) {
4961 if (!context()->IsEffect()) {
4962 // Save the result on the stack. If we have a named or keyed property
4963 // we store the result under the receiver that is currently on top
4964 // of the stack.
4965 switch (assign_type) {
4966 case VARIABLE:
4967 __ push(eax);
4968 break;
4969 case NAMED_PROPERTY:
4970 __ mov(Operand(esp, kPointerSize), eax);
4971 break;
4972 case NAMED_SUPER_PROPERTY:
4973 __ mov(Operand(esp, 2 * kPointerSize), eax);
4974 break;
4975 case KEYED_PROPERTY:
4976 __ mov(Operand(esp, 2 * kPointerSize), eax);
4977 break;
4978 case KEYED_SUPER_PROPERTY:
4979 __ mov(Operand(esp, 3 * kPointerSize), eax);
4980 break;
4981 }
4982 }
4983 }
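// Fast-path sketch for the smi case below: smis carry the integer
// doubled, so Smi::FromInt(1) is the raw constant 2. Incrementing the
// largest smi (or decrementing the smallest) overflows the 32-bit
// register, which the overflow flag detects; the operation is then
// undone and the generic BinaryOpIC stub is called instead.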
4985 if (expr->op() == Token::INC) {
4986 __ add(eax, Immediate(Smi::FromInt(1)));
4987 } else {
4988 __ sub(eax, Immediate(Smi::FromInt(1)));
4989 }
4990 __ j(no_overflow, &done, Label::kNear);
4991 // Call stub. Undo operation first.
4992 if (expr->op() == Token::INC) {
4993 __ sub(eax, Immediate(Smi::FromInt(1)));
4994 } else {
4995 __ add(eax, Immediate(Smi::FromInt(1)));
4996 }
4997 __ jmp(&stub_call, Label::kNear);
4998 __ bind(&slow);
4999 }
5000 if (!is_strong(language_mode())) {
5001 ToNumberStub convert_stub(isolate());
5002 __ CallStub(&convert_stub);
5003 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5006 // Save result for postfix expressions.
5007 if (expr->is_postfix()) {
5008 if (!context()->IsEffect()) {
5009 // Save the result on the stack. If we have a named or keyed property
5010 // we store the result under the receiver that is currently on top
5011 // of the stack.
5012 switch (assign_type) {
5013 case VARIABLE:
5014 __ push(eax);
5015 break;
5016 case NAMED_PROPERTY:
5017 __ mov(Operand(esp, kPointerSize), eax);
5018 break;
5019 case NAMED_SUPER_PROPERTY:
5020 __ mov(Operand(esp, 2 * kPointerSize), eax);
5021 break;
5022 case KEYED_PROPERTY:
5023 __ mov(Operand(esp, 2 * kPointerSize), eax);
5024 break;
5025 case KEYED_SUPER_PROPERTY:
5026 __ mov(Operand(esp, 3 * kPointerSize), eax);
5027 break;
5028 }
5029 }
5030 }
5032 SetExpressionPosition(expr);
5034 // Call stub for +1/-1.
5035 __ bind(&stub_call);
5036 __ mov(edx, eax);
5037 __ mov(eax, Immediate(Smi::FromInt(1)));
5038 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
5039 strength(language_mode())).code();
5040 CallIC(code, expr->CountBinOpFeedbackId());
5041 patch_site.EmitPatchInfo();
5042 __ bind(&done);
5044 if (is_strong(language_mode())) {
5045 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
5047 // Store the value returned in eax.
5048 switch (assign_type) {
5049 case VARIABLE:
5050 if (expr->is_postfix()) {
5051 // Perform the assignment as if via '='.
5052 { EffectContext context(this);
5053 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5054 Token::ASSIGN, expr->CountSlot());
5055 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5058 // For all contexts except EffectContext, we have the result on
5059 // top of the stack.
5060 if (!context()->IsEffect()) {
5061 context()->PlugTOS();
5062 }
5063 } else {
5064 // Perform the assignment as if via '='.
5065 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
5066 Token::ASSIGN, expr->CountSlot());
5067 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5068 context()->Plug(eax);
5069 }
5070 break;
5071 case NAMED_PROPERTY: {
5072 __ mov(StoreDescriptor::NameRegister(),
5073 prop->key()->AsLiteral()->value());
5074 __ pop(StoreDescriptor::ReceiverRegister());
5075 if (FLAG_vector_stores) {
5076 EmitLoadStoreICSlot(expr->CountSlot());
5077 CallStoreIC();
5078 } else {
5079 CallStoreIC(expr->CountStoreFeedbackId());
5080 }
5081 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5082 if (expr->is_postfix()) {
5083 if (!context()->IsEffect()) {
5084 context()->PlugTOS();
5085 }
5086 } else {
5087 context()->Plug(eax);
5088 }
5089 break;
5090 }
5091 case NAMED_SUPER_PROPERTY: {
5092 EmitNamedSuperPropertyStore(prop);
5093 if (expr->is_postfix()) {
5094 if (!context()->IsEffect()) {
5095 context()->PlugTOS();
5096 }
5097 } else {
5098 context()->Plug(eax);
5099 }
5100 break;
5101 }
5102 case KEYED_SUPER_PROPERTY: {
5103 EmitKeyedSuperPropertyStore(prop);
5104 if (expr->is_postfix()) {
5105 if (!context()->IsEffect()) {
5106 context()->PlugTOS();
5107 }
5108 } else {
5109 context()->Plug(eax);
5110 }
5111 break;
5112 }
5113 case KEYED_PROPERTY: {
5114 __ pop(StoreDescriptor::NameRegister());
5115 __ pop(StoreDescriptor::ReceiverRegister());
5116 Handle<Code> ic =
5117 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5118 if (FLAG_vector_stores) {
5119 EmitLoadStoreICSlot(expr->CountSlot());
5120 CallIC(ic);
5121 } else {
5122 CallIC(ic, expr->CountStoreFeedbackId());
5123 }
5124 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5125 if (expr->is_postfix()) {
5126 // Result is on the stack
5127 if (!context()->IsEffect()) {
5128 context()->PlugTOS();
5129 }
5130 } else {
5131 context()->Plug(eax);
5139 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5140 Expression* sub_expr,
5141 Handle<String> check) {
5142 Label materialize_true, materialize_false;
5143 Label* if_true = NULL;
5144 Label* if_false = NULL;
5145 Label* fall_through = NULL;
5146 context()->PrepareTest(&materialize_true, &materialize_false,
5147 &if_true, &if_false, &fall_through);
5149 { AccumulatorValueContext context(this);
5150 VisitForTypeofValue(sub_expr);
5152 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5154 Factory* factory = isolate()->factory();
5155 if (String::Equals(check, factory->number_string())) {
5156 __ JumpIfSmi(eax, if_true);
5157 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
5158 isolate()->factory()->heap_number_map());
5159 Split(equal, if_true, if_false, fall_through);
5160 } else if (String::Equals(check, factory->string_string())) {
5161 __ JumpIfSmi(eax, if_false);
5162 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
5163 __ j(above_equal, if_false);
5164 // Check for undetectable objects => false.
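// "Undetectable" objects (an embedder hook used e.g. for document.all)
// must masquerade as undefined: typeof reports "undefined" for them, so
// they are rejected here and accepted in the undefined_string branch
// further down.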
5165 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5166 1 << Map::kIsUndetectable);
5167 Split(zero, if_true, if_false, fall_through);
5168 } else if (String::Equals(check, factory->symbol_string())) {
5169 __ JumpIfSmi(eax, if_false);
5170 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
5171 Split(equal, if_true, if_false, fall_through);
5172 } else if (String::Equals(check, factory->float32x4_string())) {
5173 __ JumpIfSmi(eax, if_false);
5174 __ CmpObjectType(eax, FLOAT32X4_TYPE, edx);
5175 Split(equal, if_true, if_false, fall_through);
5176 } else if (String::Equals(check, factory->boolean_string())) {
5177 __ cmp(eax, isolate()->factory()->true_value());
5178 __ j(equal, if_true);
5179 __ cmp(eax, isolate()->factory()->false_value());
5180 Split(equal, if_true, if_false, fall_through);
5181 } else if (String::Equals(check, factory->undefined_string())) {
5182 __ cmp(eax, isolate()->factory()->undefined_value());
5183 __ j(equal, if_true);
5184 __ JumpIfSmi(eax, if_false);
5185 // Check for undetectable objects => true.
5186 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
5187 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
5188 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
5189 Split(not_zero, if_true, if_false, fall_through);
5190 } else if (String::Equals(check, factory->function_string())) {
5191 __ JumpIfSmi(eax, if_false);
5192 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5193 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
5194 __ j(equal, if_true);
5195 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
5196 Split(equal, if_true, if_false, fall_through);
5197 } else if (String::Equals(check, factory->object_string())) {
5198 __ JumpIfSmi(eax, if_false);
5199 __ cmp(eax, isolate()->factory()->null_value());
5200 __ j(equal, if_true);
5201 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
5202 __ j(below, if_false);
5203 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5204 __ j(above, if_false);
5205 // Check for undetectable objects => false.
5206 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5207 1 << Map::kIsUndetectable);
5208 Split(zero, if_true, if_false, fall_through);
5209 } else {
5210 if (if_false != fall_through) __ jmp(if_false);
5212 context()->Plug(if_true, if_false);
5216 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5217 Comment cmnt(masm_, "[ CompareOperation");
5218 SetExpressionPosition(expr);
5220 // First we try a fast inlined version of the compare when one of
5221 // the operands is a literal.
5222 if (TryLiteralCompare(expr)) return;
5224 // Always perform the comparison for its control flow. Pack the result
5225 // into the expression's context after the comparison is performed.
5226 Label materialize_true, materialize_false;
5227 Label* if_true = NULL;
5228 Label* if_false = NULL;
5229 Label* fall_through = NULL;
5230 context()->PrepareTest(&materialize_true, &materialize_false,
5231 &if_true, &if_false, &fall_through);
5233 Token::Value op = expr->op();
5234 VisitForStackValue(expr->left());
5235 switch (op) {
5236 case Token::IN:
5237 VisitForStackValue(expr->right());
5238 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5239 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5240 __ cmp(eax, isolate()->factory()->true_value());
5241 Split(equal, if_true, if_false, fall_through);
5242 break;
5244 case Token::INSTANCEOF: {
5245 VisitForStackValue(expr->right());
5246 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5247 __ CallStub(&stub);
5248 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5249 __ test(eax, eax);
5250 // The stub returns 0 for true.
5251 Split(zero, if_true, if_false, fall_through);
5252 break;
5253 }
5255 default: {
5256 VisitForAccumulatorValue(expr->right());
5257 Condition cc = CompareIC::ComputeCondition(op);
5258 __ pop(edx);
5260 bool inline_smi_code = ShouldInlineSmiCase(op);
5261 JumpPatchSite patch_site(masm_);
5262 if (inline_smi_code) {
5263 Label slow_case;
5264 __ mov(ecx, edx);
5265 __ or_(ecx, eax);
5266 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5267 __ cmp(edx, eax);
5268 Split(cc, if_true, if_false, NULL);
5269 __ bind(&slow_case);
5272 Handle<Code> ic = CodeFactory::CompareIC(
5273 isolate(), op, strength(language_mode())).code();
5274 CallIC(ic, expr->CompareOperationFeedbackId());
5275 patch_site.EmitPatchInfo();
5277 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5278 __ test(eax, eax);
5279 Split(cc, if_true, if_false, fall_through);
5283 // Convert the result of the comparison into one expected for this
5284 // expression's context.
5285 context()->Plug(if_true, if_false);
5289 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5290 Expression* sub_expr,
5291 NilValue nil) {
5292 Label materialize_true, materialize_false;
5293 Label* if_true = NULL;
5294 Label* if_false = NULL;
5295 Label* fall_through = NULL;
5296 context()->PrepareTest(&materialize_true, &materialize_false,
5297 &if_true, &if_false, &fall_through);
5299 VisitForAccumulatorValue(sub_expr);
5300 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5302 Handle<Object> nil_value = nil == kNullValue
5303 ? isolate()->factory()->null_value()
5304 : isolate()->factory()->undefined_value();
5305 if (expr->op() == Token::EQ_STRICT) {
5306 __ cmp(eax, nil_value);
5307 Split(equal, if_true, if_false, fall_through);
5308 } else {
5309 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5310 CallIC(ic, expr->CompareOperationFeedbackId());
5311 __ test(eax, eax);
5312 Split(not_zero, if_true, if_false, fall_through);
5314 context()->Plug(if_true, if_false);
5318 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5319 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5320 context()->Plug(eax);
5324 Register FullCodeGenerator::result_register() {
5325 return eax;
5326 }
5329 Register FullCodeGenerator::context_register() {
5330 return esi;
5331 }
5334 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5335 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5336 __ mov(Operand(ebp, frame_offset), value);
5340 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5341 __ mov(dst, ContextOperand(esi, context_index));
5345 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5346 Scope* declaration_scope = scope()->DeclarationScope();
5347 if (declaration_scope->is_script_scope() ||
5348 declaration_scope->is_module_scope()) {
5349 // Contexts nested in the native context have a canonical empty function
5350 // as their closure, not the anonymous closure containing the global
5351 // code. Pass a smi sentinel and let the runtime look up the empty
5352 // function.
5353 __ push(Immediate(Smi::FromInt(0)));
5354 } else if (declaration_scope->is_eval_scope()) {
5355 // Contexts nested inside eval code have the same closure as the context
5356 // calling eval, not the anonymous closure containing the eval code.
5357 // Fetch it from the context.
5358 __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
5359 } else {
5360 DCHECK(declaration_scope->is_function_scope());
5361 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5366 // ----------------------------------------------------------------------------
5367 // Non-local control flow support.
5369 void FullCodeGenerator::EnterFinallyBlock() {
5370 // Cook return address on top of stack (smi encoded Code* delta)
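// Sketch: the raw return address is a direct pointer into this code
// object, which the GC may move. Storing it as the smi-tagged delta
//   smi(return_address - CodeObject)
// keeps the stack slot GC-safe while the finally block runs;
// ExitFinallyBlock reverses the transformation before jumping.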
5371 DCHECK(!result_register().is(edx));
5372 __ pop(edx);
5373 __ sub(edx, Immediate(masm_->CodeObject()));
5374 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5375 STATIC_ASSERT(kSmiTag == 0);
5376 __ SmiTag(edx);
5377 __ push(edx);
5379 // Store result register while executing finally block.
5380 __ push(result_register());
5382 // Store pending message while executing finally block.
5383 ExternalReference pending_message_obj =
5384 ExternalReference::address_of_pending_message_obj(isolate());
5385 __ mov(edx, Operand::StaticVariable(pending_message_obj));
5386 __ push(edx);
5388 ClearPendingMessage();
5392 void FullCodeGenerator::ExitFinallyBlock() {
5393 DCHECK(!result_register().is(edx));
5394 // Restore pending message from stack.
5395 __ pop(edx);
5396 ExternalReference pending_message_obj =
5397 ExternalReference::address_of_pending_message_obj(isolate());
5398 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5400 // Restore result register from stack.
5401 __ pop(result_register());
5403 // Uncook return address.
5404 __ pop(edx);
5405 __ SmiUntag(edx);
5406 __ add(edx, Immediate(masm_->CodeObject()));
5407 __ jmp(edx);
5411 void FullCodeGenerator::ClearPendingMessage() {
5412 DCHECK(!result_register().is(edx));
5413 ExternalReference pending_message_obj =
5414 ExternalReference::address_of_pending_message_obj(isolate());
5415 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
5416 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5420 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5421 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5422 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5423 Immediate(SmiFromSlot(slot)));
5430 static const byte kJnsInstruction = 0x79;
5431 static const byte kJnsOffset = 0x11;
5432 static const byte kNopByteOne = 0x66;
5433 static const byte kNopByteTwo = 0x90;
5435 static const byte kCallInstruction = 0xe8;
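// Byte-level view of a back edge site, as assumed by the patching below:
//   79 11             jns ok            ;; kJnsInstruction, kJnsOffset
//   e8 xx xx xx xx    call <builtin>    ;; kCallInstruction + 32-bit target
//   ok:
// Patching for OSR replaces the jns with a two-byte nop (66 90) so the
// call is always taken; patching back restores the jns.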
5439 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5440 Address pc,
5441 BackEdgeState target_state,
5442 Code* replacement_code) {
5443 Address call_target_address = pc - kIntSize;
5444 Address jns_instr_address = call_target_address - 3;
5445 Address jns_offset_address = call_target_address - 2;
5447 switch (target_state) {
5448 case INTERRUPT:
5449 // sub <profiling_counter>, <delta> ;; Not changed
5450 // jns ok ;; Not changed
5451 // call <interrupt stub>
5452 // ok:
5453 *jns_instr_address = kJnsInstruction;
5454 *jns_offset_address = kJnsOffset;
5455 break;
5456 case ON_STACK_REPLACEMENT:
5457 case OSR_AFTER_STACK_CHECK:
5458 // sub <profiling_counter>, <delta> ;; Not changed
5459 // nop
5460 // nop
5461 // call <on-stack replacement>
5462 // ok:
5463 *jns_instr_address = kNopByteOne;
5464 *jns_offset_address = kNopByteTwo;
5465 break;
5468 Assembler::set_target_address_at(call_target_address,
5469 unoptimized_code,
5470 replacement_code->entry());
5471 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5472 unoptimized_code, call_target_address, replacement_code);
5476 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5477 Isolate* isolate,
5478 Code* unoptimized_code,
5479 Address pc) {
5480 Address call_target_address = pc - kIntSize;
5481 Address jns_instr_address = call_target_address - 3;
5482 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5484 if (*jns_instr_address == kJnsInstruction) {
5485 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5486 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5487 Assembler::target_address_at(call_target_address,
5488 unoptimized_code));
5489 return INTERRUPT;
5490 }
5492 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5493 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5495 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5496 isolate->builtins()->OnStackReplacement()->entry()) {
5497 return ON_STACK_REPLACEMENT;
5500 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5501 Assembler::target_address_at(call_target_address,
5502 unoptimized_code));
5503 return OSR_AFTER_STACK_CHECK;
5504 }
5507 } // namespace internal
5508 } // namespace v8
5510 #endif // V8_TARGET_ARCH_IA32