// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "code-stubs.h"
#include "hydrogen.h"
#include "lithium.h"

namespace v8 {
namespace internal {


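// Compiles an already-built Hydrogen graph for a stub into a Lithium chunk.
// Stubs have no fallback path here, so a bailout in either the optimizer or
// the Lithium builder is a fatal error.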
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ASSERT(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


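// Common infrastructure for building Hydrogen graphs for code stubs: owns the
// CompilationInfo, exposes the stub's register parameters, and provides the
// shared array-constructor and closure-installation helpers used by the
// per-stub builders below.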
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        context_(NULL) {
    descriptor_ = stub->GetInterfaceDescriptor(isolate);
    parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    ASSERT(parameter < descriptor_->register_param_count_);
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    ASSERT(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  class ArrayContextChecker {
   public:
    ArrayContextChecker(HGraphBuilder* builder, HValue* constructor,
                        HValue* array_function)
        : checker_(builder) {
      checker_.If<HCompareObjectEqAndBranch, HValue*>(constructor,
                                                      array_function);
      checker_.Then();
    }

    ~ArrayContextChecker() {
      checker_.ElseDeopt("Array constructor called from different context");
      checker_.End();
    }
   private:
    IfBuilder checker_;
  };

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* BuildArrayConstructor(ElementsKind kind,
                                ContextCheckMode context_mode,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
  HContext* context_;
};


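// Builds the complete Hydrogen graph for a stub: sets up the entry block,
// binds the register parameters (and, if present, the stack parameter count)
// plus the context, invokes the stub-specific BuildCodeStub(), and ends with
// an HReturn that pops the appropriate number of stack arguments.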
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_->register_param_count_;
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  for (int i = 0; i < param_count; ++i) {
    HParameter* param =
        Add<HParameter>(i, HParameter::REGISTER_PARAMETER);
    start_environment->Bind(i, param);
    parameters_[i] = param;
  }

  HInstruction* stack_parameter_count;
  if (descriptor_->stack_parameter_count_.is_valid()) {
    ASSERT(descriptor_->environment_length() == (param_count + 1));
    stack_parameter_count = New<HParameter>(param_count,
                                            HParameter::REGISTER_PARAMETER,
                                            Representation::Integer32());
    stack_parameter_count->set_type(HType::Smi());
    // It's essential to bind this value to the environment in case of deopt.
    AddInstruction(stack_parameter_count);
    start_environment->Bind(param_count, stack_parameter_count);
    arguments_length_ = stack_parameter_count;
  } else {
    ASSERT(descriptor_->environment_length() == param_count);
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count_ < 0) {
      HInstruction* amount = graph()->GetConstant1();
      stack_pop_count = Add<HAdd>(stack_parameter_count, amount);
      stack_pop_count->ChangeRepresentation(Representation::Integer32());
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
    } else {
      int count = descriptor_->hint_stack_parameter_count_;
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


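// Per-stub graph builder. Initialized stubs implement BuildCodeInitializedStub
// (or one of the BuildCodeStub specializations below); uninitialized stubs get
// a graph that unconditionally deopts back to the runtime.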
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


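// Generates a minimal code object that just enters the runtime through the
// stub's miss handler, used for uninitialized stubs instead of compiling a
// full Hydrogen graph.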
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
  Factory* factory = isolate->factory();

  // Generate the new code.
  MacroAssembler masm(isolate, NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate->counters()->code_stubs()->Increment();

    // Nested stubs are not allowed for leaves.
    AllowStubCallsScope allow_scope(&masm, false);

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType(),
      GetStubFlags());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


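// Shared driver for all Hydrogen stubs: lazily initializes the stub's
// interface descriptor, short-circuits uninitialized stubs to the lightweight
// miss code, and otherwise builds, optimizes, and code-generates the graph.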
template <class Stub>
static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
  CodeStub::Major major_key =
      static_cast<HydrogenCodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  if (descriptor->register_param_count_ < 0) {
    stub->InitializeInterfaceDescriptor(isolate, descriptor);
  }

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    ASSERT(!descriptor->stack_parameter_count_.is_valid());
    return stub->GenerateLightweightMissCode(isolate);
  }
  ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Lazy compilation of %s took %0.3f ms]\n", *stub->GetName(), ms);
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArgument>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, handle(Type::Number(), isolate()));
}


Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
  int length = casted_stub()->length();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);
  HValue* push_value;
  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
    HValue* elements = AddLoadElements(boilerplate);

    IfBuilder if_fixed_cow(this);
    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    if_fixed_cow.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        0/*copy-on-write*/);
    environment()->Push(push_value);
    if_fixed_cow.Else();

    IfBuilder if_fixed(this);
    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    if_fixed.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        length);
    environment()->Push(push_value);
    if_fixed.Else();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_DOUBLE_ELEMENTS,
                                        length);
    environment()->Push(push_value);
  } else {
    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        elements_kind,
                                        length);
    environment()->Push(push_value);
  }

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      isolate()->heap()->GetPretenureMode(), JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, access));
  }

  ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(object, object_size, allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());
  HStoreNamedField* store = Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list,
                                      HObjectAccess::ForAllocationSiteList());
  store = Add<HStoreNamedField>(object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  store->SkipWriteBarrier();
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  // We use a hammer (SkipWriteBarrier()) to indicate that we know the input
  // cell is really a Cell, and so no write barrier is needed.
  // TODO(mvstanton): Add a debug_code check to verify the input cell is really
  // a cell. (perhaps with a new instruction, HAssert).
  HInstruction* cell = GetParameter(0);
  HObjectAccess access = HObjectAccess::ForCellValue();
  store = Add<HStoreNamedField>(cell, access, object);
  store->SkipWriteBarrier();
  return cell;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), GetParameter(2),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      true, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              true);

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

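// Shared implementation for the Array constructor stubs: optionally checks
// that the constructor really is the current context's Array function, loads
// the AllocationSite out of the property cell, and then dispatches on the
// number of arguments.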
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    ContextCheckMode context_mode,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  if (context_mode == CONTEXT_CHECK_REQUIRED) {
    HInstruction* array_function = BuildGetArrayFunction();
    ArrayContextChecker checker(this, constructor, array_function);
  }

  HValue* property_cell = GetParameter(ArrayConstructorStubBase::kPropertyCell);
  // Walk through the property cell to the AllocationSite
  HValue* alloc_site = Add<HLoadNamedField>(property_cell,
                                            HObjectAccess::ForCellValue());
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);

  HInstruction* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);  // length
  if_builder.Else();
  Push(checked_arg);  // capacity
  Push(checked_arg);  // length
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length, true);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  HValue* length = GetArgumentsLength();
  bool fill_with_hole = IsFastSmiElementsKind(kind);
  HValue* new_object = array_builder->AllocateArray(length,
                                                    length,
                                                    fill_with_hole);
  HValue* elements = array_builder->GetElementsLocation();
  ASSERT(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Handle<Type> type = stub->GetType(isolate, sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpStub>::BuildCodeInitializedStub() {
  BinaryOpStub* stub = casted_stub();
  HValue* left = GetParameter(0);
  HValue* right = GetParameter(1);

  Handle<Type> left_type = stub->GetLeftType(isolate());
  Handle<Type> right_type = stub->GetRightType(isolate());
  Handle<Type> result_type = stub->GetResultType(isolate());

  ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (stub->HasSideEffects(isolate()) || !result_type->Is(Type::None())));

  HValue* result = NULL;
  if (stub->operation() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(AddInstruction(BuildBinaryOperation(
                    stub->operation(), left, right,
                    handle(Type::String(), isolate()), right_type,
                    result_type, stub->fixed_right_arg(), true)));
      }
      if_leftisstring.Else();
      {
        Push(AddInstruction(BuildBinaryOperation(
                    stub->operation(), left, right,
                    left_type, right_type, result_type,
                    stub->fixed_right_arg(), true)));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(AddInstruction(BuildBinaryOperation(
                    stub->operation(), left, right,
                    left_type, handle(Type::String(), isolate()),
                    result_type, stub->fixed_right_arg(), true)));
      }
      if_rightisstring.Else();
      {
        Push(AddInstruction(BuildBinaryOperation(
                    stub->operation(), left, right,
                    left_type, right_type, result_type,
                    stub->fixed_right_arg(), true)));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = AddInstruction(BuildBinaryOperation(
            stub->operation(), left, right,
            left_type, right_type, result_type,
            stub->fixed_right_arg(), true));
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!stub->HasSideEffects(isolate())) {
    if (result_type->Is(Type::Smi())) {
      if (stub->operation() == Token::SHR) {
        // TODO(olivf) Replace this by a SmiTagU Instruction.
        // 0x40000000: this number would convert to negative when interpreting
        // the register as a signed value.
        IfBuilder if_of(this);
        if_of.IfNot<HCompareNumericAndBranch>(result,
            Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
                ? 0x80000000 : 0x40000000)), Token::EQ_STRICT);
        if_of.Then();
        if_of.ElseDeopt("UInt->Smi overflow");
        if_of.End();
      }
    }
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
  if (stub->CanReuseDoubleBox()) {
    HValue* operand = (stub->mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.IfNot<HIsSmiAndBranch>(operand);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
  if_true.Then();
  if_true.Return(graph()->GetConstant1());
  if_true.Else();
  if_true.End();
  return graph()->GetConstant0();
}


Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* receiver = GetParameter(0);
  HParameter* value = GetParameter(2);

  // Check that the map of the global has not changed: use a placeholder map
  // that will be replaced later with the global object's map.
  Handle<Map> placeholder_map = isolate()->factory()->meta_map();
  Add<HCheckMaps>(receiver, placeholder_map, top_info());

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(cell, access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = Add<HConstant>(hole);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);
  HValue* map = GetParameter(1);
  HValue* key = GetParameter(2);
  HValue* object = GetParameter(3);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           true, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

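// Installs an already-optimized code object into the closure and links the
// closure into the native context's list of optimized functions.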
void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    HValue* code_object) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
           HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
           js_function);
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}

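// Searches the shared function info's optimized code map for an entry that
// matches the given native context. On a hit the cached optimized code is
// installed; otherwise the closure falls back to the shared (unoptimized)
// code.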
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(shared_info,
      HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to fixed array of 3-element entries
    // (native context, optimized code, literals).
    // Map must never be empty, so check the first elements.
    Label install_optimized;
    HValue* first_context_slot = Add<HLoadNamedField>(optimized_map,
        HObjectAccess::ForFirstContextSlot());
    IfBuilder already_in(this);
    already_in.If<HCompareObjectEqAndBranch>(native_context,
                                             first_context_slot);
    already_in.Then();
    {
      HValue* code_object = Add<HLoadNamedField>(optimized_map,
        HObjectAccess::ForFirstCodeSlot());
      BuildInstallOptimizedCode(js_function, native_context, code_object);
    }
    already_in.Else();
    {
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(optimized_map,
          HObjectAccess::ForFixedArrayLength());
      HValue* key = loop_builder.BeginBody(array_length,
                                           graph()->GetConstant0(),
                                           Token::GT);
      {
        // Iterate through the rest of the map backwards.
        // Do not double check the first entry.
        HValue* second_entry_index =
            Add<HConstant>(SharedFunctionInfo::kSecondEntryIndex);
        IfBuilder restore_check(this);
        restore_check.If<HCompareNumericAndBranch>(key, second_entry_index,
                                                   Token::EQ);
        restore_check.Then();
        {
          // Store the unoptimized code
          BuildInstallCode(js_function, shared_info);
          loop_builder.Break();
        }
        restore_check.Else();
        {
          HValue* keyed_minus = AddUncasted<HSub>(
              key, shared_function_entry_length);
          HInstruction* keyed_lookup = Add<HLoadKeyed>(optimized_map,
              keyed_minus, static_cast<HValue*>(NULL), FAST_ELEMENTS);
          IfBuilder done_check(this);
          done_check.If<HCompareObjectEqAndBranch>(native_context,
                                                   keyed_lookup);
          done_check.Then();
          {
            // Hit: fetch the optimized code.
            HValue* keyed_plus = AddUncasted<HAdd>(
                keyed_minus, graph()->GetConstant1());
            HValue* code_object = Add<HLoadKeyed>(optimized_map,
                keyed_plus, static_cast<HValue*>(NULL), FAST_ELEMENTS);
            BuildInstallOptimizedCode(js_function, native_context, code_object);

            // Fall out of the loop
            loop_builder.Break();
          }
          done_check.Else();
          done_check.End();
        }
        restore_check.End();
      }
      loop_builder.EndBody();
    }
    already_in.End();
  }
  is_optimized.End();
}

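// FastNewClosureStub: allocates a JSFunction in new space, initializes its
// fields from the SharedFunctionInfo and the current context, and installs
// either cached optimized code or the shared code.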
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
                                             NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->is_generator());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(native_context,
      HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForSharedFunctionInfoPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


} }  // namespace v8::internal