// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "code-stubs.h"
#include "hydrogen.h"
#include "lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ASSERT(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


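// Shared infrastructure for building Hydrogen graphs for code stubs. The base
// class binds the register parameters described by the stub's interface
// descriptor, sets up the context, and emits the return sequence; subclasses
// supply the stub body through BuildCodeStub().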
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        context_(NULL) {
    descriptor_ = stub->GetInterfaceDescriptor(isolate);
    parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    ASSERT(parameter < descriptor_->register_param_count_);
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph().
    ASSERT(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
  HContext* context_;
};


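// Builds the stub prologue and epilogue around the stub-specific body: binds
// each register parameter from the interface descriptor (tracking the stack
// parameter count, if any), binds the context, and finishes with an HReturn
// that pops the appropriate number of stack arguments.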
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_->register_param_count_;
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_->IsParameterCountRegister(i)
        ? Representation::Integer32()
        : Representation::Tagged();
    HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_->IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  ASSERT(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count_ < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_->hint_stack_parameter_count_;
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


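// Stub-specific builder. Stubs either implement BuildCodeStub() directly or
// provide BuildCodeInitializedStub(); uninitialized stubs get a body that
// simply deopts back to the runtime.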
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
  Factory* factory = isolate->factory();

  // Generate the new code.
  MacroAssembler masm(isolate, NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType(),
      GetStubFlags());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


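// Common entry point for generating Hydrogen-based stub code: lazily
// initializes the interface descriptor, uses the lightweight miss path for
// uninitialized stubs with a miss handler, and otherwise builds and optimizes
// the graph and runs Lithium code generation.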
template <class Stub>
static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
  CodeStub::Major major_key =
      static_cast<HydrogenCodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  if (descriptor->register_param_count_ < 0) {
    stub->InitializeInterfaceDescriptor(isolate, descriptor);
  }

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    ASSERT(!descriptor->stack_parameter_count_.is_valid());
    return stub->GenerateLightweightMissCode(isolate);
  }
  ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Lazy compilation of %s took %0.3f ms]\n",
           stub->GetName().get(), ms);
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArgument>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


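// Clones the boilerplate array referenced by the allocation site that is
// loaded from the first two parameters. Deopts if the allocation site is
// still uninitialized (undefined).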
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
  int length = casted_stub()->length();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);
  HValue* push_value;
  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
    HValue* elements = AddLoadElements(boilerplate);

    IfBuilder if_fixed_cow(this);
    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    if_fixed_cow.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        0/*copy-on-write*/);
    environment()->Push(push_value);
    if_fixed_cow.Else();

    IfBuilder if_fixed(this);
    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    if_fixed.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        length);
    environment()->Push(push_value);
    if_fixed.Else();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_DOUBLE_ELEMENTS,
                                        length);
    environment()->Push(push_value);
  } else {
    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        elements_kind,
                                        length);
    environment()->Push(push_value);
  }

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


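// Clones a boilerplate object field by field. Deopts if the allocation site
// is uninitialized or the boilerplate's instance size does not match the
// expected size; optionally appends an AllocationMemento when allocation-site
// pretenuring is enabled.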
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, static_cast<HValue*>(NULL),
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, static_cast<HValue*>(NULL),
      HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(
        object, access, Add<HLoadNamedField>(
            boilerplate, static_cast<HValue*>(NULL), access));
  }

  ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


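// Allocates a pretenured AllocationSite, initializes its fields, links it
// into the isolate's list of allocation sites, and stores it into the cell
// passed as parameter 0.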
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map.
  Handle<Map> allocation_site_map =
      isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind).
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());
  HStoreNamedField* store = Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(
      site_list, static_cast<HValue*>(NULL),
      HObjectAccess::ForAllocationSiteList());
  store = Add<HStoreNamedField>(object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  store->SkipWriteBarrier();
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  // We use a hammer (SkipWriteBarrier()) to indicate that we know the input
  // cell is really a Cell, and so no write barrier is needed.
  // TODO(mvstanton): Add a debug_code check to verify the input cell is really
  // a cell. (perhaps with a new instruction, HAssert).
  HInstruction* cell = GetParameter(0);
  HObjectAccess access = HObjectAccess::ForCellValue();
  store = Add<HStoreNamedField>(cell, access, object);
  store->SkipWriteBarrier();
  return cell;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  int offset = casted_stub()->offset();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForObservableJSObjectOffset(offset, rep) :
      HObjectAccess::ForBackingStoreOffset(offset, rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  int offset = casted_stub()->offset();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForObservableJSObjectOffset(offset, rep) :
      HObjectAccess::ForBackingStoreOffset(offset, rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), GetParameter(2),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      true, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              true);

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


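// Builds the body of an Array constructor stub: allocates an empty array, a
// single-argument array, or an N-argument array depending on argument_class,
// honoring the allocation site override mode.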
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  ASSERT(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


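// Builds the specialized code for a binary operation based on the recorded IC
// state, with a dedicated fast path for string addition and optional reuse of
// a heap-number box for chained operations.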
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    Type::String(zone()), right_type,
                    result_type, state.fixed_right_arg(),
                    allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, right_type, result_type,
                    state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, Type::String(zone()),
                    result_type, state.fixed_right_arg(),
                    allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, right_type, result_type,
                    state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    if (result_type->Is(Type::Smi())) {
      if (state.op() == Token::SHR) {
        // TODO(olivf) Replace this by a SmiTagU Instruction.
        // 0x40000000: this number would convert to negative when interpreting
        // the register as signed value.
        IfBuilder if_of(this);
        if_of.IfNot<HCompareNumericAndBranch>(result,
            Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
                ? 0x80000000 : 0x40000000)), Token::EQ_STRICT);
        if_of.Then();
        if_of.ElseDeopt("UInt->Smi overflow");
        if_of.End();
      }
    }
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.IfNot<HIsSmiAndBranch>(operand);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
  if_true.Then();
  if_true.Return(graph()->GetConstant1());
  if_true.Else();
  if_true.End();
  return graph()->GetConstant0();
}


Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


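// Stores a value into a global property cell. The map and property cell used
// here are placeholders that are patched with the actual global object map
// and cell when the stub is specialized.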
template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* receiver = GetParameter(0);
  HParameter* value = GetParameter(2);

  // Check that the map of the global has not changed: use a placeholder map
  // that will be replaced later with the global object's map.
  Handle<Map> placeholder_map = isolate()->factory()->meta_map();
  Add<HCheckMaps>(receiver, placeholder_map, top_info());

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(
      cell, static_cast<HValue*>(NULL), access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = Add<HConstant>(hole);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


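// Transitions the receiver to the target elements kind and then performs the
// keyed store; defers to the runtime when elements transitions are traced.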
template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);
  HValue* map = GetParameter(1);
  HValue* key = GetParameter(2);
  HValue* object = GetParameter(3);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           true, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


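// Installs optimized code on a closure and links the closure into the native
// context's list of optimized functions.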
void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    HValue* code_object) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
           HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
           js_function);
}


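// Installs the (unoptimized) code from the shared function info on the
// closure and clears its next-function link.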
void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


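// Searches the shared function info's optimized code map for an entry that
// matches the given native context and has no OSR AST id, and installs the
// cached optimized code if found; otherwise installs the unoptimized code.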
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL),
      HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to fixed array of 3-element entries
    // (native context, optimized code, literals).
    // Map must never be empty, so check the first elements.
    Label install_optimized;
    HValue* first_context_slot = Add<HLoadNamedField>(
        optimized_map, static_cast<HValue*>(NULL),
        HObjectAccess::ForFirstContextSlot());
    HValue* first_osr_ast_slot = Add<HLoadNamedField>(
        optimized_map, static_cast<HValue*>(NULL),
        HObjectAccess::ForFirstOsrAstIdSlot());
    HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
    IfBuilder already_in(this);
    already_in.If<HCompareObjectEqAndBranch>(native_context,
                                             first_context_slot);
    already_in.AndIf<HCompareObjectEqAndBranch>(first_osr_ast_slot,
                                                osr_ast_id_none);
    already_in.Then();
    {
      HValue* code_object = Add<HLoadNamedField>(
          optimized_map, static_cast<HValue*>(NULL),
          HObjectAccess::ForFirstCodeSlot());
      BuildInstallOptimizedCode(js_function, native_context, code_object);
    }
    already_in.Else();
    {
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, static_cast<HValue*>(NULL),
          HObjectAccess::ForFixedArrayLength());
      HValue* slot_iterator = loop_builder.BeginBody(array_length,
                                           graph()->GetConstant0(),
                                           Token::GT);
      {
        // Iterate through the rest of map backwards.
        // Do not double check first entry.
        HValue* second_entry_index =
            Add<HConstant>(SharedFunctionInfo::kSecondEntryIndex);
        IfBuilder restore_check(this);
        restore_check.If<HCompareNumericAndBranch>(
            slot_iterator, second_entry_index, Token::EQ);
        restore_check.Then();
        {
          // Store the unoptimized code.
          BuildInstallCode(js_function, shared_info);
          loop_builder.Break();
        }
        restore_check.Else();
        {
          STATIC_ASSERT(SharedFunctionInfo::kContextOffset == 0);
          STATIC_ASSERT(SharedFunctionInfo::kEntryLength -
                            SharedFunctionInfo::kOsrAstIdOffset == 1);
          HValue* native_context_slot = AddUncasted<HSub>(
              slot_iterator, shared_function_entry_length);
          HValue* osr_ast_id_slot = AddUncasted<HSub>(
              slot_iterator, graph()->GetConstant1());
          HInstruction* native_context_entry = Add<HLoadKeyed>(optimized_map,
              native_context_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
          HInstruction* osr_ast_id_entry = Add<HLoadKeyed>(optimized_map,
              osr_ast_id_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
          IfBuilder done_check(this);
          done_check.If<HCompareObjectEqAndBranch>(native_context,
                                                   native_context_entry);
          done_check.AndIf<HCompareObjectEqAndBranch>(osr_ast_id_entry,
                                                      osr_ast_id_none);
          done_check.Then();
          {
            // Hit: fetch the optimized code.
            HValue* code_slot = AddUncasted<HAdd>(
                native_context_slot, graph()->GetConstant1());
            HValue* code_object = Add<HLoadKeyed>(optimized_map,
                code_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
            BuildInstallOptimizedCode(js_function, native_context, code_object);

            // Fall out of the loop.
            loop_builder.Break();
          }
          done_check.Else();
          done_check.End();
        }
        restore_check.End();
      }
      loop_builder.EndBody();
    }
    already_in.End();
  }
  is_optimized.End();
}


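// Allocates a new JSFunction in new space and initializes it from the given
// shared function info, installing optimized code from the optimized code map
// when code caching is enabled.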
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
                                             NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->is_generator());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForSharedFunctionInfoPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::Tagged(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(0);
  HValue* key = GetParameter(1);

  Add<HCheckSmi>(key);

  return BuildUncheckedDictionaryElementLoad(receiver, key);
}


Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


} }  // namespace v8::internal