// Upstream version 9.38.198.0
// platform/framework/web/crosswalk.git: src/v8/src/code-stubs-hydrogen.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        context_(NULL) {
    descriptor_ = stub->GetInterfaceDescriptor();
    int parameter_count = descriptor_->GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_->GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_->GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_->stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_->GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_->IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_->hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode() {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStub::Major major_key = static_cast<CodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  if (!descriptor->IsInitialized()) {
    stub->InitializeInterfaceDescriptor(descriptor);
  }

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    DCHECK(!descriptor->stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode();
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  // TODO(yangguo) remove this once the code serializer handles code stubs.
  if (FLAG_serialize_toplevel) chunk->info()->PrepareForSerializing();
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArguments>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, static_cast<HValue*>(NULL),
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, static_cast<HValue*>(NULL),
      HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(
        object, access, Add<HLoadNamedField>(
            boilerplate, static_cast<HValue*>(NULL), access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(
      site_list, static_cast<HValue*>(NULL),
      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(KeyedLoadIC::kReceiverIndex),
      GetParameter(KeyedLoadIC::kNameIndex),
      NULL,
      casted_stub()->is_js_array(),
      casted_stub()->elements_kind(),
      LOAD,
      NEVER_RETURN_HOLE,
      STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->descriptor()));
  return Add<HLoadNamedField>(descriptors, static_cast<HValue*>(NULL),
                              value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Store the double value into it.
    access = HObjectAccess::ForHeapNumberValue();
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation());
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreIC::kReceiverIndex),
      GetParameter(StoreIC::kNameIndex),
      GetParameter(StoreIC::kValueIndex),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    Type::String(zone()), right_type,
                    result_type, state.fixed_right_arg(),
                    allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, right_type, result_type,
                    state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, Type::String(zone()),
                    result_type, state.fixed_right_arg(),
                    allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
                    state.op(), left, right,
                    left_type, right_type, result_type,
                    state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->GetMode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* value = GetParameter(StoreIC::kValueIndex);

  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    Handle<Map> placeholder_map = isolate()->factory()->meta_map();
    HValue* global = Add<HConstant>(
        StoreGlobalStub::global_placeholder(isolate()));
    Add<HCheckMaps>(global, placeholder_map);
  }

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(
      cell, static_cast<HValue*>(NULL), access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = Add<HConstant>(hole);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // and the literals
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
           HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
           js_function);

  // The builder continues in the "then" after this function.
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
      static_cast<HValue*>(NULL), FAST_ELEMENTS);
  return field_entry;
}


void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL),
      HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of entries
    // (native context, optimized code, literals, osr ast id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double-check the
      // first entry. After the loop, if no matching optimized code was found,
      // install the unoptimized code.
1295       // for(i = map.length() - SharedFunctionInfo::kEntryLength;
1296       //     i > SharedFunctionInfo::kEntriesStart;
1297       //     i -= SharedFunctionInfo::kEntryLength) { .. }
1298       HValue* shared_function_entry_length =
1299           Add<HConstant>(SharedFunctionInfo::kEntryLength);
1300       LoopBuilder loop_builder(this,
1301                                context(),
1302                                LoopBuilder::kPostDecrement,
1303                                shared_function_entry_length);
1304       HValue* array_length = Add<HLoadNamedField>(
1305           optimized_map, static_cast<HValue*>(NULL),
1306           HObjectAccess::ForFixedArrayLength());
1307       HValue* start_pos = AddUncasted<HSub>(array_length,
1308                                             shared_function_entry_length);
1309       HValue* slot_iterator = loop_builder.BeginBody(start_pos,
1310                                                      first_entry_index,
1311                                                      Token::GT);
1312       {
1313         IfBuilder done_check(this);
1314         BuildCheckAndInstallOptimizedCode(js_function, native_context,
1315                                           &done_check,
1316                                           optimized_map,
1317                                           slot_iterator);
1318         // Fall out of the loop
1319         loop_builder.Break();
1320       }
1321       loop_builder.EndBody();
1322
1323       // If slot_iterator equals first entry index, then we failed to find and
1324       // install optimized code
1325       IfBuilder no_optimized_code_check(this);
1326       no_optimized_code_check.If<HCompareNumericAndBranch>(
1327           slot_iterator, first_entry_index, Token::EQ);
1328       no_optimized_code_check.Then();
1329       {
1330         // Store the unoptimized code
1331         BuildInstallCode(js_function, shared_info);
1332       }
1333     }
1334   }
1335 }
1336
1337
1338 template<>
1339 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
1340   Counters* counters = isolate()->counters();
1341   Factory* factory = isolate()->factory();
1342   HInstruction* empty_fixed_array =
1343       Add<HConstant>(factory->empty_fixed_array());
1344   HValue* shared_info = GetParameter(0);
1345
1346   AddIncrementCounter(counters->fast_new_closure_total());
1347
1348   // Create a new closure from the given function info in new space
1349   HValue* size = Add<HConstant>(JSFunction::kSize);
1350   HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
1351                                              NOT_TENURED, JS_FUNCTION_TYPE);
1352
1353   int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
1354                                             casted_stub()->is_generator());
1355
1356   // Compute the function map in the current native context and set that
1357   // as the map of the allocated object.
1358   HInstruction* native_context = BuildGetNativeContext();
1359   HInstruction* map_slot_value = Add<HLoadNamedField>(
1360       native_context, static_cast<HValue*>(NULL),
1361       HObjectAccess::ForContextSlot(map_index));
1362   Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
1363
1364   // Initialize the rest of the function.
1365   Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
1366                         empty_fixed_array);
1367   Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
1368                         empty_fixed_array);
1369   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1370                         empty_fixed_array);
1371   Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
1372                         graph()->GetConstantHole());
1373   Add<HStoreNamedField>(js_function,
1374                         HObjectAccess::ForSharedFunctionInfoPointer(),
1375                         shared_info);
1376   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1377                         context());
1378
1379   // Initialize the code pointer in the function to be the one
1380   // found in the shared function info object.
1381   // But first check if there is an optimized version for our context.
1382   if (FLAG_cache_optimized_code) {
1383     BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
1384   } else {
1385     BuildInstallCode(js_function, shared_info);
1386   }
1387
1388   return js_function;
1389 }
1390
1391
1392 Handle<Code> FastNewClosureStub::GenerateCode() {
1393   return DoGenerateCode(this);
1394 }
1395
1396
1397 template<>
1398 HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
1399   int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;
1400
1401   // Get the function.
1402   HParameter* function = GetParameter(FastNewContextStub::kFunction);
1403
1404   // Allocate the context in new space.
1405   HAllocate* function_context = Add<HAllocate>(
1406       Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
1407       HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);
1408
1409   // Set up the object header.
1410   AddStoreMapConstant(function_context,
1411                       isolate()->factory()->function_context_map());
1412   Add<HStoreNamedField>(function_context,
1413                         HObjectAccess::ForFixedArrayLength(),
1414                         Add<HConstant>(length));
1415
1416   // Set up the fixed slots.
1417   Add<HStoreNamedField>(function_context,
1418                         HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
1419                         function);
1420   Add<HStoreNamedField>(function_context,
1421                         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
1422                         context());
1423   Add<HStoreNamedField>(function_context,
1424                         HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
1425                         graph()->GetConstant0());
1426
1427   // Copy the global object from the previous context.
1428   HValue* global_object = Add<HLoadNamedField>(
1429       context(), static_cast<HValue*>(NULL),
1430       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1431   Add<HStoreNamedField>(function_context,
1432                         HObjectAccess::ForContextSlot(
1433                             Context::GLOBAL_OBJECT_INDEX),
1434                         global_object);
1435
1436   // Initialize the rest of the slots to undefined.
1437   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
1438     Add<HStoreNamedField>(function_context,
1439                           HObjectAccess::ForContextSlot(i),
1440                           graph()->GetConstantUndefined());
1441   }
1442
1443   return function_context;
1444 }
1445
1446
1447 Handle<Code> FastNewContextStub::GenerateCode() {
1448   return DoGenerateCode(this);
1449 }
1450
1451
1452 template <>
1453 HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
1454   HValue* receiver = GetParameter(KeyedLoadIC::kReceiverIndex);
1455   HValue* key = GetParameter(KeyedLoadIC::kNameIndex);
1456
1457   Add<HCheckSmi>(key);
1458
1459   HValue* elements = AddLoadElements(receiver);
1460
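       // Dictionary-mode elements live in a SeededNumberDictionary, so compute
       // the seeded integer hash of the (Smi) key and use it to probe that
       // table.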
1461   HValue* hash = BuildElementIndexHash(key);
1462
1463   return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
1464 }
1465
1466
1467 Handle<Code> LoadDictionaryElementStub::GenerateCode() {
1468   return DoGenerateCode(this);
1469 }
1470
1471
1472 template<>
1473 HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
1474   // Determine the parameters.
1475   HValue* length = GetParameter(RegExpConstructResultStub::kLength);
1476   HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
1477   HValue* input = GetParameter(RegExpConstructResultStub::kInput);
1478
1479   info()->MarkMustNotHaveEagerFrame();
1480
1481   return BuildRegExpConstructResult(length, index, input);
1482 }
1483
1484
1485 Handle<Code> RegExpConstructResultStub::GenerateCode() {
1486   return DoGenerateCode(this);
1487 }
1488
1489
1490 template <>
1491 class CodeStubGraphBuilder<KeyedLoadGenericStub>
1492     : public CodeStubGraphBuilderBase {
1493  public:
1494   CodeStubGraphBuilder(Isolate* isolate, KeyedLoadGenericStub* stub)
1495       : CodeStubGraphBuilderBase(isolate, stub) {}
1496
1497  protected:
1498   virtual HValue* BuildCodeStub();
1499
1500   void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
1501                                    HValue* bit_field2,
1502                                    ElementsKind kind);
1503
1504   void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
1505                             HValue* receiver,
1506                             HValue* key,
1507                             HValue* instance_type,
1508                             HValue* bit_field2,
1509                             ElementsKind kind);
1510
1511   void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
1512                                 HValue* receiver,
1513                                 HValue* key,
1514                                 HValue* instance_type,
1515                                 HValue* bit_field2,
1516                                 ElementsKind kind);
1517
1518   KeyedLoadGenericStub* casted_stub() {
1519     return static_cast<KeyedLoadGenericStub*>(stub());
1520   }
1521 };
1522
1523
1524 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
1525     HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
1526     ElementsKind kind) {
1527   ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
1528   HValue* kind_limit = Add<HConstant>(
1529       static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
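       // Comparing raw bit_field2 values against the encoded limit works
       // because Map::ElementsKindBits is allocated in the most significant
       // bits of bit_field2, so the numeric comparison is decided by the
       // elements kind.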
1530
1531   if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
1532   if_builder->Then();
1533 }
1534
1535
1536 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
1537     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1538     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
1539   DCHECK(!IsExternalArrayElementsKind(kind));
1540
1541   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1542
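       // JSArray receivers are bounded by the array's length property, other
       // receivers by the backing store length; the true/false argument below
       // is the is_js_array flag of BuildUncheckedMonomorphicElementAccess.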
1543   IfBuilder js_array_check(this);
1544   js_array_check.If<HCompareNumericAndBranch>(
1545       instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
1546   js_array_check.Then();
1547   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1548                                               true, kind,
1549                                               LOAD, NEVER_RETURN_HOLE,
1550                                               STANDARD_STORE));
1551   js_array_check.Else();
1552   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1553                                               false, kind,
1554                                               LOAD, NEVER_RETURN_HOLE,
1555                                               STANDARD_STORE));
1556   js_array_check.End();
1557 }
1558
1559
1560 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
1561     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1562     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
1563   DCHECK(IsExternalArrayElementsKind(kind));
1564
1565   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1566
1567   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1568                                               false, kind,
1569                                               LOAD, NEVER_RETURN_HOLE,
1570                                               STANDARD_STORE));
1571 }
1572
1573
1574 HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
1575   HValue* receiver = GetParameter(KeyedLoadIC::kReceiverIndex);
1576   HValue* key = GetParameter(KeyedLoadIC::kNameIndex);
1577
1578   // Split into a smi/integer case and unique string case.
1579   HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
1580                                                 graph()->CreateBasicBlock());
1581
1582   BuildKeyedIndexCheck(key, &index_name_split_continuation);
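       // BuildKeyedIndexCheck leaves the (possibly converted) key on the
       // environment stack; the Then branch handles index keys and the Else
       // branch unique names, hence the Pop() at the start of each branch.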
1583
1584   IfBuilder index_name_split(this, &index_name_split_continuation);
1585   index_name_split.Then();
1586   {
1587     // Key is an index (a number).
1588     key = Pop();
1589
1590     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
1591       (1 << Map::kHasIndexedInterceptor);
1592     BuildJSObjectCheck(receiver, bit_field_mask);
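         // BuildJSObjectCheck bails out for Smis and non-JS-object receivers,
         // as well as for maps with any of the masked bits set: access-checked
         // objects and indexed interceptors are not handled here.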
1593
1594     HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1595                                        HObjectAccess::ForMap());
1596
1597     HValue* instance_type =
1598       Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1599                            HObjectAccess::ForMapInstanceType());
1600
1601     HValue* bit_field2 = Add<HLoadNamedField>(map,
1602                                               static_cast<HValue*>(NULL),
1603                                               HObjectAccess::ForMapBitField2());
1604
1605     IfBuilder kind_if(this);
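         // The dispatch below must test element kinds in increasing
         // ElementsKind order: each BuildElementsKindLimitCheck only verifies
         // that the kind is below the next enum value, relying on earlier
         // branches to have filtered out the smaller kinds.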
1606     BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1607                          FAST_HOLEY_ELEMENTS);
1608
1609     kind_if.Else();
1610     {
1611       BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1612                            FAST_HOLEY_DOUBLE_ELEMENTS);
1613     }
1614     kind_if.Else();
1615
1616     // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
1617     BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
1618     {
1619       HValue* elements = AddLoadElements(receiver);
1620
1621       HValue* hash = BuildElementIndexHash(key);
1622
1623       Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
1624     }
1625     kind_if.Else();
1626
1627     // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
1628     BuildElementsKindLimitCheck(&kind_if, bit_field2,
1629                                 SLOPPY_ARGUMENTS_ELEMENTS);
1630     // Sloppy (non-strict) arguments elements are not handled.
1631     Add<HDeoptimize>("non-strict elements in KeyedLoadGenericStub",
1632                      Deoptimizer::EAGER);
1633     Push(graph()->GetConstant0());
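         // The eager deoptimization above never falls through; the dummy zero
         // is pushed only to keep the environment shape consistent with the
         // other branches.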
1634
1635     kind_if.Else();
1636     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1637                              EXTERNAL_INT8_ELEMENTS);
1638
1639     kind_if.Else();
1640     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1641                              EXTERNAL_UINT8_ELEMENTS);
1642
1643     kind_if.Else();
1644     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1645                              EXTERNAL_INT16_ELEMENTS);
1646
1647     kind_if.Else();
1648     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1649                              EXTERNAL_UINT16_ELEMENTS);
1650
1651     kind_if.Else();
1652     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1653                              EXTERNAL_INT32_ELEMENTS);
1654
1655     kind_if.Else();
1656     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1657                              EXTERNAL_UINT32_ELEMENTS);
1658
1659     kind_if.Else();
1660     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1661                              EXTERNAL_FLOAT32_ELEMENTS);
1662
1663     kind_if.Else();
1664     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1665                              EXTERNAL_FLOAT64_ELEMENTS);
1666
1667     kind_if.Else();
1668     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1669                              EXTERNAL_UINT8_CLAMPED_ELEMENTS);
1670
1671     kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericStub");
1672
1673     kind_if.End();
1674   }
1675   index_name_split.Else();
1676   {
1677     // Key is a unique string.
1678     key = Pop();
1679
1680     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
1681         (1 << Map::kHasNamedInterceptor);
1682     BuildJSObjectCheck(receiver, bit_field_mask);
1683
1684     HIfContinuation continuation;
1685     BuildTestForDictionaryProperties(receiver, &continuation);
1686     IfBuilder if_dict_properties(this, &continuation);
1687     if_dict_properties.Then();
1688     {
1689       // Key is a string, properties are in dictionary mode.
1690       BuildNonGlobalObjectCheck(receiver);
1691
1692       HValue* properties = Add<HLoadNamedField>(
1693           receiver, static_cast<HValue*>(NULL),
1694           HObjectAccess::ForPropertiesPointer());
1695
1696       HValue* hash =
1697           Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1698           HObjectAccess::ForNameHashField());
1699
1700       hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));
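           // Name::kHashField keeps flag bits below Name::kHashShift, so the
           // shift extracts the actual hash used to probe the property
           // dictionary.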
1701
1702       HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
1703                                                           properties,
1704                                                           key,
1705                                                           hash);
1706       Push(value);
1707     }
1708     if_dict_properties.Else();
1709     {
1710       // Key is a string, properties are in fast mode.
1711       HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
1712
1713       ExternalReference cache_keys_ref =
1714           ExternalReference::keyed_lookup_cache_keys(isolate());
1715       HValue* cache_keys = Add<HConstant>(cache_keys_ref);
1716
1717       HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1718                                          HObjectAccess::ForMap());
1719       HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
1720       base_index->ClearFlag(HValue::kCanOverflow);
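           // The keyed lookup cache stores (map, name) pairs in its keys table
           // and the matching field offsets in a parallel table; the factor 2
           // above corresponds to the two words per key entry, and each bucket
           // is probed kEntriesPerBucket times below.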
1721
1722       HIfContinuation inline_or_runtime_continuation(
1723           graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
1724       {
1725         IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
1726         for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
1727              ++probe) {
1728           IfBuilder* lookup_if = &lookup_ifs[probe];
1729           lookup_if->Initialize(this);
1730           int probe_base = probe * KeyedLookupCache::kEntryLength;
1731           HValue* map_index = AddUncasted<HAdd>(
1732               base_index,
1733               Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
1734           map_index->ClearFlag(HValue::kCanOverflow);
1735           HValue* key_index = AddUncasted<HAdd>(
1736               base_index,
1737               Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
1738           key_index->ClearFlag(HValue::kCanOverflow);
1739           HValue* map_to_check =
1740               Add<HLoadKeyed>(cache_keys, map_index, static_cast<HValue*>(NULL),
1741                               FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
1742           lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
1743           lookup_if->And();
1744           HValue* key_to_check =
1745               Add<HLoadKeyed>(cache_keys, key_index, static_cast<HValue*>(NULL),
1746                               FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
1747           lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
1748           lookup_if->Then();
1749           {
1750             ExternalReference cache_field_offsets_ref =
1751                 ExternalReference::keyed_lookup_cache_field_offsets(isolate());
1752             HValue* cache_field_offsets =
1753                 Add<HConstant>(cache_field_offsets_ref);
1754             HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
1755             index->ClearFlag(HValue::kCanOverflow);
1756             HValue* property_index = Add<HLoadKeyed>(
1757                 cache_field_offsets, index, static_cast<HValue*>(NULL),
1758                 EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
1759             Push(property_index);
1760           }
1761           lookup_if->Else();
1762         }
1763         for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
1764           lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
1765         }
1766       }
1767
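           // All probes merge into a single continuation: on a cache hit the
           // field offset is already on the environment stack; if every probe
           // missed, control reaches the Else branch below.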
1768       IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
1769       inline_or_runtime.Then();
1770       {
1771         // Found a cached index, load property inline.
1772         Push(Add<HLoadFieldByIndex>(receiver, Pop()));
1773       }
1774       inline_or_runtime.Else();
1775       {
1776         // KeyedLookupCache miss; call runtime.
1777         Add<HPushArguments>(receiver, key);
1778         Push(Add<HCallRuntime>(
1779             isolate()->factory()->empty_string(),
1780             Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
1781       }
1782       inline_or_runtime.End();
1783     }
1784     if_dict_properties.End();
1785   }
1786   index_name_split.End();
1787
1788   return Pop();
1789 }
1790
1791
1792 Handle<Code> KeyedLoadGenericStub::GenerateCode() {
1793   return DoGenerateCode(this);
1794 }
1795
1796
1797 } }  // namespace v8::internal