// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfoWithZone* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
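
  // A sketch of the optimized code map entry layout assumed by the helpers
  // below (field offsets come from SharedFunctionInfo, used by
  // LoadFromOptimizedCodeMap; the authoritative layout lives in objects.h):
  //
  //   optimized_map[map_index + kContextOffset]    -> native context
  //   optimized_map[map_index + kCachedCodeOffset] -> optimized Code object
  //   optimized_map[map_index + kLiteralsOffset]   -> literals array
  //   optimized_map[map_index + kOsrAstIdOffset]   -> OSR AST id (smi)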
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

  // Tail calls handler found at array[map_index + 1].
  void TailCallHandler(HValue* receiver, HValue* name, HValue* array,
                       HValue* map_index, HValue* slot, HValue* vector);

  // Tail calls handler_code.
  void TailCallHandler(HValue* receiver, HValue* name, HValue* slot,
                       HValue* vector, HValue* handler_code);

  void TailCallMiss(HValue* receiver, HValue* name, HValue* slot,
                    HValue* vector, bool keyed_load);

  // Handle MONOMORPHIC and POLYMORPHIC LoadIC and KeyedLoadIC cases.
  void HandleArrayCases(HValue* array, HValue* receiver, HValue* name,
                        HValue* slot, HValue* vector, bool keyed_load);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfoWithZone* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
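    // Note: undefined == undefined is always true, so the IfNot condition
    // never holds and the ElseDeopt below is taken unconditionally.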
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CompilationInfoWithZone info(stub, isolate);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }
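  // Worked example (hypothetical numbers, 64-bit build): for length == 4,
  // size = JSObject::kHeaderSize + 4 * kPointerSize = 24 + 32 = 56 bytes,
  // plus AllocationMemento::kSize on top when pretenuring is enabled.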

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map =
      isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantUndefined());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template <>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
  HValue* result;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                             NEVER_RETURN_HOLE);
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
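  //
  // Hypothetical example: for function f(a, b) called as f(1, 2, 3), the
  // elements array could look like
  //
  //   [ context, unmapped_array, Smi(slot_a), Smi(slot_b) ]
  //
  // Keys 0 and 1 resolve through the context slots; for key 2 (the extra
  // actual argument) key + 2 >= elements.length, so the value is read from
  // unmapped_array[2] instead.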

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                       FAST_ELEMENTS, ALLOW_RETURN_HOLE);
      environment()->Push(result);
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key);
      environment()->Push(result);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key);
    environment()->Push(result);
  }
  in_range.End();

  return environment()->Pop();
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
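  // For example, Array.apply(null, some_large_array) could supply more
  // arguments than any parsed call site, tripping this bounds check.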
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
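  // For instance (hypothetical), new Array(1, 2, obj) may start filling a
  // smi array and then bail out on the non-smi third argument; the
  // pre-filled holes keep the partially written array in a valid state.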
1024   //
1025   // TODO(mvstanton): consider an instruction to memset fill the array
1026   // with zero in this case instead.
1027   JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
1028       ? JSArrayBuilder::FILL_WITH_HOLE
1029       : JSArrayBuilder::DONT_FILL_WITH_HOLE;
1030   HValue* new_object = array_builder->AllocateArray(checked_length,
1031                                                     max_alloc_length,
1032                                                     checked_length,
1033                                                     fill_mode);
1034   HValue* elements = array_builder->GetElementsLocation();
1035   DCHECK(elements != NULL);
1036
1037   // Now populate the elements correctly.
1038   LoopBuilder builder(this,
1039                       context(),
1040                       LoopBuilder::kPostIncrement);
1041   HValue* start = graph()->GetConstant0();
1042   HValue* key = builder.BeginBody(start, checked_length, Token::LT);
1043   HInstruction* argument_elements = Add<HArgumentsElements>(false);
1044   HInstruction* argument = Add<HAccessArgumentsAt>(
1045       argument_elements, checked_length, key);
1046
1047   Add<HStoreKeyed>(elements, key, argument, kind);
1048   builder.EndBody();
1049   return new_object;
1050 }
1051
1052
1053 template <>
1054 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
1055   ElementsKind kind = casted_stub()->elements_kind();
1056   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1057   return BuildArrayConstructor(kind, override_mode, NONE);
1058 }
1059
1060
1061 Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
1062   return DoGenerateCode(this);
1063 }
1064
1065
1066 template <>
1067 HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
1068     BuildCodeStub() {
1069   ElementsKind kind = casted_stub()->elements_kind();
1070   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1071   return BuildArrayConstructor(kind, override_mode, SINGLE);
1072 }
1073
1074
1075 Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
1076   return DoGenerateCode(this);
1077 }
1078
1079
1080 template <>
1081 HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
1082   ElementsKind kind = casted_stub()->elements_kind();
1083   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1084   return BuildArrayConstructor(kind, override_mode, MULTIPLE);
1085 }
1086
1087
1088 Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
1089   return DoGenerateCode(this);
1090 }
1091
1092
1093 template <>
1094 HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
1095     BuildCodeStub() {
1096   ElementsKind kind = casted_stub()->elements_kind();
1097   return BuildInternalArrayConstructor(kind, NONE);
1098 }
1099
1100
1101 Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
1102   return DoGenerateCode(this);
1103 }
1104
1105
1106 template <>
1107 HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
1108     BuildCodeStub() {
1109   ElementsKind kind = casted_stub()->elements_kind();
1110   return BuildInternalArrayConstructor(kind, SINGLE);
1111 }
1112
1113
1114 Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
1115   return DoGenerateCode(this);
1116 }
1117
1118
1119 template <>
1120 HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
1121     BuildCodeStub() {
1122   ElementsKind kind = casted_stub()->elements_kind();
1123   return BuildInternalArrayConstructor(kind, MULTIPLE);
1124 }
1125
1126
1127 Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
1128   return DoGenerateCode(this);
1129 }
1130
1131
1132 template <>
1133 HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
1134   Isolate* isolate = graph()->isolate();
1135   CompareNilICStub* stub = casted_stub();
1136   HIfContinuation continuation;
1137   Handle<Map> sentinel_map(isolate->heap()->meta_map());
1138   Type* type = stub->GetType(zone(), sentinel_map);
1139   BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
1140   IfBuilder if_nil(this, &continuation);
1141   if_nil.Then();
1142   if (continuation.IsFalseReachable()) {
1143     if_nil.Else();
1144     if_nil.Return(graph()->GetConstant0());
1145   }
1146   if_nil.End();
1147   return continuation.IsTrueReachable()
1148       ? graph()->GetConstant1()
1149       : graph()->GetConstantUndefined();
1150 }
1151
1152
1153 Handle<Code> CompareNilICStub::GenerateCode() {
1154   return DoGenerateCode(this);
1155 }
1156
1157
1158 template <>
1159 HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
1160   BinaryOpICState state = casted_stub()->state();
1161
1162   HValue* left = GetParameter(BinaryOpICStub::kLeft);
1163   HValue* right = GetParameter(BinaryOpICStub::kRight);
1164
1165   Type* left_type = state.GetLeftType(zone());
1166   Type* right_type = state.GetRightType(zone());
1167   Type* result_type = state.GetResultType(zone());
1168
1169   DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
1170          (state.HasSideEffects() || !result_type->Is(Type::None())));
1171
1172   HValue* result = NULL;
1173   HAllocationMode allocation_mode(NOT_TENURED);
1174   if (state.op() == Token::ADD &&
1175       (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
1176       !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
1177     // For the generic add stub a fast case for string addition is performance
1178     // critical.
1179     if (left_type->Maybe(Type::String())) {
1180       IfBuilder if_leftisstring(this);
1181       if_leftisstring.If<HIsStringAndBranch>(left);
1182       if_leftisstring.Then();
1183       {
1184         Push(BuildBinaryOperation(
1185                     state.op(), left, right,
1186                     Type::String(zone()), right_type,
1187                     result_type, state.fixed_right_arg(),
1188                     allocation_mode));
1189       }
1190       if_leftisstring.Else();
1191       {
1192         Push(BuildBinaryOperation(
1193                     state.op(), left, right,
1194                     left_type, right_type, result_type,
1195                     state.fixed_right_arg(), allocation_mode));
1196       }
1197       if_leftisstring.End();
1198       result = Pop();
1199     } else {
1200       IfBuilder if_rightisstring(this);
1201       if_rightisstring.If<HIsStringAndBranch>(right);
1202       if_rightisstring.Then();
1203       {
1204         Push(BuildBinaryOperation(
1205                     state.op(), left, right,
1206                     left_type, Type::String(zone()),
1207                     result_type, state.fixed_right_arg(),
1208                     allocation_mode));
1209       }
1210       if_rightisstring.Else();
1211       {
1212         Push(BuildBinaryOperation(
1213                     state.op(), left, right,
1214                     left_type, right_type, result_type,
1215                     state.fixed_right_arg(), allocation_mode));
1216       }
1217       if_rightisstring.End();
1218       result = Pop();
1219     }
1220   } else {
1221     result = BuildBinaryOperation(
1222             state.op(), left, right,
1223             left_type, right_type, result_type,
1224             state.fixed_right_arg(), allocation_mode);
1225   }
1226
1227   // If we encounter a generic argument, the number conversion is
1228   // observable, thus we cannot afford to bail out after the fact.
1229   if (!state.HasSideEffects()) {
1230     result = EnforceNumberType(result, result_type);
1231   }
1232
1233   return result;
1234 }
1235
1236
1237 Handle<Code> BinaryOpICStub::GenerateCode() {
1238   return DoGenerateCode(this);
1239 }
1240
1241
1242 template <>
1243 HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
1244   BinaryOpICState state = casted_stub()->state();
1245
1246   HValue* allocation_site = GetParameter(
1247       BinaryOpWithAllocationSiteStub::kAllocationSite);
1248   HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1249   HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1250
1251   Type* left_type = state.GetLeftType(zone());
1252   Type* right_type = state.GetRightType(zone());
1253   Type* result_type = state.GetResultType(zone());
1254   HAllocationMode allocation_mode(allocation_site);
1255
1256   return BuildBinaryOperation(state.op(), left, right,
1257                               left_type, right_type, result_type,
1258                               state.fixed_right_arg(), allocation_mode);
1259 }
1260
1261
1262 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
1263   return DoGenerateCode(this);
1264 }
1265
1266
1267 template <>
1268 HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
1269   StringAddStub* stub = casted_stub();
1270   StringAddFlags flags = stub->flags();
1271   PretenureFlag pretenure_flag = stub->pretenure_flag();
1272
1273   HValue* left = GetParameter(StringAddStub::kLeft);
1274   HValue* right = GetParameter(StringAddStub::kRight);
1275
1276   // Make sure that both arguments are strings if not known in advance.
1277   if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
1278     left = BuildCheckString(left);
1279   }
1280   if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
1281     right = BuildCheckString(right);
1282   }
1283
1284   return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1285 }
1286
1287
1288 Handle<Code> StringAddStub::GenerateCode() {
1289   return DoGenerateCode(this);
1290 }
1291
1292
1293 template <>
1294 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
1295   ToBooleanStub* stub = casted_stub();
1296   HValue* true_value = NULL;
1297   HValue* false_value = NULL;
1298
1299   switch (stub->mode()) {
1300     case ToBooleanStub::RESULT_AS_SMI:
1301       true_value = graph()->GetConstant1();
1302       false_value = graph()->GetConstant0();
1303       break;
1304     case ToBooleanStub::RESULT_AS_ODDBALL:
1305       true_value = graph()->GetConstantTrue();
1306       false_value = graph()->GetConstantFalse();
1307       break;
1308     case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
1309       true_value = graph()->GetConstantFalse();
1310       false_value = graph()->GetConstantTrue();
1311       break;
1312   }
1313
1314   IfBuilder if_true(this);
1315   if_true.If<HBranch>(GetParameter(0), stub->types());
1316   if_true.Then();
1317   if_true.Return(true_value);
1318   if_true.Else();
1319   if_true.End();
1320   return false_value;
1321 }
1322
1323
1324 Handle<Code> ToBooleanStub::GenerateCode() {
1325   return DoGenerateCode(this);
1326 }
1327
1328
1329 template <>
1330 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
1331   StoreGlobalStub* stub = casted_stub();
1332   HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
1333   if (stub->check_global()) {
1334     // Check that the map of the global has not changed: use a placeholder map
1335     // that will be replaced later with the global object's map.
1336     HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
1337     HValue* proxy_map =
1338         Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
1339     HValue* global =
1340         Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
1341     Handle<Map> placeholder_map = isolate()->factory()->meta_map();
1342     HValue* cell = Add<HConstant>(Map::WeakCellForMap(placeholder_map));
1343     HValue* expected_map =
1344         Add<HLoadNamedField>(cell, nullptr, HObjectAccess::ForWeakCellValue());
1345     HValue* map =
1346         Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
1347     IfBuilder map_check(this);
1348     map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
1349     map_check.ThenDeopt(Deoptimizer::kUnknownMap);
1350     map_check.End();
1351   }
1352
1353   HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
1354       StoreGlobalStub::property_cell_placeholder(isolate())));
1355   HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
1356                                       HObjectAccess::ForWeakCellValue());
1357   HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
1358   HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);
1359
1360   if (stub->is_constant()) {
1361     IfBuilder builder(this);
1362     builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1363     builder.Then();
1364     builder.ElseDeopt(
1365         Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
1366     builder.End();
1367   } else {
1368     // Load the payload of the global parameter cell. A hole indicates that the
1369     // property has been deleted and that the store must be handled by the
1370     // runtime.
1371     IfBuilder builder(this);
1372     HValue* hole_value = graph()->GetConstantHole();
1373     builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
1374     builder.Then();
1375     builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
1376     builder.Else();
1377     HStoreNamedField* store = Add<HStoreNamedField>(cell, access, value);
1378     store->MarkReceiverAsCell();
1379     builder.End();
1380   }
1381
1382   return value;
1383 }
1384
1385
1386 Handle<Code> StoreGlobalStub::GenerateCode() {
1387   return DoGenerateCode(this);
1388 }
1389
1390
1391 template<>
1392 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
1393   HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
1394   HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
1395   HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
1396   HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);
1397
1398   if (FLAG_trace_elements_transitions) {
1399     // Tracing elements transitions is the job of the runtime.
1400     Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
1401                      Deoptimizer::EAGER);
1402   } else {
1403     info()->MarkAsSavesCallerDoubles();
1404
1405     BuildTransitionElementsKind(object, map,
1406                                 casted_stub()->from_kind(),
1407                                 casted_stub()->to_kind(),
1408                                 casted_stub()->is_jsarray());
1409
1410     BuildUncheckedMonomorphicElementAccess(object, key, value,
1411                                            casted_stub()->is_jsarray(),
1412                                            casted_stub()->to_kind(),
1413                                            STORE, ALLOW_RETURN_HOLE,
1414                                            casted_stub()->store_mode());
1415   }
1416
1417   return value;
1418 }
1419
1420
1421 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
1422   return DoGenerateCode(this);
1423 }
1424
1425
1426 void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
1427     HValue* js_function,
1428     HValue* native_context,
1429     IfBuilder* builder,
1430     HValue* optimized_map,
1431     HValue* map_index) {
1432   HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
1433   HValue* context_slot = LoadFromOptimizedCodeMap(
1434       optimized_map, map_index, SharedFunctionInfo::kContextOffset);
1435   HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
1436       optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
1437   builder->If<HCompareObjectEqAndBranch>(native_context,
1438                                          context_slot);
1439   builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
1440   builder->Then();
1441   HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
1442       map_index, SharedFunctionInfo::kCachedCodeOffset);
1443   // ... and the literals from the same code-map entry.
1444   HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
1445       map_index, SharedFunctionInfo::kLiteralsOffset);
1446
1447   Counters* counters = isolate()->counters();
1448   AddIncrementCounter(counters->fast_new_closure_install_optimized());
1449
1450   // TODO(fschneider): Idea: store proper code pointers in the optimized code
1451   // map and either unmangle them on marking or do nothing as the whole map is
1452   // discarded on major GC anyway.
1453   Add<HStoreCodeEntry>(js_function, code_object);
1454   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1455                         literals);
1456
1457   // Now link the function into the native context's list of optimized functions.
1458   HValue* optimized_functions_list = Add<HLoadNamedField>(
1459       native_context, nullptr,
1460       HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
1461   Add<HStoreNamedField>(js_function,
1462                         HObjectAccess::ForNextFunctionLinkPointer(),
1463                         optimized_functions_list);
1464
1465   // This store is the only one that should have a write barrier.
1466   Add<HStoreNamedField>(native_context,
1467            HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
1468            js_function);
1469
1470   // The builder is left in the "Then()" state after this function returns.
1471 }
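
// For reference, the optimized code map consulted above is a FixedArray of
// fixed-size entries. Judging from the offsets this file reads, each entry
// looks roughly like:
//
//   [ context, cached code, literals, osr ast id ]
//     (kContextOffset, kCachedCodeOffset, kLiteralsOffset, kOsrAstIdOffset)
//
// An entry is installed only when its context equals native_context and its
// OSR AST id is BailoutId::None(), i.e. it is not an OSR-specific entry.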
1472
1473
1474 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
1475                                                 HValue* shared_info) {
1476   Add<HStoreNamedField>(js_function,
1477                         HObjectAccess::ForNextFunctionLinkPointer(),
1478                         graph()->GetConstantUndefined());
1479   HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
1480                                              HObjectAccess::ForCodeOffset());
1481   Add<HStoreCodeEntry>(js_function, code_object);
1482 }
1483
1484
1485 HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
1486     HValue* optimized_map,
1487     HValue* iterator,
1488     int field_offset) {
1489   // By making sure to express these loads in the form [<hvalue> + constant],
1490   // the keyed load can be hoisted.
1491   DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
1492   HValue* field_slot = iterator;
1493   if (field_offset > 0) {
1494     HValue* field_offset_value = Add<HConstant>(field_offset);
1495     field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
1496   }
1497   HInstruction* field_entry =
1498       Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
1499   return field_entry;
1500 }
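
// For example, with field_offset == 2 this emits optimized_map[iterator + 2]
// -- an index of the form [<hvalue> + constant] -- rather than a separately
// materialized index, which is the shape the comment above says allows the
// keyed load to be hoisted.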
1501
1502
1503 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
1504     HValue* js_function,
1505     HValue* shared_info,
1506     HValue* native_context) {
1507   Counters* counters = isolate()->counters();
1508   IfBuilder is_optimized(this);
1509   HInstruction* optimized_map = Add<HLoadNamedField>(
1510       shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
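  // Judging from the comparison below, an empty optimized code map is
  // represented by a Smi-zero sentinel, so the (misleadingly named)
  // null_constant is really the integer 0.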
1511   HValue* null_constant = Add<HConstant>(0);
1512   is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
1513   is_optimized.Then();
1514   {
1515     BuildInstallCode(js_function, shared_info);
1516   }
1517   is_optimized.Else();
1518   {
1519     AddIncrementCounter(counters->fast_new_closure_try_optimized());
1520     // optimized_map points to a fixed array of 4-element entries
1521     // (native context, optimized code, literals, OSR AST id).
1522     // The map must never be empty, so check the first entry.
1523     HValue* first_entry_index =
1524         Add<HConstant>(SharedFunctionInfo::kEntriesStart);
1525     IfBuilder already_in(this);
1526     BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
1527                                       optimized_map, first_entry_index);
1528     already_in.Else();
1529     {
1530       // Iterate through the rest of the map backwards. Do not double-check
1531       // the first entry. After the loop, if no matching optimized code was
1532       // found, install the unoptimized code.
1533       // for(i = map.length() - SharedFunctionInfo::kEntryLength;
1534       //     i > SharedFunctionInfo::kEntriesStart;
1535       //     i -= SharedFunctionInfo::kEntryLength) { .. }
1536       HValue* shared_function_entry_length =
1537           Add<HConstant>(SharedFunctionInfo::kEntryLength);
1538       LoopBuilder loop_builder(this,
1539                                context(),
1540                                LoopBuilder::kPostDecrement,
1541                                shared_function_entry_length);
1542       HValue* array_length = Add<HLoadNamedField>(
1543           optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
1544       HValue* start_pos = AddUncasted<HSub>(array_length,
1545                                             shared_function_entry_length);
1546       HValue* slot_iterator = loop_builder.BeginBody(start_pos,
1547                                                      first_entry_index,
1548                                                      Token::GT);
1549       {
1550         IfBuilder done_check(this);
1551         BuildCheckAndInstallOptimizedCode(js_function, native_context,
1552                                           &done_check,
1553                                           optimized_map,
1554                                           slot_iterator);
1555         // Fall out of the loop once a matching entry has been installed.
1556         loop_builder.Break();
1557       }
1558       loop_builder.EndBody();
1559
1560       // If slot_iterator equals the first entry index, then we failed to
1561       // find and install optimized code.
1562       IfBuilder no_optimized_code_check(this);
1563       no_optimized_code_check.If<HCompareNumericAndBranch>(
1564           slot_iterator, first_entry_index, Token::EQ);
1565       no_optimized_code_check.Then();
1566       {
1567         // Store the unoptimized code
1568         BuildInstallCode(js_function, shared_info);
1569       }
1570     }
1571   }
1572 }
1573
1574
1575 template<>
1576 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
1577   Counters* counters = isolate()->counters();
1578   Factory* factory = isolate()->factory();
1579   HInstruction* empty_fixed_array =
1580       Add<HConstant>(factory->empty_fixed_array());
1581   HValue* shared_info = GetParameter(0);
1582
1583   AddIncrementCounter(counters->fast_new_closure_total());
1584
1585   // Create a new closure from the given function info in new space.
1586   HValue* size = Add<HConstant>(JSFunction::kSize);
1587   HInstruction* js_function =
1588       Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);
1589
1590   int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
1591                                             casted_stub()->kind());
1592
1593   // Compute the function map in the current native context and set that
1594   // as the map of the allocated object.
1595   HInstruction* native_context = BuildGetNativeContext();
1596   HInstruction* map_slot_value = Add<HLoadNamedField>(
1597       native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
1598   Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
1599
1600   // Initialize the rest of the function.
1601   Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
1602                         empty_fixed_array);
1603   Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
1604                         empty_fixed_array);
1605   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1606                         empty_fixed_array);
1607   Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
1608                         graph()->GetConstantHole());
1609   Add<HStoreNamedField>(
1610       js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
1611   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1612                         context());
1613
1614   // Initialize the code pointer in the function to be the one
1615   // found in the shared function info object.
1616   // But first check if there is an optimized version for our context.
1617   if (FLAG_cache_optimized_code) {
1618     BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
1619   } else {
1620     BuildInstallCode(js_function, shared_info);
1621   }
1622
1623   return js_function;
1624 }
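
// Taken together, the stores above initialize the freshly allocated
// JSFunction as follows (a sketch of the field assignments, in store order):
//
//   map                      <- function map from the native context
//   properties               <- empty_fixed_array
//   elements                 <- empty_fixed_array
//   literals                 <- empty_fixed_array
//   prototype_or_initial_map <- the hole
//   shared_function_info     <- shared_info
//   context                  <- current context
//   code entry               <- unoptimized code, or a cached optimized
//                               version if the optimized code map has one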
1625
1626
1627 Handle<Code> FastNewClosureStub::GenerateCode() {
1628   return DoGenerateCode(this);
1629 }
1630
1631
1632 template<>
1633 HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
1634   int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;
1635
1636   // Get the function.
1637   HParameter* function = GetParameter(FastNewContextStub::kFunction);
1638
1639   // Allocate the context in new space.
1640   HAllocate* function_context = Add<HAllocate>(
1641       Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
1642       HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);
1643
1644   // Set up the object header.
1645   AddStoreMapConstant(function_context,
1646                       isolate()->factory()->function_context_map());
1647   Add<HStoreNamedField>(function_context,
1648                         HObjectAccess::ForFixedArrayLength(),
1649                         Add<HConstant>(length));
1650
1651   // Set up the fixed slots.
1652   Add<HStoreNamedField>(function_context,
1653                         HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
1654                         function);
1655   Add<HStoreNamedField>(function_context,
1656                         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
1657                         context());
1658   Add<HStoreNamedField>(function_context,
1659                         HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
1660                         graph()->GetConstant0());
1661
1662   // Copy the global object from the previous context.
1663   HValue* global_object = Add<HLoadNamedField>(
1664       context(), nullptr,
1665       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1666   Add<HStoreNamedField>(function_context,
1667                         HObjectAccess::ForContextSlot(
1668                             Context::GLOBAL_OBJECT_INDEX),
1669                         global_object);
1670
1671   // Initialize the rest of the slots to undefined.
1672   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
1673     Add<HStoreNamedField>(function_context,
1674                           HObjectAccess::ForContextSlot(i),
1675                           graph()->GetConstantUndefined());
1676   }
1677
1678   return function_context;
1679 }
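
// The resulting function context has the usual FixedArray-like layout
// (a sketch based on the stores above):
//
//   map                   function_context_map
//   length                slots() + Context::MIN_CONTEXT_SLOTS
//   CLOSURE_INDEX         the function
//   PREVIOUS_INDEX        the current context
//   EXTENSION_INDEX       0
//   GLOBAL_OBJECT_INDEX   copied from the previous context
//   remaining slots       undefined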
1680
1681
1682 Handle<Code> FastNewContextStub::GenerateCode() {
1683   return DoGenerateCode(this);
1684 }
1685
1686
1687 template <>
1688 HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
1689   HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1690   HValue* key = GetParameter(LoadDescriptor::kNameIndex);
1691
1692   Add<HCheckSmi>(key);
1693
1694   HValue* elements = AddLoadElements(receiver);
1695
1696   HValue* hash = BuildElementIndexHash(key);
1697
1698   return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
1699 }
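
// Conceptually (a hedged sketch; the actual probing is implemented by
// BuildUncheckedDictionaryElementLoad elsewhere): dictionary-mode elements
// live in a hash table keyed by element index, so instead of indexing a flat
// backing store the stub hashes the Smi key and probes the table for a
// matching entry.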
1700
1701
1702 Handle<Code> LoadDictionaryElementStub::GenerateCode() {
1703   return DoGenerateCode(this);
1704 }
1705
1706
1707 template<>
1708 HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
1709   // Determine the parameters.
1710   HValue* length = GetParameter(RegExpConstructResultStub::kLength);
1711   HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
1712   HValue* input = GetParameter(RegExpConstructResultStub::kInput);
1713
1714   info()->MarkMustNotHaveEagerFrame();
1715
1716   return BuildRegExpConstructResult(length, index, input);
1717 }
1718
1719
1720 Handle<Code> RegExpConstructResultStub::GenerateCode() {
1721   return DoGenerateCode(this);
1722 }
1723
1724
1725 template <>
1726 class CodeStubGraphBuilder<KeyedLoadGenericStub>
1727     : public CodeStubGraphBuilderBase {
1728  public:
1729   explicit CodeStubGraphBuilder(CompilationInfoWithZone* info)
1730       : CodeStubGraphBuilderBase(info) {}
1731
1732  protected:
1733   virtual HValue* BuildCodeStub();
1734
1735   void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
1736                                    HValue* bit_field2,
1737                                    ElementsKind kind);
1738
1739   void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
1740                             HValue* receiver,
1741                             HValue* key,
1742                             HValue* instance_type,
1743                             HValue* bit_field2,
1744                             ElementsKind kind);
1745
1746   void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
1747                                 HValue* receiver,
1748                                 HValue* key,
1749                                 HValue* instance_type,
1750                                 HValue* bit_field2,
1751                                 ElementsKind kind);
1752
1753   KeyedLoadGenericStub* casted_stub() {
1754     return static_cast<KeyedLoadGenericStub*>(stub());
1755   }
1756 };
1757
1758
1759 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
1760     HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
1761     ElementsKind kind) {
1762   ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
1763   HValue* kind_limit = Add<HConstant>(
1764       static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
1765
1766   if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
1767   if_builder->Then();
1768 }
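
// This works as a single numeric compare because Map::ElementsKindBits
// occupies the topmost bits of bit_field2: the lower flag bits cannot push
// the value past encode(kind + 1), so "bit_field2 < encode(kind + 1)" holds
// exactly when the map's elements kind is <= kind. (The bit layout is an
// assumption inferred from the encode() comparison above.)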
1769
1770
1771 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
1772     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1773     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
1774   DCHECK(!IsExternalArrayElementsKind(kind));
1775
1776   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1777
1778   IfBuilder js_array_check(this);
1779   js_array_check.If<HCompareNumericAndBranch>(
1780       instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
1781   js_array_check.Then();
1782   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1783                                               true, kind,
1784                                               LOAD, NEVER_RETURN_HOLE,
1785                                               STANDARD_STORE));
1786   js_array_check.Else();
1787   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1788                                               false, kind,
1789                                               LOAD, NEVER_RETURN_HOLE,
1790                                               STANDARD_STORE));
1791   js_array_check.End();
1792 }
1793
1794
1795 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
1796     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1797     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
1798   DCHECK(IsExternalArrayElementsKind(kind));
1799
1800   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1801
1802   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
1803                                               false, kind,
1804                                               LOAD, NEVER_RETURN_HOLE,
1805                                               STANDARD_STORE));
1806 }
1807
1808
1809 HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
1810   HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1811   HValue* key = GetParameter(LoadDescriptor::kNameIndex);
1812
1813   // Split into a smi/integer case and a unique string case.
1814   HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
1815                                                 graph()->CreateBasicBlock());
1816
1817   BuildKeyedIndexCheck(key, &index_name_split_continuation);
1818
1819   IfBuilder index_name_split(this, &index_name_split_continuation);
1820   index_name_split.Then();
1821   {
1822     // Key is an index (number).
1823     key = Pop();
1824
1825     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
1826       (1 << Map::kHasIndexedInterceptor);
1827     BuildJSObjectCheck(receiver, bit_field_mask);
1828
1829     HValue* map =
1830         Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
1831
1832     HValue* instance_type =
1833         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
1834
1835     HValue* bit_field2 =
1836         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
1837
1838     IfBuilder kind_if(this);
1839     BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1840                          FAST_HOLEY_ELEMENTS);
1841
1842     kind_if.Else();
1843     {
1844       BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1845                            FAST_HOLEY_DOUBLE_ELEMENTS);
1846     }
1847     kind_if.Else();
1848
1849     // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
1850     BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
1851     {
1852       HValue* elements = AddLoadElements(receiver);
1853
1854       HValue* hash = BuildElementIndexHash(key);
1855
1856       Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
1857     }
1858     kind_if.Else();
1859
1860     // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
1861     BuildElementsKindLimitCheck(&kind_if, bit_field2,
1862                                 SLOPPY_ARGUMENTS_ELEMENTS);
1863     // Sloppy-arguments (non-strict) elements are not handled here.
1864     Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
1865                      Deoptimizer::EAGER);
1866     Push(graph()->GetConstant0());
1867
1868     kind_if.Else();
1869     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1870                              EXTERNAL_INT8_ELEMENTS);
1871
1872     kind_if.Else();
1873     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1874                              EXTERNAL_UINT8_ELEMENTS);
1875
1876     kind_if.Else();
1877     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1878                              EXTERNAL_INT16_ELEMENTS);
1879
1880     kind_if.Else();
1881     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1882                              EXTERNAL_UINT16_ELEMENTS);
1883
1884     kind_if.Else();
1885     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1886                              EXTERNAL_INT32_ELEMENTS);
1887
1888     kind_if.Else();
1889     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1890                              EXTERNAL_UINT32_ELEMENTS);
1891
1892     kind_if.Else();
1893     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1894                              EXTERNAL_FLOAT32_ELEMENTS);
1895
1896     kind_if.Else();
1897     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1898                              EXTERNAL_FLOAT64_ELEMENTS);
1899
1900     kind_if.Else();
1901     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1902                              EXTERNAL_UINT8_CLAMPED_ELEMENTS);
1903
1904     kind_if.ElseDeopt(
1905         Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);
1906
1907     kind_if.End();
1908   }
1909   index_name_split.Else();
1910   {
1911     // Key is a unique string.
1912     key = Pop();
1913
1914     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
1915         (1 << Map::kHasNamedInterceptor);
1916     BuildJSObjectCheck(receiver, bit_field_mask);
1917
1918     HIfContinuation continuation;
1919     BuildTestForDictionaryProperties(receiver, &continuation);
1920     IfBuilder if_dict_properties(this, &continuation);
1921     if_dict_properties.Then();
1922     {
1923       // Key is a string and the properties are in dictionary mode.
1924       BuildNonGlobalObjectCheck(receiver);
1925
1926       HValue* properties = Add<HLoadNamedField>(
1927           receiver, nullptr, HObjectAccess::ForPropertiesPointer());
1928
1929       HValue* hash =
1930           Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());
1931
1932       hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));
1933
1934       HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
1935                                                           properties,
1936                                                           key,
1937                                                           hash);
1938       Push(value);
1939     }
1940     if_dict_properties.Else();
1941     {
1942       // TODO(dcarney): don't use keyed lookup cache, but convert to use
1943       // megamorphic stub cache.
1944       UNREACHABLE();
1945       // Key is a string and the properties are in fast mode.
1946       HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
1947
1948       ExternalReference cache_keys_ref =
1949           ExternalReference::keyed_lookup_cache_keys(isolate());
1950       HValue* cache_keys = Add<HConstant>(cache_keys_ref);
1951
1952       HValue* map =
1953           Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
1954       HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
1955       base_index->ClearFlag(HValue::kCanOverflow);
1956
1957       HIfContinuation inline_or_runtime_continuation(
1958           graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
1959       {
1960         IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
1961         for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
1962              ++probe) {
1963           IfBuilder* lookup_if = &lookup_ifs[probe];
1964           lookup_if->Initialize(this);
1965           int probe_base = probe * KeyedLookupCache::kEntryLength;
1966           HValue* map_index = AddUncasted<HAdd>(
1967               base_index,
1968               Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
1969           map_index->ClearFlag(HValue::kCanOverflow);
1970           HValue* key_index = AddUncasted<HAdd>(
1971               base_index,
1972               Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
1973           key_index->ClearFlag(HValue::kCanOverflow);
1974           HValue* map_to_check =
1975               Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
1976                               NEVER_RETURN_HOLE, 0);
1977           lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
1978           lookup_if->And();
1979           HValue* key_to_check =
1980               Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
1981                               NEVER_RETURN_HOLE, 0);
1982           lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
1983           lookup_if->Then();
1984           {
1985             ExternalReference cache_field_offsets_ref =
1986                 ExternalReference::keyed_lookup_cache_field_offsets(isolate());
1987             HValue* cache_field_offsets =
1988                 Add<HConstant>(cache_field_offsets_ref);
1989             HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
1990             index->ClearFlag(HValue::kCanOverflow);
1991             HValue* property_index =
1992                 Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
1993                                 EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
1994             Push(property_index);
1995           }
1996           lookup_if->Else();
1997         }
1998         for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
1999           lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
2000         }
2001       }
2002
2003       IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
2004       inline_or_runtime.Then();
2005       {
2006         // Found a cached index; load the property inline.
2007         Push(Add<HLoadFieldByIndex>(receiver, Pop()));
2008       }
2009       inline_or_runtime.Else();
2010       {
2011         // KeyedLookupCache miss; call runtime.
2012         Add<HPushArguments>(receiver, key);
2013         Push(Add<HCallRuntime>(
2014             isolate()->factory()->empty_string(),
2015             Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
2016       }
2017       inline_or_runtime.End();
2018     }
2019     if_dict_properties.End();
2020   }
2021   index_name_split.End();
2022
2023   return Pop();
2024 }
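
// A summary of the dispatch above. The numeric-key half is a ladder of
// BuildElementsKindLimitCheck range tests:
//
//   fast holey elements -> fast holey doubles -> dictionary elements
//     -> sloppy-arguments elements (deopts) -> each external array kind
//     -> otherwise deopt with kElementsKindUnhandledInKeyedLoadGenericStub.
//
// The string-key half probes dictionary-mode properties inline; the
// fast-properties path below it is currently unreachable and would otherwise
// consult the KeyedLookupCache before falling back to %KeyedGetProperty.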
2025
2026
2027 Handle<Code> KeyedLoadGenericStub::GenerateCode() {
2028   return DoGenerateCode(this);
2029 }
2030
2031
2032 void CodeStubGraphBuilderBase::TailCallHandler(HValue* receiver, HValue* name,
2033                                                HValue* array, HValue* map_index,
2034                                                HValue* slot, HValue* vector) {
2035   // The handler is at array[map_index + 1]. Compute this with a custom offset
2036   // to HLoadKeyed.
2037   int offset =
2038       GetDefaultHeaderSizeForElementsKind(FAST_ELEMENTS) + kPointerSize;
2039   HValue* handler_code = Add<HLoadKeyed>(
2040       array, map_index, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, offset);
2041   TailCallHandler(receiver, name, slot, vector, handler_code);
2042 }
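
// Worked out: the offset adds the default header size for FAST_ELEMENTS
// (skipping the FixedArray header to element 0) plus one kPointerSize, so
// the HLoadKeyed above reads array[map_index + 1] -- the handler paired
// with the map at array[map_index].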
2043
2044
2045 void CodeStubGraphBuilderBase::TailCallHandler(HValue* receiver, HValue* name,
2046                                                HValue* slot, HValue* vector,
2047                                                HValue* handler_code) {
2048   VectorLoadICDescriptor descriptor(isolate());
2049   HValue* op_vals[] = {context(), receiver, name, slot, vector};
2050   Add<HCallWithDescriptor>(handler_code, 0, descriptor,
2051                            Vector<HValue*>(op_vals, 5), TAIL_CALL);
2052   // We never return here; it is a tail call.
2053 }
2054
2055
2056 void CodeStubGraphBuilderBase::TailCallMiss(HValue* receiver, HValue* name,
2057                                             HValue* slot, HValue* vector,
2058                                             bool keyed_load) {
2059   DCHECK(FLAG_vector_ics);
2060   Add<HTailCallThroughMegamorphicCache>(
2061       receiver, name, slot, vector,
2062       HTailCallThroughMegamorphicCache::ComputeFlags(keyed_load, true));
2063   // We never return here, it is a tail call.
2064 }
2065
2066
2067 void CodeStubGraphBuilderBase::HandleArrayCases(HValue* array, HValue* receiver,
2068                                                 HValue* name, HValue* slot,
2069                                                 HValue* vector,
2070                                                 bool keyed_load) {
2071   HConstant* constant_two = Add<HConstant>(2);
2072   HConstant* constant_three = Add<HConstant>(3);
2073
2074   IfBuilder if_receiver_heap_object(this);
2075   if_receiver_heap_object.IfNot<HIsSmiAndBranch>(receiver);
2076   if_receiver_heap_object.Then();
2077   Push(AddLoadMap(receiver, nullptr));
2078   if_receiver_heap_object.Else();
2079   HConstant* heap_number_map =
2080       Add<HConstant>(isolate()->factory()->heap_number_map());
2081   Push(heap_number_map);
2082   if_receiver_heap_object.End();
2083   HValue* receiver_map = Pop();
2084
2085   HValue* start =
2086       keyed_load ? graph()->GetConstant1() : graph()->GetConstant0();
2087   HValue* weak_cell =
2088       Add<HLoadKeyed>(array, start, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
2089   // Load the weak cell value. It may be Smi(0) or a map; compare it against
2090   // the receiver_map in either case.
2091   HValue* array_map = Add<HLoadNamedField>(weak_cell, nullptr,
2092                                            HObjectAccess::ForWeakCellValue());
2093
2094   IfBuilder if_correct_map(this);
2095   if_correct_map.If<HCompareObjectEqAndBranch>(receiver_map, array_map);
2096   if_correct_map.Then();
2097   { TailCallHandler(receiver, name, array, start, slot, vector); }
2098   if_correct_map.Else();
2099   {
2100     // If our array has more elements, the IC is polymorphic. Look for the
2101     // receiver map in the rest of the array.
2102     HValue* length = AddLoadFixedArrayLength(array, nullptr);
2103     LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement,
2104                         constant_two);
2105     start = keyed_load ? constant_three : constant_two;
2106     HValue* key = builder.BeginBody(start, length, Token::LT);
2107     {
2108       HValue* weak_cell = Add<HLoadKeyed>(array, key, nullptr, FAST_ELEMENTS,
2109                                           ALLOW_RETURN_HOLE);
2110       HValue* array_map = Add<HLoadNamedField>(
2111           weak_cell, nullptr, HObjectAccess::ForWeakCellValue());
2112       IfBuilder if_correct_poly_map(this);
2113       if_correct_poly_map.If<HCompareObjectEqAndBranch>(receiver_map,
2114                                                         array_map);
2115       if_correct_poly_map.Then();
2116       { TailCallHandler(receiver, name, array, key, slot, vector); }
2117     }
2118     builder.EndBody();
2119   }
2120   if_correct_map.End();
2121 }
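
// The feedback array walked above appears to hold (weak cell, handler)
// pairs, with keyed ICs additionally recording the name at index 0:
//
//   non-keyed: [ cell_0, handler_0, cell_1, handler_1, ... ]        start 0
//   keyed:     [ name, cell_0, handler_0, cell_1, handler_1, ... ]  start 1
//
// hence the loop step of 2 and the "+ 1" handler load in TailCallHandler.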
2122
2123
2124 template <>
2125 HValue* CodeStubGraphBuilder<VectorLoadStub>::BuildCodeStub() {
2126   HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
2127   HValue* name = GetParameter(VectorLoadICDescriptor::kNameIndex);
2128   HValue* slot = GetParameter(VectorLoadICDescriptor::kSlotIndex);
2129   HValue* vector = GetParameter(VectorLoadICDescriptor::kVectorIndex);
2130
2131   // If the feedback is an array, then the IC is in the monomorphic or
2132   // polymorphic state.
2133   HValue* feedback =
2134       Add<HLoadKeyed>(vector, slot, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
2135   IfBuilder array_checker(this);
2136   array_checker.If<HCompareMap>(feedback,
2137                                 isolate()->factory()->fixed_array_map());
2138   array_checker.Then();
2139   { HandleArrayCases(feedback, receiver, name, slot, vector, false); }
2140   array_checker.Else();
2141   {
2142     // Is the IC megamorphic?
2143     IfBuilder mega_checker(this);
2144     HConstant* megamorphic_symbol =
2145         Add<HConstant>(isolate()->factory()->megamorphic_symbol());
2146     mega_checker.If<HCompareObjectEqAndBranch>(feedback, megamorphic_symbol);
2147     mega_checker.Then();
2148     {
2149       // Probe the stub cache.
2150       Add<HTailCallThroughMegamorphicCache>(
2151           receiver, name, slot, vector,
2152           HTailCallThroughMegamorphicCache::ComputeFlags(false, false));
2153     }
2154     mega_checker.End();
2155   }
2156   array_checker.End();
2157
2158   TailCallMiss(receiver, name, slot, vector, false);
2159   return graph()->GetConstant0();
2160 }
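
// Dispatch summary for the stub above:
//
//   feedback is a FixedArray       -> monomorphic/polymorphic; HandleArrayCases
//   feedback is megamorphic_symbol -> probe the megamorphic stub cache
//   anything else                  -> fall through to the miss tail call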
2161
2162
2163 Handle<Code> VectorLoadStub::GenerateCode() { return DoGenerateCode(this); }
2164
2165
2166 template <>
2167 HValue* CodeStubGraphBuilder<VectorKeyedLoadStub>::BuildCodeStub() {
2168   HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
2169   HValue* name = GetParameter(VectorLoadICDescriptor::kNameIndex);
2170   HValue* slot = GetParameter(VectorLoadICDescriptor::kSlotIndex);
2171   HValue* vector = GetParameter(VectorLoadICDescriptor::kVectorIndex);
2172   HConstant* zero = graph()->GetConstant0();
2173
2174   // If the feedback is an array, then the IC is in the monomorphic or
2175   // polymorphic state.
2176   HValue* feedback =
2177       Add<HLoadKeyed>(vector, slot, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
2178   IfBuilder array_checker(this);
2179   array_checker.If<HCompareMap>(feedback,
2180                                 isolate()->factory()->fixed_array_map());
2181   array_checker.Then();
2182   {
2183     // If feedback[0] is 0, then the IC has element handlers and name should be
2184     // a smi. If feedback[0] is a string, verify that it matches name.
2185     HValue* recorded_name = Add<HLoadKeyed>(feedback, zero, nullptr,
2186                                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
2187
2188     IfBuilder recorded_name_is_zero(this);
2189     recorded_name_is_zero.If<HCompareObjectEqAndBranch>(recorded_name, zero);
2190     recorded_name_is_zero.Then();
2191     { Add<HCheckSmi>(name); }
2192     recorded_name_is_zero.Else();
2193     {
2194       IfBuilder strings_match(this);
2195       strings_match.IfNot<HCompareObjectEqAndBranch>(name, recorded_name);
2196       strings_match.Then();
2197       TailCallMiss(receiver, name, slot, vector, true);
2198       strings_match.End();
2199     }
2200     recorded_name_is_zero.End();
2201
2202     HandleArrayCases(feedback, receiver, name, slot, vector, true);
2203   }
2204   array_checker.Else();
2205   {
2206     // Check if the IC is in megamorphic state.
2207     IfBuilder megamorphic_checker(this);
2208     HConstant* megamorphic_symbol =
2209         Add<HConstant>(isolate()->factory()->megamorphic_symbol());
2210     megamorphic_checker.If<HCompareObjectEqAndBranch>(feedback,
2211                                                       megamorphic_symbol);
2212     megamorphic_checker.Then();
2213     {
2214       // Tail-call to the megamorphic KeyedLoadIC, treating it like a handler.
2215       Handle<Code> stub = KeyedLoadIC::ChooseMegamorphicStub(isolate());
2216       HValue* constant_stub = Add<HConstant>(stub);
2217       LoadDescriptor descriptor(isolate());
2218       HValue* op_vals[] = {context(), receiver, name};
2219       Add<HCallWithDescriptor>(constant_stub, 0, descriptor,
2220                                Vector<HValue*>(op_vals, 3), TAIL_CALL);
2221       // We never return here; it is a tail call.
2222     }
2223     megamorphic_checker.End();
2224   }
2225   array_checker.End();
2226
2227   TailCallMiss(receiver, name, slot, vector, true);
2228   return zero;
2229 }
2230
2231
2232 Handle<Code> VectorKeyedLoadStub::GenerateCode() {
2233   return DoGenerateCode(this);
2234 }
2235
2236
2237 Handle<Code> MegamorphicLoadStub::GenerateCode() {
2238   return DoGenerateCode(this);
2239 }
2240
2241
2242 template <>
2243 HValue* CodeStubGraphBuilder<MegamorphicLoadStub>::BuildCodeStub() {
2244   HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
2245   HValue* name = GetParameter(LoadDescriptor::kNameIndex);
2246
2247   // We shouldn't generate this when FLAG_vector_ics is true because the
2248   // megamorphic case is handled as part of the default stub.
2249   DCHECK(!FLAG_vector_ics);
2250
2251   // Probe the stub cache.
2252   Add<HTailCallThroughMegamorphicCache>(receiver, name);
2253
2254   // We never continue.
2255   return graph()->GetConstant0();
2256 }
2257 } }  // namespace v8::internal