// deps/v8/src/code-stubs-hydrogen.cc (V8 4.3.61.21, as vendored in Node.js)
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
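  //
  // Usage sketch (mirroring the existing call site in
  // BuildInstallFromOptimizedCodeMap below; not an additional API): the
  // caller owns the IfBuilder and handles the miss in its Else branch:
  //
  //   IfBuilder already_in(this);
  //   BuildCheckAndInstallOptimizedCode(js_function, native_context,
  //                                     &already_in, optimized_map,
  //                                     first_entry_index);
  //   already_in.Else();
  //   { ... keep searching the optimized code map ... }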
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map.
  Handle<Map> allocation_site_map =
      isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind).
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because
  // having an initial write-barrier-backed store makes this pointer strong
  // until the next GC, and allocation sites are designed to survive several
  // GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantUndefined());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
  HValue* result;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                             NEVER_RETURN_HOLE);
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped
  // arguments are stored in the context at indexes given by elements[key + 2].
  // Unmapped arguments are stored as regular indexed properties in the
  // arguments array, held at elements[1]. See NewSloppyArguments() in
  // runtime.cc for a detailed look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key,
  // missing to the runtime if the unmapped arguments array is not a fixed
  // array or if key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
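  //
  // Illustrative example (not part of the original comment): for a call
  // f(1, 2, 3) to a sloppy-mode function f(x, y) whose formals are both
  // context-allocated, elements would look like
  //
  //   0: <context>
  //   1: <unmapped arguments array containing 1, 2, 3>
  //   2: Smi(context index of x)
  //   3: Smi(context index of y)
  //
  // so a load with key 0 returns context[elements[2]], while a load with
  // key 2 (key + 2 >= elements.length) falls through to the unmapped array
  // and returns 3.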

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point on that if
      // mapped_index is not the hole, it is indeed a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                       FAST_ELEMENTS, ALLOW_RETURN_HOLE);
      environment()->Push(result);
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key);
      environment()->Push(result);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key);
    environment()->Push(result);
  }
  in_range.End();

  return environment()->Pop();
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow the properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
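  //
  // Illustrative example (not from the original comment): a call such as
  // Array.apply(null, hugeArray) arrives here with hugeArray.length
  // arguments; if that exceeds JSObject::kInitialMaxFastElementArray, the
  // bounds check below deopts instead of attempting the allocation.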
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
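  //
  // Sketch of the failure mode being guarded against (hypothetical example,
  // not from the original comment): Array(1, 2, {}) may start filling a
  // FAST_SMI_ELEMENTS backing store and bail out on reaching {}; the holes
  // written by FILL_WITH_HOLE keep the partially filled array consistent.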
1013   JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
1014       ? JSArrayBuilder::FILL_WITH_HOLE
1015       : JSArrayBuilder::DONT_FILL_WITH_HOLE;
1016   HValue* new_object = array_builder->AllocateArray(checked_length,
1017                                                     max_alloc_length,
1018                                                     checked_length,
1019                                                     fill_mode);
1020   HValue* elements = array_builder->GetElementsLocation();
1021   DCHECK(elements != NULL);
1022
1023   // Now populate the elements correctly.
1024   LoopBuilder builder(this,
1025                       context(),
1026                       LoopBuilder::kPostIncrement);
1027   HValue* start = graph()->GetConstant0();
1028   HValue* key = builder.BeginBody(start, checked_length, Token::LT);
1029   HInstruction* argument_elements = Add<HArgumentsElements>(false);
1030   HInstruction* argument = Add<HAccessArgumentsAt>(
1031       argument_elements, checked_length, key);
1032
1033   Add<HStoreKeyed>(elements, key, argument, kind);
1034   builder.EndBody();
1035   return new_object;
1036 }
1037
1038
1039 template <>
1040 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
1041   ElementsKind kind = casted_stub()->elements_kind();
1042   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1043   return BuildArrayConstructor(kind, override_mode, NONE);
1044 }
1045
1046
1047 Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
1048   return DoGenerateCode(this);
1049 }
1050
1051
1052 template <>
1053 HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
1054     BuildCodeStub() {
1055   ElementsKind kind = casted_stub()->elements_kind();
1056   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1057   return BuildArrayConstructor(kind, override_mode, SINGLE);
1058 }
1059
1060
1061 Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
1062   return DoGenerateCode(this);
1063 }
1064
1065
1066 template <>
1067 HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
1068   ElementsKind kind = casted_stub()->elements_kind();
1069   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1070   return BuildArrayConstructor(kind, override_mode, MULTIPLE);
1071 }
1072
1073
1074 Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
1075   return DoGenerateCode(this);
1076 }
1077
1078
1079 template <>
1080 HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
1081     BuildCodeStub() {
1082   ElementsKind kind = casted_stub()->elements_kind();
1083   return BuildInternalArrayConstructor(kind, NONE);
1084 }
1085
1086
1087 Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
1088   return DoGenerateCode(this);
1089 }
1090
1091
1092 template <>
1093 HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
1094     BuildCodeStub() {
1095   ElementsKind kind = casted_stub()->elements_kind();
1096   return BuildInternalArrayConstructor(kind, SINGLE);
1097 }
1098
1099
1100 Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
1101   return DoGenerateCode(this);
1102 }
1103
1104
1105 template <>
1106 HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
1107     BuildCodeStub() {
1108   ElementsKind kind = casted_stub()->elements_kind();
1109   return BuildInternalArrayConstructor(kind, MULTIPLE);
1110 }
1111
1112
1113 Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
1114   return DoGenerateCode(this);
1115 }
1116
1117
1118 template <>
1119 HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
1120   Isolate* isolate = graph()->isolate();
1121   CompareNilICStub* stub = casted_stub();
1122   HIfContinuation continuation;
1123   Handle<Map> sentinel_map(isolate->heap()->meta_map());
1124   Type* type = stub->GetType(zone(), sentinel_map);
1125   BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
1126   IfBuilder if_nil(this, &continuation);
1127   if_nil.Then();
1128   if (continuation.IsFalseReachable()) {
1129     if_nil.Else();
1130     if_nil.Return(graph()->GetConstant0());
1131   }
1132   if_nil.End();
1133   return continuation.IsTrueReachable()
1134       ? graph()->GetConstant1()
1135       : graph()->GetConstantUndefined();
1136 }
1137
1138
1139 Handle<Code> CompareNilICStub::GenerateCode() {
1140   return DoGenerateCode(this);
1141 }
1142
1143
1144 template <>
1145 HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
1146   BinaryOpICState state = casted_stub()->state();
1147
1148   HValue* left = GetParameter(BinaryOpICStub::kLeft);
1149   HValue* right = GetParameter(BinaryOpICStub::kRight);
1150
1151   Type* left_type = state.GetLeftType(zone());
1152   Type* right_type = state.GetRightType(zone());
1153   Type* result_type = state.GetResultType(zone());
1154
1155   DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
1156          (state.HasSideEffects() || !result_type->Is(Type::None())));
1157
1158   HValue* result = NULL;
1159   HAllocationMode allocation_mode(NOT_TENURED);
1160   if (state.op() == Token::ADD &&
1161       (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
1162       !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
1163     // For the generic add stub a fast case for string addition is performance
1164     // critical.
1165     if (left_type->Maybe(Type::String())) {
1166       IfBuilder if_leftisstring(this);
1167       if_leftisstring.If<HIsStringAndBranch>(left);
1168       if_leftisstring.Then();
1169       {
1170         Push(BuildBinaryOperation(
1171                     state.op(), left, right,
1172                     Type::String(zone()), right_type,
1173                     result_type, state.fixed_right_arg(),
1174                     allocation_mode));
1175       }
1176       if_leftisstring.Else();
1177       {
1178         Push(BuildBinaryOperation(
1179                     state.op(), left, right,
1180                     left_type, right_type, result_type,
1181                     state.fixed_right_arg(), allocation_mode));
1182       }
1183       if_leftisstring.End();
1184       result = Pop();
1185     } else {
1186       IfBuilder if_rightisstring(this);
1187       if_rightisstring.If<HIsStringAndBranch>(right);
1188       if_rightisstring.Then();
1189       {
1190         Push(BuildBinaryOperation(
1191                     state.op(), left, right,
1192                     left_type, Type::String(zone()),
1193                     result_type, state.fixed_right_arg(),
1194                     allocation_mode));
1195       }
1196       if_rightisstring.Else();
1197       {
1198         Push(BuildBinaryOperation(
1199                     state.op(), left, right,
1200                     left_type, right_type, result_type,
1201                     state.fixed_right_arg(), allocation_mode));
1202       }
1203       if_rightisstring.End();
1204       result = Pop();
1205     }
1206   } else {
1207     result = BuildBinaryOperation(
1208             state.op(), left, right,
1209             left_type, right_type, result_type,
1210             state.fixed_right_arg(), allocation_mode);
1211   }
1212
1213   // If we encounter a generic argument, the number conversion is
1214   // observable, thus we cannot afford to bail out after the fact.
1215   if (!state.HasSideEffects()) {
1216     result = EnforceNumberType(result, result_type);
1217   }
1218
1219   return result;
1220 }
1221
1222
1223 Handle<Code> BinaryOpICStub::GenerateCode() {
1224   return DoGenerateCode(this);
1225 }
1226
1227
1228 template <>
1229 HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
1230   BinaryOpICState state = casted_stub()->state();
1231
1232   HValue* allocation_site = GetParameter(
1233       BinaryOpWithAllocationSiteStub::kAllocationSite);
1234   HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1235   HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1236
1237   Type* left_type = state.GetLeftType(zone());
1238   Type* right_type = state.GetRightType(zone());
1239   Type* result_type = state.GetResultType(zone());
1240   HAllocationMode allocation_mode(allocation_site);
1241
1242   return BuildBinaryOperation(state.op(), left, right,
1243                               left_type, right_type, result_type,
1244                               state.fixed_right_arg(), allocation_mode);
1245 }
1246
1247
1248 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
1249   return DoGenerateCode(this);
1250 }
1251
1252
1253 template <>
1254 HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
1255   StringAddStub* stub = casted_stub();
1256   StringAddFlags flags = stub->flags();
1257   PretenureFlag pretenure_flag = stub->pretenure_flag();
1258
1259   HValue* left = GetParameter(StringAddStub::kLeft);
1260   HValue* right = GetParameter(StringAddStub::kRight);
1261
1262   // Make sure that both arguments are strings if not known in advance.
1263   if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
1264     left = BuildCheckString(left);
1265   }
1266   if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
1267     right = BuildCheckString(right);
1268   }
1269
1270   return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1271 }
1272
1273
1274 Handle<Code> StringAddStub::GenerateCode() {
1275   return DoGenerateCode(this);
1276 }
1277
1278
1279 template <>
1280 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
1281   ToBooleanStub* stub = casted_stub();
1282   HValue* true_value = NULL;
1283   HValue* false_value = NULL;
1284
1285   switch (stub->mode()) {
1286     case ToBooleanStub::RESULT_AS_SMI:
1287       true_value = graph()->GetConstant1();
1288       false_value = graph()->GetConstant0();
1289       break;
1290     case ToBooleanStub::RESULT_AS_ODDBALL:
1291       true_value = graph()->GetConstantTrue();
1292       false_value = graph()->GetConstantFalse();
1293       break;
1294     case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
1295       true_value = graph()->GetConstantFalse();
1296       false_value = graph()->GetConstantTrue();
1297       break;
1298   }
1299
1300   IfBuilder if_true(this);
1301   if_true.If<HBranch>(GetParameter(0), stub->types());
1302   if_true.Then();
1303   if_true.Return(true_value);
1304   if_true.Else();
1305   if_true.End();
1306   return false_value;
1307 }
1308
1309
1310 Handle<Code> ToBooleanStub::GenerateCode() {
1311   return DoGenerateCode(this);
1312 }
1313
1314
1315 template <>
1316 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
1317   StoreGlobalStub* stub = casted_stub();
1318   HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
1319   if (stub->check_global()) {
1320     // Check that the map of the global has not changed: use a placeholder map
1321     // that will be replaced later with the global object's map.
1322     HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
1323     HValue* proxy_map =
1324         Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
1325     HValue* global =
1326         Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
1327     Handle<Map> placeholder_map = isolate()->factory()->meta_map();
1328     HValue* cell = Add<HConstant>(Map::WeakCellForMap(placeholder_map));
1329     HValue* expected_map =
1330         Add<HLoadNamedField>(cell, nullptr, HObjectAccess::ForWeakCellValue());
1331     HValue* map =
1332         Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
1333     IfBuilder map_check(this);
1334     map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
1335     map_check.ThenDeopt(Deoptimizer::kUnknownMap);
1336     map_check.End();
1337   }
1338
1339   HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
1340       StoreGlobalStub::property_cell_placeholder(isolate())));
1341   HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
1342                                       HObjectAccess::ForWeakCellValue());
1343   Add<HCheckHeapObject>(cell);
1344   HObjectAccess access = HObjectAccess::ForPropertyCellValue();
1345   HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);
1346
1347   if (stub->is_constant()) {
1348     IfBuilder builder(this);
1349     builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1350     builder.Then();
1351     builder.ElseDeopt(
1352         Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
1353     builder.End();
1354   } else {
1355     // Load the payload of the global parameter cell. A hole indicates that the
1356     // property has been deleted and that the store must be handled by the
1357     // runtime.
1358     IfBuilder builder(this);
1359     HValue* hole_value = graph()->GetConstantHole();
1360     builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
1361     builder.Then();
1362     builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
1363     builder.Else();
1364     Add<HStoreNamedField>(cell, access, value);
1365     builder.End();
1366   }
1367
1368   return value;
1369 }
1370
1371
1372 Handle<Code> StoreGlobalStub::GenerateCode() {
1373   return DoGenerateCode(this);
1374 }
1375
1376
1377 template<>
1378 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
1379   HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
1380   HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
1381   HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
1382   HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);
1383
1384   if (FLAG_trace_elements_transitions) {
1385     // Tracing elements transitions is the job of the runtime.
1386     Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
1387                      Deoptimizer::EAGER);
1388   } else {
1389     info()->MarkAsSavesCallerDoubles();
1390
1391     BuildTransitionElementsKind(object, map,
1392                                 casted_stub()->from_kind(),
1393                                 casted_stub()->to_kind(),
1394                                 casted_stub()->is_jsarray());
1395
1396     BuildUncheckedMonomorphicElementAccess(object, key, value,
1397                                            casted_stub()->is_jsarray(),
1398                                            casted_stub()->to_kind(),
1399                                            STORE, ALLOW_RETURN_HOLE,
1400                                            casted_stub()->store_mode());
1401   }
1402
1403   return value;
1404 }
1405
1406
1407 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
1408   return DoGenerateCode(this);
1409 }
1410
1411
void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // ... and the literals from the same entry.
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link the function into the context's list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
           HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
           js_function);

  // The builder continues in the "then" clause after this function.
}

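// BuildInstallCode is the fallback path: it clears the next-function-link
// field and installs the unoptimized code object taken from the
// SharedFunctionInfo.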
void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}

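// The optimized code map is a FixedArray with a small header followed by
// fixed-width entries; a sketch of the layout assumed here (field offsets as
// defined on SharedFunctionInfo in this version):
//
//   [0 .. kEntriesStart)           header
//   [entry + kContextOffset]       native context
//   [entry + kCachedCodeOffset]    optimized code
//   [entry + kLiteralsOffset]      literals array
//   [entry + kOsrAstIdOffset]      OSR AST id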
HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant],
  // the keyed loads can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry =
      Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
  return field_entry;
}

void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  // A SharedFunctionInfo that has never cached optimized code holds the
  // Smi zero in this field instead of a FixedArray.
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of 4-element entries
    // (native context, optimized code, literals, OSR AST id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double-check
      // the first entry. After the loop, if no matching optimized code was
      // found, install the unoptimized code.
      // for (i = map.length() - SharedFunctionInfo::kEntryLength;
      //      i > SharedFunctionInfo::kEntriesStart;
      //      i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop once a matching entry has been installed.
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, then we failed to find
      // and install optimized code.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}

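// FastNewClosureStub allocates a JSFunction in new space and initializes
// every field explicitly; a sketch of the resulting object:
//
//   map                      <- native_context[FunctionMapIndex(mode, kind)]
//   properties / elements    <- empty_fixed_array
//   literals                 <- empty_fixed_array
//   prototype_or_initial_map <- the hole
//   shared_function_info     <- shared_info (parameter 0)
//   context                  <- the current context
//   code entry               <- from the optimized code map if cached,
//                               otherwise the unoptimized code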
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}

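// FastNewContextStub allocates a function context, which is shaped like a
// FixedArray of (slots + MIN_CONTEXT_SLOTS) entries; a sketch:
//
//   [CLOSURE_INDEX]          the function
//   [PREVIOUS_INDEX]         the current (caller) context
//   [EXTENSION_INDEX]        0 (no extension object)
//   [GLOBAL_OBJECT_INDEX]    copied from the previous context
//   [MIN_CONTEXT_SLOTS ...]  undefined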
template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

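// LoadDictionaryElementStub handles loads from receivers whose elements are
// in dictionary mode: the Smi key is hashed (BuildElementIndexHash) and the
// elements dictionary is probed inline via
// BuildUncheckedDictionaryElementLoad.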
template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}

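// KeyedLoadGenericStub implements the generic (megamorphic) keyed load. The
// generated graph first splits on the key type: integer indices fall through
// an elements-kind ladder (fast, fast double, dictionary, sloppy-arguments,
// then the external array kinds), while unique strings are looked up in the
// property dictionary or the keyed lookup cache.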
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};

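// A note on the limit check, assuming the bit_field2 layout of this version
// (Map::ElementsKindBits in the high bits, ordered by ElementsKind): because
// encode(kind + 1) has zero low bits, "bit_field2 < encode(kind + 1)" tests
// "elements kind <= kind" with a single numeric comparison, regardless of
// the flag bits below the kind field. Chaining these checks through
// kind_if.Else() builds the else-if ladder used in BuildCodeStub below.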
void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}

HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
      (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict (sloppy) arguments elements are not handled here.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string, properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      UNREACHABLE();
      // Key is a string, properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
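      // A minimal sketch of the probing below, assuming the KeyedLookupCache
      // layout of this version: cache_keys holds (map, name) pairs at
      // [2 * hash + probe * kEntryLength], and cache_field_offsets holds the
      // matching field index at [hash + probe]. The kEntriesPerBucket probes
      // are fully unrolled so that every HLoadKeyed below addresses the
      // arrays with a constant offset from base_index.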
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
                                EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}

Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}


Handle<Code> MegamorphicLoadStub::GenerateCode() {
  return DoGenerateCode(this);
}

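// The megamorphic load never returns to this code: the
// HTailCallThroughMegamorphicCache instruction either tail-calls a handler
// found in the stub cache or falls through to the miss path, so the constant
// returned below only satisfies BuildCodeStub's signature.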
template <>
HValue* CodeStubGraphBuilder<MegamorphicLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* name = GetParameter(LoadDescriptor::kNameIndex);

  // We shouldn't generate this when FLAG_vector_ics is true because the
  // megamorphic case is handled as part of the default stub.
  DCHECK(!FLAG_vector_ics);

  // Probe the stub cache.
  Add<HTailCallThroughMegamorphicCache>(receiver, name);

  // We never continue.
  return graph()->GetConstant0();
}
} }  // namespace v8::internal