// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
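  // (Callers therefore have to close the IfBuilder themselves, continuing
  // with Else()/End() on the builder that was passed in.)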
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object, HValue* literals);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
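      // In JS function mode the stub returns like a JS function, so on top of
      // the dynamic argument count we pop one extra slot (presumably the
      // receiver, which stack_parameter_count does not include).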
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
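  // (The light-weight path is GenerateLightweightMissCode above, which goes
  // straight to the miss handler and skips the Hydrogen graph building and
  // Lithium code generation done below.)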
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// Possible optimizations: put the type string into the oddballs.
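// For example (per the table in 11.4.3): typeof 1 is "number", typeof "a" is
// "string", typeof undefined is "undefined", typeof null is "object", typeof
// Symbol() is "symbol", and an undetectable object such as document.all also
// reports "undefined".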
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HConstant* undefined_string = Add<HConstant>(factory->undefined_string());
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          IfBuilder is_true_or_false(this);
          is_true_or_false.If<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantTrue());
          is_true_or_false.OrIf<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantFalse());
          is_true_or_false.Then();
          { Push(Add<HConstant>(factory->boolean_string())); }
          is_true_or_false.Else();
          {
            IfBuilder is_null(this);
            is_null.If<HCompareObjectEqAndBranch>(object,
                                                  graph()->GetConstantNull());
            is_null.Then();
            { Push(object_string); }
            is_null.Else();
            { Push(undefined_string); }
          }
          is_true_or_false.End();
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            IfBuilder is_function(this);
            HConstant* js_function = Add<HConstant>(JS_FUNCTION_TYPE);
            HConstant* js_function_proxy =
                Add<HConstant>(JS_FUNCTION_PROXY_TYPE);
            is_function.If<HCompareNumericAndBranch>(instance_type, js_function,
                                                     Token::EQ);
            is_function.OrIf<HCompareNumericAndBranch>(
                instance_type, js_function_proxy, Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HIsUndetectableAndBranch>(object);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(undefined_string);
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive, the generated code must be tuned
432   // This stub is very performance sensitive, the generated code must be tuned
433   // so that it doesn't build and eager frame.
434   info()->MarkMustNotHaveEagerFrame();
435
436   HInstruction* allocation_site =
437       Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
438   IfBuilder checker(this);
439   checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
440                                                     undefined);
441   checker.Then();
442
443   HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
444       AllocationSite::kTransitionInfoOffset);
445   HInstruction* boilerplate =
446       Add<HLoadNamedField>(allocation_site, nullptr, access);
447   HValue* elements = AddLoadElements(boilerplate);
448   HValue* capacity = AddLoadFixedArrayLength(elements);
449   IfBuilder zero_capacity(this);
450   zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
451                                            Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, nullptr,
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                               NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
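  //
  // Illustrative example (assuming both formals are context allocated): for
  // function f(a, b) called as f(1, 2, 3), elements is
  //   [context, backing_store, Smi(index_of_a), Smi(index_of_b)]
  // with backing_store = [hole, hole, 3]. Reads and writes of a and b go
  // through context[index_of_a] and context[index_of_b], while the extra
  // argument 3 is only reachable through the backing store.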

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                         FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
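  // For example, Array.apply(null, huge_array) with more elements than
  // JSObject::kInitialMaxFastElementArray ends up here, and the bounds check
  // below then deoptimizes to the runtime.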
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
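  // Illustration: new Array(1, 2, "x") compiled with smi feedback starts
  // copying into a FAST_SMI_ELEMENTS backing store and bails out at "x";
  // pre-filling with the hole keeps the partially initialized array valid
  // in the meantime.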
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
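    // (We only get here when an operand may be a string without definitely
    // being one, e.g. a '+' site that has seen both numbers and strings; the
    // runtime string checks below pick the specialized path per invocation.)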
1370     if (left_type->Maybe(Type::String())) {
1371       IfBuilder if_leftisstring(this);
1372       if_leftisstring.If<HIsStringAndBranch>(left);
1373       if_leftisstring.Then();
1374       {
1375         Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
1376                                   right_type, result_type,
1377                                   state.fixed_right_arg(), allocation_mode,
1378                                   state.strength()));
1379       }
1380       if_leftisstring.Else();
1381       {
1382         Push(BuildBinaryOperation(
1383             state.op(), left, right, left_type, right_type, result_type,
1384             state.fixed_right_arg(), allocation_mode, state.strength()));
1385       }
1386       if_leftisstring.End();
1387       result = Pop();
1388     } else {
1389       IfBuilder if_rightisstring(this);
1390       if_rightisstring.If<HIsStringAndBranch>(right);
1391       if_rightisstring.Then();
1392       {
1393         Push(BuildBinaryOperation(state.op(), left, right, left_type,
1394                                   Type::String(zone()), result_type,
1395                                   state.fixed_right_arg(), allocation_mode,
1396                                   state.strength()));
1397       }
1398       if_rightisstring.Else();
1399       {
1400         Push(BuildBinaryOperation(
1401             state.op(), left, right, left_type, right_type, result_type,
1402             state.fixed_right_arg(), allocation_mode, state.strength()));
1403       }
1404       if_rightisstring.End();
1405       result = Pop();
1406     }
1407   } else {
1408     result = BuildBinaryOperation(
1409         state.op(), left, right, left_type, right_type, result_type,
1410         state.fixed_right_arg(), allocation_mode, state.strength());
1411   }
1412
1413   // If we encounter a generic argument, the number conversion is
1414   // observable, thus we cannot afford to bail out after the fact.
1415   if (!state.HasSideEffects()) {
1416     result = EnforceNumberType(result, result_type);
1417   }
1418
1419   return result;
1420 }
1421
1422
1423 Handle<Code> BinaryOpICStub::GenerateCode() {
1424   return DoGenerateCode(this);
1425 }
1426
1427
1428 template <>
1429 HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
1430   BinaryOpICState state = casted_stub()->state();
1431
1432   HValue* allocation_site = GetParameter(
1433       BinaryOpWithAllocationSiteStub::kAllocationSite);
1434   HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1435   HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1436
1437   Type* left_type = state.GetLeftType(zone());
1438   Type* right_type = state.GetRightType(zone());
1439   Type* result_type = state.GetResultType(zone());
1440   HAllocationMode allocation_mode(allocation_site);
1441
1442   return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
1443                               result_type, state.fixed_right_arg(),
1444                               allocation_mode, state.strength());
1445 }
1446
1447
1448 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
1449   return DoGenerateCode(this);
1450 }
1451
1452
1453 template <>
1454 HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
1455   StringAddStub* stub = casted_stub();
1456   StringAddFlags flags = stub->flags();
1457   PretenureFlag pretenure_flag = stub->pretenure_flag();
1458
1459   HValue* left = GetParameter(StringAddStub::kLeft);
1460   HValue* right = GetParameter(StringAddStub::kRight);
1461
1462   // Make sure that both arguments are strings if not known in advance.
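  // STRING_ADD_CHECK_LEFT and STRING_ADD_CHECK_RIGHT are presumably
  // independent bit flags, so each masked test below guards its operand
  // separately; e.g. a stub specialized for String + <unknown> would check
  // only the right operand.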
1463   if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
1464     left = BuildCheckString(left);
1465   }
1466   if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
1467     right = BuildCheckString(right);
1468   }
1469
1470   return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1471 }
1472
1473
1474 Handle<Code> StringAddStub::GenerateCode() {
1475   return DoGenerateCode(this);
1476 }
1477
1478
1479 template <>
1480 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
1481   ToBooleanStub* stub = casted_stub();
1482   HValue* true_value = NULL;
1483   HValue* false_value = NULL;
1484
1485   switch (stub->mode()) {
1486     case ToBooleanStub::RESULT_AS_SMI:
1487       true_value = graph()->GetConstant1();
1488       false_value = graph()->GetConstant0();
1489       break;
1490     case ToBooleanStub::RESULT_AS_ODDBALL:
1491       true_value = graph()->GetConstantTrue();
1492       false_value = graph()->GetConstantFalse();
1493       break;
1494     case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
1495       true_value = graph()->GetConstantFalse();
1496       false_value = graph()->GetConstantTrue();
1497       break;
1498   }
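  // The three modes differ only in how the boolean outcome is materialized:
  //   RESULT_AS_SMI:             true -> 1,     false -> 0
  //   RESULT_AS_ODDBALL:         true -> true,  false -> false
  //   RESULT_AS_INVERSE_ODDBALL: true -> false, false -> true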
1499
1500   IfBuilder if_true(this);
1501   if_true.If<HBranch>(GetParameter(0), stub->types());
1502   if_true.Then();
1503   if_true.Return(true_value);
1504   if_true.Else();
1505   if_true.End();
1506   return false_value;
1507 }
1508
1509
1510 Handle<Code> ToBooleanStub::GenerateCode() {
1511   return DoGenerateCode(this);
1512 }
1513
1514
1515 template <>
1516 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
1517   StoreGlobalStub* stub = casted_stub();
1518   HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
1519   if (stub->check_global()) {
1520     // Check that the map of the global has not changed: use a placeholder map
1521     // that will be replaced later with the global object's map.
1522     HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
1523     HValue* proxy_map =
1524         Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
1525     HValue* global =
1526         Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
1527     HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
1528         StoreGlobalStub::global_map_placeholder(isolate())));
1529     HValue* expected_map = Add<HLoadNamedField>(
1530         map_cell, nullptr, HObjectAccess::ForWeakCellValue());
1531     HValue* map =
1532         Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
1533     IfBuilder map_check(this);
1534     map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
1535     map_check.ThenDeopt(Deoptimizer::kUnknownMap);
1536     map_check.End();
1537   }
1538
1539   HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
1540       StoreGlobalStub::property_cell_placeholder(isolate())));
1541   HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
1542                                       HObjectAccess::ForWeakCellValue());
1543   Add<HCheckHeapObject>(cell);
1544   HObjectAccess access = HObjectAccess::ForPropertyCellValue();
1545   // Load the payload of the global parameter cell. A hole indicates that the
1546   // cell has been invalidated and that the store must be handled by the
1547   // runtime.
1548   HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);
1549
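  // Dispatch on the cell type. Roughly: kConstant and kUndefined accept the
  // store only if the new value is identical to the current contents;
  // kConstantType additionally admits a new value of the same representation
  // (a smi, or a heap object with the same map); kMutable stores
  // unconditionally once the hole check below has passed.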
1550   auto cell_type = stub->cell_type();
1551   if (cell_type == PropertyCellType::kConstant ||
1552       cell_type == PropertyCellType::kUndefined) {
1553     // Storing an identical value is valid in every state a cell can be in.
1554     IfBuilder builder(this);
1555     builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1556     builder.Then();
1557     builder.ElseDeopt(
1558         Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
1559     builder.End();
1560   } else {
1561     IfBuilder builder(this);
1562     HValue* hole_value = graph()->GetConstantHole();
1563     builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
1564     builder.Then();
1565     builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
1566     builder.Else();
1567     // When dealing with constant types, the type may be allowed to change, as
1568     // long as optimized code remains valid.
1569     if (cell_type == PropertyCellType::kConstantType) {
1570       switch (stub->constant_type()) {
1571         case PropertyCellConstantType::kSmi:
1572           access = access.WithRepresentation(Representation::Smi());
1573           break;
1574         case PropertyCellConstantType::kStableMap: {
1575           // It is sufficient here to check that the value and the cell
1576           // contents have identical maps, regardless of whether the maps are
1577           // stable and of whether they are the maps originally in the cell.
1578           // Optimized code deopts when a cell has an unstable map, and if it
1579           // has a dependency on a stable map, it deopts if that map
1580           // destabilizes.
1580           Add<HCheckHeapObject>(value);
1581           Add<HCheckHeapObject>(cell_contents);
1582           HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
1583                                                       HObjectAccess::ForMap());
1584           HValue* map =
1585               Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
1586           IfBuilder map_check(this);
1587           map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
1588           map_check.ThenDeopt(Deoptimizer::kUnknownMap);
1589           map_check.End();
1590           access = access.WithRepresentation(Representation::HeapObject());
1591           break;
1592         }
1593       }
1594     }
1595     Add<HStoreNamedField>(cell, access, value);
1596     builder.End();
1597   }
1598
1599   return value;
1600 }
1601
1602
1603 Handle<Code> StoreGlobalStub::GenerateCode() {
1604   return DoGenerateCode(this);
1605 }
1606
1607
1608 template <>
1609 HValue* CodeStubGraphBuilder<LoadGlobalViaContextStub>::BuildCodeStub() {
1610   LoadGlobalViaContextStub* stub = casted_stub();
1611   int depth_value = stub->depth();
1612   HValue* depth = GetParameter(0);
1613   HValue* slot_index = GetParameter(1);
1614   HValue* name = GetParameter(2);
1615
1616   // Choose between the dynamic and the static version of script context fetching.
1617   depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth
1618               ? nullptr
1619               : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
1620   slot_index =
1621       AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
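  // A null depth presumably tells BuildGetParentContext to walk a statically
  // known number of parent links (depth_value); otherwise the Smi-encoded
  // dynamic depth is consulted at runtime.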
1622
1623   HValue* script_context = BuildGetParentContext(depth, depth_value);
1624   HValue* cell =
1625       Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
1626
1627   HValue* value = Add<HLoadNamedField>(cell, nullptr,
1628                                        HObjectAccess::ForPropertyCellValue());
1629
1630   IfBuilder builder(this);
1631   HValue* hole_value = graph()->GetConstantHole();
1632   builder.IfNot<HCompareObjectEqAndBranch, HValue*>(value, hole_value);
1633   builder.Then();
1634   { Push(value); }
1635   builder.Else();
1636   {
1637     Add<HPushArguments>(script_context, slot_index, name);
1638     Push(Add<HCallRuntime>(
1639         isolate()->factory()->empty_string(),
1640         Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3));
1641   }
1642   builder.End();
1643   return Pop();
1644 }
1645
1646
1647 Handle<Code> LoadGlobalViaContextStub::GenerateCode() {
1648   return DoGenerateCode(this);
1649 }
1650
1651
1652 template <>
1653 HValue* CodeStubGraphBuilder<StoreGlobalViaContextStub>::BuildCodeStub() {
1654   StoreGlobalViaContextStub* stub = casted_stub();
1655   int depth_value = stub->depth();
1656   HValue* depth = GetParameter(0);
1657   HValue* slot_index = GetParameter(1);
1658   HValue* name = GetParameter(2);
1659   HValue* value = GetParameter(3);
1660
1661   // Choose between the dynamic and the static version of script context fetching.
1662   depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth
1663               ? nullptr
1664               : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
1665   slot_index =
1666       AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
1667
1668   HValue* script_context = BuildGetParentContext(depth, depth_value);
1669   HValue* cell =
1670       Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
1671
1672   // Fast case that requires storing to the cell.
1673   HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(),
1674                                              graph()->CreateBasicBlock());
1675
1676   // Fast case that does not require storing to the cell.
1677   HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(),
1678                                                 graph()->CreateBasicBlock());
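  // Each nested check below ends in JoinContinuation, funnelling its then-
  // and else-edges into one of these two continuations rather than falling
  // through; the continuations are resolved after all checks are emitted.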
1679
1680   // This stub does the same as StoreGlobalStub but in a dynamic manner.
1681
1682   HValue* cell_contents = Add<HLoadNamedField>(
1683       cell, nullptr, HObjectAccess::ForPropertyCellValue());
1684
1685   IfBuilder if_hole(this);
1686   HValue* hole_value = graph()->GetConstantHole();
1687   if_hole.IfNot<HCompareObjectEqAndBranch, HValue*>(cell_contents, hole_value);
1688   if_hole.Then();
1689   {
1690     HValue* details = Add<HLoadNamedField>(
1691         cell, nullptr, HObjectAccess::ForPropertyCellDetails());
1692     HValue* cell_type =
1693         BuildDecodeField<PropertyDetails::PropertyCellTypeField>(details);
1694
1695     // The code below relies on this.
1696     STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant);
1697     STATIC_ASSERT(PropertyCellType::kConstant <
1698                   PropertyCellType::kConstantType);
1699     STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable);
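    // With this ordering, a single numeric compare against kConstant
    // partitions the four states: Token::GT selects kConstantType and
    // kMutable, while the else-arm covers kUndefined and kConstant.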
1700
1701     // Handle all cell type cases.
1702     IfBuilder if_not_const(this);
1703
1704     int cell_type_constant = static_cast<int>(PropertyCellType::kConstant);
1705     if_not_const.If<HCompareNumericAndBranch, HValue*>(
1706         cell_type, Add<HConstant>(cell_type_constant), Token::GT);
1707     if_not_const.Then();
1708     {
1709       // kConstantType or kMutable.
1710       IfBuilder if_const_type(this);
1711       int cell_type_constant_type =
1712           static_cast<int>(PropertyCellType::kConstantType);
1713       if_const_type.If<HCompareNumericAndBranch, HValue*>(
1714           cell_type, Add<HConstant>(cell_type_constant_type), Token::EQ);
1715       if_const_type.Then();
1716       {
1717         // Check that either both value and cell_contents are smis or
1718         // both have the same map.
1719         IfBuilder if_cell_is_smi(this);
1720         if_cell_is_smi.If<HIsSmiAndBranch>(cell_contents);
1721         if_cell_is_smi.Then();
1722         {
1723           IfBuilder if_value_is_smi(this);
1724           if_value_is_smi.If<HIsSmiAndBranch>(value);
1725           if_value_is_smi.Then();
1726           {
1727             // Both cell_contents and value are smis, do store.
1728           }
1729           if_value_is_smi.Else();  // Slow case.
1730           if_value_is_smi.JoinContinuation(&if_fast_store_continuation);
1731         }
1732         if_cell_is_smi.Else();
1733         {
1734           IfBuilder if_value_is_heap_object(this);
1735           if_value_is_heap_object.IfNot<HIsSmiAndBranch>(value);
1736           if_value_is_heap_object.Then();
1737           {
1738             // Both cell_contents and value are heap objects, do store.
1739             HValue* expected_map = Add<HLoadNamedField>(
1740                 cell_contents, nullptr, HObjectAccess::ForMap());
1741             HValue* map =
1742                 Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
1743             IfBuilder map_check(this);
1744             map_check.If<HCompareObjectEqAndBranch>(expected_map, map);
1745             map_check.Then();
1746             map_check.Else();  // Slow case.
1747             map_check.JoinContinuation(&if_fast_store_continuation);
1748
1749             // The accessor case is handled by the map check above, since
1750             // the value must not have an AccessorPair map.
1751           }
1752           if_value_is_heap_object.Else();  // Slow case.
1753           if_value_is_heap_object.JoinContinuation(&if_fast_store_continuation);
1754         }
1755         if_cell_is_smi.EndUnreachable();
1756       }
1757       if_const_type.Else();
1758       {
1759         // Check that the property kind is kData.
1760         HValue* kind = BuildDecodeField<PropertyDetails::KindField>(details);
1761         HValue* data_kind_value = Add<HConstant>(kData);
1762
1763         IfBuilder builder(this);
1764         builder.If<HCompareNumericAndBranch, HValue*>(kind, data_kind_value,
1765                                                       Token::EQ);
1766         builder.Then();
1767         builder.Else();  // Slow case.
1768         builder.JoinContinuation(&if_fast_store_continuation);
1769       }
1770       if_const_type.EndUnreachable();
1771     }
1772     if_not_const.Else();
1773     {
1774       // kUndefined or kConstant: just check that the value matches.
1775       IfBuilder builder(this);
1776       builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1777       builder.Then();
1778       builder.Else();  // Slow case.
1779       builder.JoinContinuation(&if_fast_no_store_continuation);
1780     }
1781     if_not_const.EndUnreachable();
1782   }
1783   if_hole.Else();  // Slow case.
1784   if_hole.JoinContinuation(&if_fast_store_continuation);
1785
1786   // Do store for fast case.
1787   IfBuilder if_fast_store(this, &if_fast_store_continuation);
1788   if_fast_store.Then();
1789   {
1790     // All checks are done, store the value to the cell.
1791     Add<HStoreNamedField>(cell, HObjectAccess::ForPropertyCellValue(), value);
1792   }
1793   if_fast_store.Else();
1794   if_fast_store.JoinContinuation(&if_fast_no_store_continuation);
1795
1796   // Bail out to a runtime call for the slow case.
1797   IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation);
1798   if_no_fast_store.Then();
1799   {
1800     // Nothing else to do.
1801   }
1802   if_no_fast_store.Else();
1803   {
1804     // Slow case, call runtime.
1805     HInstruction* lang_mode = Add<HConstant>(casted_stub()->language_mode());
1806     Add<HPushArguments>(script_context, slot_index, name, value);
1807     Add<HPushArguments>(lang_mode);
1808     Add<HCallRuntime>(isolate()->factory()->empty_string(),
1809                       Runtime::FunctionForId(Runtime::kStoreGlobalViaContext),
1810                       5);
1811   }
1812   if_no_fast_store.End();
1813   return value;
1814 }
1815
1816
1817 Handle<Code> StoreGlobalViaContextStub::GenerateCode() {
1818   return DoGenerateCode(this);
1819 }
1820
1821
1822 template <>
1823 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
1824   HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
1825   HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
1826   HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
1827   HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);
1828
1829   if (FLAG_trace_elements_transitions) {
1830     // Tracing elements transitions is the job of the runtime.
1831     Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
1832                      Deoptimizer::EAGER);
1833   } else {
1834     info()->MarkAsSavesCallerDoubles();
1835
1836     BuildTransitionElementsKind(object, map,
1837                                 casted_stub()->from_kind(),
1838                                 casted_stub()->to_kind(),
1839                                 casted_stub()->is_jsarray());
1840
1841     BuildUncheckedMonomorphicElementAccess(object, key, value,
1842                                            casted_stub()->is_jsarray(),
1843                                            casted_stub()->to_kind(),
1844                                            STORE, ALLOW_RETURN_HOLE,
1845                                            casted_stub()->store_mode());
1846   }
1847
1848   return value;
1849 }
1850
1851
1852 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
1853   return DoGenerateCode(this);
1854 }
1855
1856
1857 void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
1858     HValue* js_function,
1859     HValue* native_context,
1860     IfBuilder* builder,
1861     HValue* optimized_map,
1862     HValue* map_index) {
1863   HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
1864   HValue* context_slot = LoadFromOptimizedCodeMap(
1865       optimized_map, map_index, SharedFunctionInfo::kContextOffset);
1866   HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
1867       optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
1868   builder->If<HCompareObjectEqAndBranch>(native_context,
1869                                          context_slot);
1870   builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
1871   builder->Then();
1872   HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
1873       map_index, SharedFunctionInfo::kCachedCodeOffset);
1874   // and the literals
1875   HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
1876       map_index, SharedFunctionInfo::kLiteralsOffset);
1877
1878   BuildInstallOptimizedCode(js_function, native_context, code_object, literals);
1879
1880   // The builder continues in the "then" after this function.
1881 }
1882
1883
1884 void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function,
1885                                                          HValue* native_context,
1886                                                          HValue* code_object,
1887                                                          HValue* literals) {
1888   Counters* counters = isolate()->counters();
1889   AddIncrementCounter(counters->fast_new_closure_install_optimized());
1890
1891   // TODO(fschneider): Idea: store proper code pointers in the optimized code
1892   // map and either unmangle them on marking or do nothing as the whole map is
1893   // discarded on major GC anyway.
1894   Add<HStoreCodeEntry>(js_function, code_object);
1895   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1896                         literals);
1897
1898   // Now link the function into the list of optimized functions.
1899   HValue* optimized_functions_list = Add<HLoadNamedField>(
1900       native_context, nullptr,
1901       HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
1902   Add<HStoreNamedField>(js_function,
1903                         HObjectAccess::ForNextFunctionLinkPointer(),
1904                         optimized_functions_list);
1905
1906   // This store is the only one that should have a write barrier.
1907   Add<HStoreNamedField>(native_context,
1908            HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
1909            js_function);
1910 }
1911
1912
1913 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
1914                                                 HValue* shared_info) {
1915   Add<HStoreNamedField>(js_function,
1916                         HObjectAccess::ForNextFunctionLinkPointer(),
1917                         graph()->GetConstantUndefined());
1918   HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
1919                                              HObjectAccess::ForCodeOffset());
1920   Add<HStoreCodeEntry>(js_function, code_object);
1921 }
1922
1923
1924 HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
1925     HValue* optimized_map,
1926     HValue* iterator,
1927     int field_offset) {
1928   // By making sure to express these loads in the form [<hvalue> + constant],
1929   // the keyed load can be hoisted.
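  // E.g. the load at field_offset == kCachedCodeOffset becomes
  // optimized_map[iterator + constant]; keeping the constant separate from
  // the varying iterator is what lets the compiler combine or hoist the
  // keyed loads.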
1930   DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
1931   HValue* field_slot = iterator;
1932   if (field_offset > 0) {
1933     HValue* field_offset_value = Add<HConstant>(field_offset);
1934     field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
1935   }
1936   HInstruction* field_entry =
1937       Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
1938   return field_entry;
1939 }
1940
1941
1942 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
1943     HValue* js_function,
1944     HValue* shared_info,
1945     HValue* native_context) {
1946   Counters* counters = isolate()->counters();
1947   Factory* factory = isolate()->factory();
1948   IfBuilder is_optimized(this);
1949   HInstruction* optimized_map = Add<HLoadNamedField>(
1950       shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
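  // The optimized code map appears to use Smi zero as its "empty" sentinel
  // (hence null_constant below); an empty map means there is no optimized
  // code to install.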
1951   HValue* null_constant = Add<HConstant>(0);
1952   is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
1953   is_optimized.Then();
1954   {
1955     BuildInstallCode(js_function, shared_info);
1956   }
1957   is_optimized.Else();
1958   {
1959     AddIncrementCounter(counters->fast_new_closure_try_optimized());
1960     // optimized_map points to a fixed array of 4-element entries
1961     // (native context, optimized code, literals, osr ast id), matching
1962     // the offsets read by LoadFromOptimizedCodeMap.
1963     // The map must never be empty, so check the first entry.
1963     HValue* first_entry_index =
1964         Add<HConstant>(SharedFunctionInfo::kEntriesStart);
1965     IfBuilder already_in(this);
1966     BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
1967                                       optimized_map, first_entry_index);
1968     already_in.Else();
1969     {
1970       // Iterate through the rest of the map backwards. Do not double-check
1971       // the first entry. After the loop, if no matching optimized code was
1972       // found, install unoptimized code.
1973       // for(i = map.length() - SharedFunctionInfo::kEntryLength;
1974       //     i > SharedFunctionInfo::kEntriesStart;
1975       //     i -= SharedFunctionInfo::kEntryLength) { .. }
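      // E.g. with two entries: map.length() == kEntriesStart + 2 * kEntryLength,
      // so the loop starts at the second entry and stops once the iterator
      // reaches kEntriesStart, leaving the already-checked first entry alone.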
1976       HValue* shared_function_entry_length =
1977           Add<HConstant>(SharedFunctionInfo::kEntryLength);
1978       LoopBuilder loop_builder(this,
1979                                context(),
1980                                LoopBuilder::kPostDecrement,
1981                                shared_function_entry_length);
1982       HValue* array_length = Add<HLoadNamedField>(
1983           optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
1984       HValue* start_pos = AddUncasted<HSub>(array_length,
1985                                             shared_function_entry_length);
1986       HValue* slot_iterator = loop_builder.BeginBody(start_pos,
1987                                                      first_entry_index,
1988                                                      Token::GT);
1989       {
1990         IfBuilder done_check(this);
1991         BuildCheckAndInstallOptimizedCode(js_function, native_context,
1992                                           &done_check,
1993                                           optimized_map,
1994                                           slot_iterator);
1995         // Fall out of the loop
1996         loop_builder.Break();
1997       }
1998       loop_builder.EndBody();
1999
2000       // If slot_iterator equals the first entry index, we failed to find
2001       // context-dependent code; try context-independent code next.
2002       IfBuilder no_optimized_code_check(this);
2003       no_optimized_code_check.If<HCompareNumericAndBranch>(
2004           slot_iterator, first_entry_index, Token::EQ);
2005       no_optimized_code_check.Then();
2006       {
2007         IfBuilder shared_code_check(this);
2008         HValue* shared_code = Add<HLoadNamedField>(
2009             optimized_map, nullptr,
2010             HObjectAccess::ForOptimizedCodeMapSharedCode());
2011         shared_code_check.IfNot<HCompareObjectEqAndBranch>(
2012             shared_code, graph()->GetConstantUndefined());
2013         shared_code_check.Then();
2014         {
2015           // Store the context-independent optimized code.
2016           HValue* literals = Add<HConstant>(factory->empty_fixed_array());
2017           BuildInstallOptimizedCode(js_function, native_context, shared_code,
2018                                     literals);
2019         }
2020         shared_code_check.Else();
2021         {
2022           // Store the unoptimized code.
2023           BuildInstallCode(js_function, shared_info);
2024         }
2025       }
2026     }
2027   }
2028 }
2029
2030
2031 template<>
2032 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
2033   Counters* counters = isolate()->counters();
2034   Factory* factory = isolate()->factory();
2035   HInstruction* empty_fixed_array =
2036       Add<HConstant>(factory->empty_fixed_array());
2037   HValue* shared_info = GetParameter(0);
2038
2039   AddIncrementCounter(counters->fast_new_closure_total());
2040
2041   // Create a new closure from the given function info in new space.
2042   HValue* size = Add<HConstant>(JSFunction::kSize);
2043   HInstruction* js_function =
2044       Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);
2045
2046   int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
2047                                             casted_stub()->kind());
2048
2049   // Compute the function map in the current native context and set that
2050   // as the map of the allocated object.
2051   HInstruction* native_context = BuildGetNativeContext();
2052   HInstruction* map_slot_value = Add<HLoadNamedField>(
2053       native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
2054   Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
2055
2056   // Initialize the rest of the function.
2057   Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
2058                         empty_fixed_array);
2059   Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
2060                         empty_fixed_array);
2061   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
2062                         empty_fixed_array);
2063   Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
2064                         graph()->GetConstantHole());
2065   Add<HStoreNamedField>(
2066       js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
2067   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
2068                         context());
2069
2070   // Initialize the code pointer in the function to be the one found in the
2071   // shared function info object. But first check if there is an optimized
2072   // version for our context.
2073   BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
2074
2075   return js_function;
2076 }
2077
2078
2079 Handle<Code> FastNewClosureStub::GenerateCode() {
2080   return DoGenerateCode(this);
2081 }
2082
2083
2084 template<>
2085 HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
2086   int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;
2087
2088   // Get the function.
2089   HParameter* function = GetParameter(FastNewContextStub::kFunction);
2090
2091   // Allocate the context in new space.
2092   HAllocate* function_context = Add<HAllocate>(
2093       Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
2094       HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);
2095
2096   // Set up the object header.
2097   AddStoreMapConstant(function_context,
2098                       isolate()->factory()->function_context_map());
2099   Add<HStoreNamedField>(function_context,
2100                         HObjectAccess::ForFixedArrayLength(),
2101                         Add<HConstant>(length));
2102
2103   // Set up the fixed slots.
2104   Add<HStoreNamedField>(function_context,
2105                         HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
2106                         function);
2107   Add<HStoreNamedField>(function_context,
2108                         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
2109                         context());
2110   Add<HStoreNamedField>(function_context,
2111                         HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
2112                         graph()->GetConstant0());
2113
2114   // Copy the global object from the previous context.
2115   HValue* global_object = Add<HLoadNamedField>(
2116       context(), nullptr,
2117       HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2118   Add<HStoreNamedField>(function_context,
2119                         HObjectAccess::ForContextSlot(
2120                             Context::GLOBAL_OBJECT_INDEX),
2121                         global_object);
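  // Resulting fixed-slot layout:
  //   CLOSURE_INDEX       -> the function
  //   PREVIOUS_INDEX      -> the current context
  //   EXTENSION_INDEX     -> Smi zero (no extension object)
  //   GLOBAL_OBJECT_INDEX -> copied from the previous context
  // The remaining slots are filled with undefined below.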
2122
2123   // Initialize the rest of the slots to undefined.
2124   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
2125     Add<HStoreNamedField>(function_context,
2126                           HObjectAccess::ForContextSlot(i),
2127                           graph()->GetConstantUndefined());
2128   }
2129
2130   return function_context;
2131 }
2132
2133
2134 Handle<Code> FastNewContextStub::GenerateCode() {
2135   return DoGenerateCode(this);
2136 }
2137
2138
2139 template <>
2140 HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
2141   HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
2142   HValue* key = GetParameter(LoadDescriptor::kNameIndex);
2143
2144   Add<HCheckSmi>(key);
2145
2146   HValue* elements = AddLoadElements(receiver);
2147
2148   HValue* hash = BuildElementIndexHash(key);
2149
2150   return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
2151                                              casted_stub()->language_mode());
2152 }
2153
2154
2155 Handle<Code> LoadDictionaryElementStub::GenerateCode() {
2156   return DoGenerateCode(this);
2157 }
2158
2159
2160 template<>
2161 HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
2162   // Determine the parameters.
2163   HValue* length = GetParameter(RegExpConstructResultStub::kLength);
2164   HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
2165   HValue* input = GetParameter(RegExpConstructResultStub::kInput);
2166
2167   info()->MarkMustNotHaveEagerFrame();
2168
2169   return BuildRegExpConstructResult(length, index, input);
2170 }
2171
2172
2173 Handle<Code> RegExpConstructResultStub::GenerateCode() {
2174   return DoGenerateCode(this);
2175 }
2176
2177
2178 template <>
2179 class CodeStubGraphBuilder<KeyedLoadGenericStub>
2180     : public CodeStubGraphBuilderBase {
2181  public:
2182   explicit CodeStubGraphBuilder(CompilationInfo* info)
2183       : CodeStubGraphBuilderBase(info) {}
2184
2185  protected:
2186   virtual HValue* BuildCodeStub();
2187
2188   void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
2189                                    HValue* bit_field2,
2190                                    ElementsKind kind);
2191
2192   void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
2193                             HValue* receiver,
2194                             HValue* key,
2195                             HValue* instance_type,
2196                             HValue* bit_field2,
2197                             ElementsKind kind);
2198
2199   void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
2200                                 HValue* receiver,
2201                                 HValue* key,
2202                                 HValue* instance_type,
2203                                 HValue* bit_field2,
2204                                 ElementsKind kind);
2205
2206   KeyedLoadGenericStub* casted_stub() {
2207     return static_cast<KeyedLoadGenericStub*>(stub());
2208   }
2209 };
2210
2211
2212 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
2213     HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
2214     ElementsKind kind) {
2215   ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
2216   HValue* kind_limit = Add<HConstant>(
2217       static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
2218
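  // The raw numeric compare below orders bit_field2 values by elements kind,
  // assuming ElementsKindBits occupy the most significant bits of bit_field2
  // (which the encode()-based limit suggests): any lower bits only add
  // values below the encoded boundary.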
2219   if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
2220   if_builder->Then();
2221 }
2222
2223
2224 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
2225     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
2226     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
2227   DCHECK(!IsExternalArrayElementsKind(kind));
2228
2229   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
2230
2231   IfBuilder js_array_check(this);
2232   js_array_check.If<HCompareNumericAndBranch>(
2233       instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
2234   js_array_check.Then();
2235   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
2236                                               true, kind,
2237                                               LOAD, NEVER_RETURN_HOLE,
2238                                               STANDARD_STORE));
2239   js_array_check.Else();
2240   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
2241                                               false, kind,
2242                                               LOAD, NEVER_RETURN_HOLE,
2243                                               STANDARD_STORE));
2244   js_array_check.End();
2245 }
2246
2247
2248 void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
2249     HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
2250     HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
2251   DCHECK(IsExternalArrayElementsKind(kind));
2252
2253   BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
2254
2255   Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
2256                                               false, kind,
2257                                               LOAD, NEVER_RETURN_HOLE,
2258                                               STANDARD_STORE));
2259 }
2260
2261
2262 HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
2263   HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
2264   HValue* key = GetParameter(LoadDescriptor::kNameIndex);
2265   // Split into a smi/integer case and a unique string case.
2266   HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
2267                                                 graph()->CreateBasicBlock());
2268
2269   BuildKeyedIndexCheck(key, &index_name_split_continuation);
2270
2271   IfBuilder index_name_split(this, &index_name_split_continuation);
2272   index_name_split.Then();
2273   {
2274     // Key is an index (number)
2275     key = Pop();
2276
2277     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
2278       (1 << Map::kHasIndexedInterceptor);
2279     BuildJSObjectCheck(receiver, bit_field_mask);
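    // BuildJSObjectCheck presumably deopts when any of the masked bits is
    // set, i.e. for receivers that need access checks or have an indexed
    // interceptor; those cases are left to the runtime.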
2280
2281     HValue* map =
2282         Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
2283
2284     HValue* instance_type =
2285         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
2286
2287     HValue* bit_field2 =
2288         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
2289
2290     IfBuilder kind_if(this);
2291     BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2292                          FAST_HOLEY_ELEMENTS);
2293
2294     kind_if.Else();
2295     {
2296       BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2297                            FAST_HOLEY_DOUBLE_ELEMENTS);
2298     }
2299     kind_if.Else();
2300
2301     // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
2302     BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
2303     {
2304       HValue* elements = AddLoadElements(receiver);
2305
2306       HValue* hash = BuildElementIndexHash(key);
2307
2308       Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
2309                                                casted_stub()->language_mode()));
2310     }
2311     kind_if.Else();
2312
2313     // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
2314     STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
2315                   SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
2316     BuildElementsKindLimitCheck(&kind_if, bit_field2,
2317                                 SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
2318     // Non-strict elements are not handled.
2319     Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
2320                      Deoptimizer::EAGER);
2321     Push(graph()->GetConstant0());
2322
2323     kind_if.Else();
2324     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2325                              EXTERNAL_INT8_ELEMENTS);
2326
2327     kind_if.Else();
2328     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2329                              EXTERNAL_UINT8_ELEMENTS);
2330
2331     kind_if.Else();
2332     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2333                              EXTERNAL_INT16_ELEMENTS);
2334
2335     kind_if.Else();
2336     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2337                              EXTERNAL_UINT16_ELEMENTS);
2338
2339     kind_if.Else();
2340     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2341                              EXTERNAL_INT32_ELEMENTS);
2342
2343     kind_if.Else();
2344     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2345                              EXTERNAL_UINT32_ELEMENTS);
2346
2347     kind_if.Else();
2348     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2349                              EXTERNAL_FLOAT32_ELEMENTS);
2350
2351     kind_if.Else();
2352     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2353                              EXTERNAL_FLOAT64_ELEMENTS);
2354
2355     kind_if.Else();
2356     BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
2357                              EXTERNAL_UINT8_CLAMPED_ELEMENTS);
2358
2359     kind_if.ElseDeopt(
2360         Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);
2361
2362     kind_if.End();
2363   }
2364   index_name_split.Else();
2365   {
2366     // Key is a unique string.
2367     key = Pop();
2368
2369     int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
2370         (1 << Map::kHasNamedInterceptor);
2371     BuildJSObjectCheck(receiver, bit_field_mask);
2372
2373     HIfContinuation continuation;
2374     BuildTestForDictionaryProperties(receiver, &continuation);
2375     IfBuilder if_dict_properties(this, &continuation);
2376     if_dict_properties.Then();
2377     {
2378       // Key is a string, properties are in dictionary mode.
2379       BuildNonGlobalObjectCheck(receiver);
2380
2381       HValue* properties = Add<HLoadNamedField>(
2382           receiver, nullptr, HObjectAccess::ForPropertiesPointer());
2383
2384       HValue* hash =
2385           Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());
2386
2387       hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));
2388
2389       HValue* value = BuildUncheckedDictionaryElementLoad(
2390           receiver, properties, key, hash, casted_stub()->language_mode());
2391       Push(value);
2392     }
2393     if_dict_properties.Else();
2394     {
2395       // TODO(dcarney): don't use keyed lookup cache, but convert to use
2396       // megamorphic stub cache.
2397       UNREACHABLE();
2398       // Key is a string, properties are in fast mode.
2399       HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
2400
2401       ExternalReference cache_keys_ref =
2402           ExternalReference::keyed_lookup_cache_keys(isolate());
2403       HValue* cache_keys = Add<HConstant>(cache_keys_ref);
2404
2405       HValue* map =
2406           Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
2407       HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
2408       base_index->ClearFlag(HValue::kCanOverflow);
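      // Each hash bucket appears to hold kEntriesPerBucket consecutive
      // (map, key) pairs, so probe i of this bucket lives at
      // base_index + i * KeyedLookupCache::kEntryLength; the map_index and
      // key_index computations below follow that layout.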
2409
2410       HIfContinuation inline_or_runtime_continuation(
2411           graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
2412       {
2413         IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
2414         for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
2415              ++probe) {
2416           IfBuilder* lookup_if = &lookup_ifs[probe];
2417           lookup_if->Initialize(this);
2418           int probe_base = probe * KeyedLookupCache::kEntryLength;
2419           HValue* map_index = AddUncasted<HAdd>(
2420               base_index,
2421               Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
2422           map_index->ClearFlag(HValue::kCanOverflow);
2423           HValue* key_index = AddUncasted<HAdd>(
2424               base_index,
2425               Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
2426           key_index->ClearFlag(HValue::kCanOverflow);
2427           HValue* map_to_check =
2428               Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
2429                               NEVER_RETURN_HOLE, 0);
2430           lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
2431           lookup_if->And();
2432           HValue* key_to_check =
2433               Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
2434                               NEVER_RETURN_HOLE, 0);
2435           lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
2436           lookup_if->Then();
2437           {
2438             ExternalReference cache_field_offsets_ref =
2439                 ExternalReference::keyed_lookup_cache_field_offsets(isolate());
2440             HValue* cache_field_offsets =
2441                 Add<HConstant>(cache_field_offsets_ref);
2442             HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
2443             index->ClearFlag(HValue::kCanOverflow);
2444             HValue* property_index =
2445                 Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
2446                                 EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
2447             Push(property_index);
2448           }
2449           lookup_if->Else();
2450         }
2451         for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
2452           lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
2453         }
2454       }
2455
2456       IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
2457       inline_or_runtime.Then();
2458       {
2459         // Found a cached index, load property inline.
2460         Push(Add<HLoadFieldByIndex>(receiver, Pop()));
2461       }
2462       inline_or_runtime.Else();
2463       {
2464         // KeyedLookupCache miss; call runtime.
2465         Add<HPushArguments>(receiver, key);
2466         Push(Add<HCallRuntime>(
2467             isolate()->factory()->empty_string(),
2468             Runtime::FunctionForId(is_strong(casted_stub()->language_mode())
2469                                        ? Runtime::kKeyedGetPropertyStrong
2470                                        : Runtime::kKeyedGetProperty),
2471             2));
2472       }
2473       inline_or_runtime.End();
2474     }
2475     if_dict_properties.End();
2476   }
2477   index_name_split.End();
2478
2479   return Pop();
2480 }
2481
2482
2483 Handle<Code> KeyedLoadGenericStub::GenerateCode() {
2484   return DoGenerateCode(this);
2485 }
2486
2487 }  // namespace internal
2488 }  // namespace v8