// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {

static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph().
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }
  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);
  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);
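
  // Illustrative sketch of the layout these helpers assume, inferred from
  // the constants used in their definitions below (an editorial aid, not a
  // normative description): the optimized code map is a FixedArray of
  // fixed-width entries beginning at SharedFunctionInfo::kEntriesStart,
  //
  //   [header..., context_0, code_0, literals_0, osr_ast_id_0,
  //               context_1, code_1, literals_1, osr_ast_id_1, ...]
  //
  // so entry i starts at kEntriesStart + i * kEntryLength and each field is
  // read at that base plus a small constant offset (kContextOffset, etc.).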

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


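// In outline (a reading aid, not code from the original file): a stub's
// GenerateCode() calls DoGenerateCode<Stub>(this), which either emits a
// lightweight miss thunk or runs the full Crankshaft pipeline:
// CreateGraph() builds the Hydrogen graph via the stub's BuildCodeStub(),
// OptimizeGraph() runs the optimization passes, and chunk->Codegen()
// lowers the resulting Lithium chunk to machine code.

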
template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// Possible optimizations: put the type string into the oddballs.
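// For reference, the strings produced below (per the spec section cited
// above, extended with ES6 symbols):
//   typeof 1         -> "number"      typeof "a"  -> "string"
//   typeof undefined -> "undefined"   typeof true -> "boolean"
//   typeof Symbol()  -> "symbol"      typeof (function(){}) -> "function"
//   typeof null      -> "object"      typeof {}   -> "object"
// Undetectable objects (e.g. document.all) report "undefined".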
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HConstant* undefined_string = Add<HConstant>(factory->undefined_string());
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          IfBuilder is_true_or_false(this);
          is_true_or_false.If<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantTrue());
          is_true_or_false.OrIf<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantFalse());
          is_true_or_false.Then();
          { Push(Add<HConstant>(factory->boolean_string())); }
          is_true_or_false.Else();
          {
            IfBuilder is_null(this);
            is_null.If<HCompareObjectEqAndBranch>(object,
                                                  graph()->GetConstantNull());
            is_null.Then();
            { Push(object_string); }
            is_null.Else();
            { Push(undefined_string); }
          }
          is_true_or_false.End();
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            IfBuilder is_function(this);
            HConstant* js_function = Add<HConstant>(JS_FUNCTION_TYPE);
            HConstant* js_function_proxy =
                Add<HConstant>(JS_FUNCTION_PROXY_TYPE);
            is_function.If<HCompareNumericAndBranch>(instance_type, js_function,
                                                     Token::EQ);
            is_function.OrIf<HCompareNumericAndBranch>(
                instance_type, js_function_proxy, Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HIsUndetectableAndBranch>(object);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(undefined_string);
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule
                // for host objects allows returning "object".
                Push(object_string);
              }
              is_undetectable.End();
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));
  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantUndefined());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template <>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                               NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  //
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
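  //
  // Worked example with hypothetical values (an editorial illustration):
  // for "function f(a, b) { ... }" called as f(1, 2) with both parameters
  // mapped, elements could be
  //   [context, unmapped_array, Smi(idx_a), Smi(idx_b)]    (length == 4)
  // so arguments[1] reads elements[1 + 2] == Smi(idx_b) and returns
  // context[idx_b], while arguments[7] fails the key + 2 < length test and
  // is looked up in (or stored to) the unmapped array instead.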

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point on that if
      // mapped_index is not the hole, it is indeed a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                         FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.
    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // exceed the limit.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case, because we might have to bail out while copying arguments into
  // the array when they aren't compatible with a smi array.
  // If it's a double array there is no problem, and if it's a fast (tagged)
  // array there is no problem either, because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
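  //
  // Example of why the holes matter (an editorial illustration): for
  // Array.apply(null, [1, {}, 3]) with a smi elements kind, the copy loop
  // below bails out at element 1 because {} is not a smi; any slots not
  // yet written must then read as holes rather than uninitialized memory.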
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            Type::String(zone()), right_type,
            result_type, state.fixed_right_arg(),
            allocation_mode, state.language_mode()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode,
            state.language_mode()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, Type::String(zone()),
            result_type, state.fixed_right_arg(),
            allocation_mode, state.language_mode()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode,
            state.language_mode()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.language_mode());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode,
                              state.language_mode());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if they
          // are the maps that were originally in the cell or not. If optimized
          // code will deopt when a cell has an unstable map and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();

  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);

  // The builder continues in the "then" after this function.
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry =
      Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
  return field_entry;
}


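// For example (a sketch, assuming the entry layout noted earlier): reading
// the context, code, and literals fields of one entry emits keyed loads at
// [optimized_map + iterator], [optimized_map + iterator + constant], and so
// on. Keeping the addend a constant lets the optimizer hoist the adds and
// share the loop-carried base across iterations of the search loop below.

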
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of fixed-width entries
    // (native context, optimized code, literals, OSR AST id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double-check
      // the first entry. After the loop, if no matching optimized code was
      // found, install unoptimized code.
      // for(i = map.length() - SharedFunctionInfo::kEntryLength;
      //     i > SharedFunctionInfo::kEntriesStart;
      //     i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, then we failed to find
      // and install optimized code.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}


template <>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}
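

// FastNewContextStub allocates a function context, the heap object that backs
// context-allocated locals. Conceptually, in
//
//   function makeCounter() {
//     var count = 0;
//     return function() { return ++count; };
//   }
//
// 'count' must live in such a context so the returned closure can reach it.
// The context is laid out like a FixedArray: map and length, the fixed slots
// (closure, previous context, extension, global object), then one slot per
// context-allocated variable.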
template <>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}
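

// LoadDictionaryElementStub loads a keyed element from a receiver whose
// elements backing store is in dictionary (slow) mode: the smi key is hashed
// and used to probe the elements dictionary directly, avoiding a runtime
// call.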
template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
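

// RegExpConstructResultStub builds the array that RegExp.prototype.exec
// returns on a match: a JSArray of the given length that additionally carries
// the match position ('index') and the subject string ('input'). See
// BuildRegExpConstructResult for the actual object layout.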
template <>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}
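

// The generic keyed load stub dispatches on the key first: integer-like keys
// go through an elements-kind dispatch (fast, dictionary, sloppy-arguments
// and external array kinds), while unique-name keys go through a property
// lookup, either in the property dictionary or via the keyed lookup cache.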
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();
  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2, ElementsKind kind);
  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver, HValue* key,
                            HValue* instance_type, HValue* bit_field2,
                            ElementsKind kind);
  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver, HValue* key,
                                HValue* instance_type, HValue* bit_field2,
                                ElementsKind kind);
  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
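
  // A single numeric comparison against the encoded limit suffices because
  // the checks are emitted in increasing elements-kind order. Note this
  // assumes ElementsKindBits occupy the topmost bits of bit_field2 (see
  // Map::ElementsKindBits), so comparing raw bit_field2 values orders maps
  // by elements kind.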
  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
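
  // The JS_ARRAY_TYPE split below selects the bounds check: JSArrays are
  // limited by their 'length' field, other receivers by the capacity of the
  // elements backing store.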
  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Split into a smi/integer case and a unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
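
    // Dispatch on the receiver's elements kind. The checks below go from the
    // lowest kind upwards and are chained with kind_if.Else(), so each limit
    // check only needs to compare against the upper bound of its range.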
    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict (sloppy) arguments elements are not handled here.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string, properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.

      // Key is a string, properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);
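
      // The keyed lookup cache stores (map, name) pairs pairwise flattened in
      // cache_keys, with the matching field offset for pair i held in the
      // parallel cache_field_offsets array at index i. 'hash' selects a
      // bucket of kEntriesPerBucket consecutive pairs, which are probed in
      // turn below. See KeyedLookupCache for the authoritative layout.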
      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
                                EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }
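
      // All probes have now been joined into one continuation that records
      // whether any (map, name) pair matched; on a hit the cached field
      // offset is left on the expression stack.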
      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal
}  // namespace v8