// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {

static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}

class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object, HValue* literals);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};

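// BuildGraph prepares the graph shared by all stubs: it binds the register
// parameters in the start environment, remembers the stack parameter count
// register when the descriptor declares one, emits the stub-specific body via
// BuildCodeStub(), and finishes with a return that pops the right number of
// stack arguments.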
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}

template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};

Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}

template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}

template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}

// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// Possible optimizations: put the type string into the oddballs.
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HConstant* undefined_string = Add<HConstant>(factory->undefined_string());
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          IfBuilder is_true_or_false(this);
          is_true_or_false.If<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantTrue());
          is_true_or_false.OrIf<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantFalse());
          is_true_or_false.Then();
          { Push(Add<HConstant>(factory->boolean_string())); }
          is_true_or_false.Else();
          {
            IfBuilder is_null(this);
            is_null.If<HCompareObjectEqAndBranch>(object,
                                                  graph()->GetConstantNull());
            is_null.Then();
            { Push(object_string); }
            is_null.Else();
            { Push(undefined_string); }
          }
          is_true_or_false.End();
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            IfBuilder is_function(this);
            HConstant* js_function = Add<HConstant>(JS_FUNCTION_TYPE);
            HConstant* js_function_proxy =
                Add<HConstant>(JS_FUNCTION_PROXY_TYPE);
            is_function.If<HCompareNumericAndBranch>(instance_type, js_function,
                                                     Token::EQ);
            is_function.OrIf<HCompareNumericAndBranch>(
                instance_type, js_function_proxy, Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HIsUndetectableAndBranch>(object);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(undefined_string);
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();
  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));
  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

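// Loads the field at |index| from |object|. A double field is read out of its
// (mutable) HeapNumber box unless doubles are stored unboxed in-object
// (FLAG_unbox_double_fields with an in-object field), in which case the value
// is loaded directly.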
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}

template <>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }

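// Handles the unmapped lookup for sloppy arguments: loads (when value == NULL)
// or stores the element at |key| in the arguments backing store held at
// elements[1], deoptimizing when the key falls outside the backing store's
// length.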
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                               NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}

HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
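  //
  // Illustrative sketch (not from the original comment): for function f(a, b)
  // called as f(1, 2, 3) with both parameters context-allocated, elements is
  //   [<context>, <FixedArray [1, 2, 3]>, <Smi index of a>, <Smi index of b>]
  // and length == 2 + min(3, 2) == 4. A lookup of key 2 (the third argument)
  // fails the key + 2 < length check and is served from the unmapped array,
  // while keys 0 and 1 are redirected through the context.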

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                         FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}

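// The two stubs below share EmitKeyedSloppyArguments; passing NULL for the
// value selects the load path (see is_load above).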
template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}

void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}

template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.
    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}

HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}

HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}

HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}

template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if they
          // are the maps that were originally in the cell or not. If optimized
          // code will deopt when a cell has an unstable map and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadGlobalViaContextStub>::BuildCodeStub() {
  LoadGlobalViaContextStub* stub = casted_stub();
  int depth_value = stub->depth();
  HValue* depth = GetParameter(0);
  HValue* slot_index = GetParameter(1);
  HValue* name = GetParameter(2);

  // Choose between dynamic or static context script fetching versions.
  depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth
              ? nullptr
              : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
  slot_index =
      AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());

  HValue* script_context = BuildGetParentContext(depth, depth_value);
  HValue* cell =
      Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);

  HValue* value = Add<HLoadNamedField>(cell, nullptr,
                                       HObjectAccess::ForPropertyCellValue());

  IfBuilder builder(this);
  HValue* hole_value = graph()->GetConstantHole();
  builder.IfNot<HCompareObjectEqAndBranch, HValue*>(value, hole_value);
  builder.Then();
  { Push(value); }
  builder.Else();
  {
    Add<HPushArguments>(script_context, slot_index, name);
    Push(Add<HCallRuntime>(
        isolate()->factory()->empty_string(),
        Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3));
  }
  builder.End();
  return Pop();
}


Handle<Code> LoadGlobalViaContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreGlobalViaContextStub>::BuildCodeStub() {
  StoreGlobalViaContextStub* stub = casted_stub();
  int depth_value = stub->depth();
  HValue* depth = GetParameter(0);
  HValue* slot_index = GetParameter(1);
  HValue* name = GetParameter(2);
  HValue* value = GetParameter(3);

  // Choose between dynamic or static context script fetching versions.
  depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth
              ? nullptr
              : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
  slot_index =
      AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());

  HValue* script_context = BuildGetParentContext(depth, depth_value);
  HValue* cell =
      Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);

  // Fast case that requires storing to cell.
  HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(),
                                             graph()->CreateBasicBlock());

  // Fast case that does not require storing to cell.
  HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());
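
  // Note: the nested checks below never store directly; each branch joins one
  // of the continuations above, so the actual cell store (or the runtime
  // bailout) is emitted exactly once at the end of this function.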

  // This stub does the same as StoreGlobalStub but in a dynamic manner.

  HValue* cell_contents = Add<HLoadNamedField>(
      cell, nullptr, HObjectAccess::ForPropertyCellValue());

  IfBuilder if_hole(this);
  HValue* hole_value = graph()->GetConstantHole();
  if_hole.IfNot<HCompareObjectEqAndBranch, HValue*>(cell_contents, hole_value);
  if_hole.Then();
  {
    HValue* details = Add<HLoadNamedField>(
        cell, nullptr, HObjectAccess::ForPropertyCellDetails());
    HValue* cell_type =
        BuildDecodeField<PropertyDetails::PropertyCellTypeField>(details);

    // The code below relies on this.
    STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant);
    STATIC_ASSERT(PropertyCellType::kConstant <
                  PropertyCellType::kConstantType);
    STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable);

    // Handle all cell type cases.
    IfBuilder if_not_const(this);

    int cell_type_constant = static_cast<int>(PropertyCellType::kConstant);
    if_not_const.If<HCompareNumericAndBranch, HValue*>(
        cell_type, Add<HConstant>(cell_type_constant), Token::GT);
    if_not_const.Then();
    {
      // kConstantType or kMutable.
      IfBuilder if_const_type(this);
      int cell_type_constant_type =
          static_cast<int>(PropertyCellType::kConstantType);
      if_const_type.If<HCompareNumericAndBranch, HValue*>(
          cell_type, Add<HConstant>(cell_type_constant_type), Token::EQ);
      if_const_type.Then();
      {
        // Check that either both value and cell_contents are smi or
        // both have the same map.
        IfBuilder if_cell_is_smi(this);
        if_cell_is_smi.If<HIsSmiAndBranch>(cell_contents);
        if_cell_is_smi.Then();
        {
          IfBuilder if_value_is_smi(this);
          if_value_is_smi.If<HIsSmiAndBranch>(value);
          if_value_is_smi.Then();
          {
            // Both cell_contents and value are smis, do store.
          }
          if_value_is_smi.Else();  // Slow case.
          if_value_is_smi.JoinContinuation(&if_fast_store_continuation);
        }
        if_cell_is_smi.Else();
        {
          IfBuilder if_value_is_heap_object(this);
          if_value_is_heap_object.IfNot<HIsSmiAndBranch>(value);
          if_value_is_heap_object.Then();
          {
            // Both cell_contents and value are heap objects, do store.
            HValue* expected_map = Add<HLoadNamedField>(
                cell_contents, nullptr, HObjectAccess::ForMap());
            HValue* map =
                Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
            IfBuilder map_check(this);
            map_check.If<HCompareObjectEqAndBranch>(expected_map, map);
            map_check.Then();
            map_check.Else();  // Slow case.
            map_check.JoinContinuation(&if_fast_store_continuation);

            // The accessor case is handled by the map check above, since
            // the value must not have a AccessorPair map.
          }
          if_value_is_heap_object.Else();  // Slow case.
          if_value_is_heap_object.JoinContinuation(&if_fast_store_continuation);
        }
        if_cell_is_smi.EndUnreachable();
      }
      if_const_type.Else();
      {
        // Check that the property kind is kData.
        HValue* kind = BuildDecodeField<PropertyDetails::KindField>(details);
        HValue* data_kind_value = Add<HConstant>(kData);

        IfBuilder builder(this);
        builder.If<HCompareNumericAndBranch, HValue*>(kind, data_kind_value,
                                                      Token::EQ);
        builder.Then();
        builder.Else();  // Slow case.
        builder.JoinContinuation(&if_fast_store_continuation);
      }
      if_const_type.EndUnreachable();
    }
    if_not_const.Else();
    {
      // kUndefined or kConstant, just check that the value matches.
      IfBuilder builder(this);
      builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
      builder.Then();
      builder.Else();  // Slow case.
      builder.JoinContinuation(&if_fast_no_store_continuation);
    }
    if_not_const.EndUnreachable();
  }
  if_hole.Else();  // Slow case.
  if_hole.JoinContinuation(&if_fast_store_continuation);

  // Do store for fast case.
  IfBuilder if_fast_store(this, &if_fast_store_continuation);
  if_fast_store.Then();
  {
    // All checks are done, store the value to the cell.
    Add<HStoreNamedField>(cell, HObjectAccess::ForPropertyCellValue(), value);
  }
  if_fast_store.Else();
  if_fast_store.JoinContinuation(&if_fast_no_store_continuation);

  // Bailout to runtime call for slow case.
  IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation);
  if_no_fast_store.Then();
  {
    // Nothing else to do.
  }
  if_no_fast_store.Else();
  {
    // Slow case, call runtime.
    HInstruction* lang_mode = Add<HConstant>(casted_stub()->language_mode());
    Add<HPushArguments>(script_context, slot_index, name, value);
    Add<HPushArguments>(lang_mode);
    Add<HCallRuntime>(isolate()->factory()->empty_string(),
                      Runtime::FunctionForId(Runtime::kStoreGlobalViaContext),
                      5);
  }
  if_no_fast_store.End();
  return value;
}


Handle<Code> StoreGlobalViaContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();

  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  BuildInstallOptimizedCode(js_function, native_context, code_object,
                            literals);

  // The builder continues in the "then" after this function.
}
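
// Layout assumed by the loads above: each optimized code map entry is a
// fixed-length run of slots (SharedFunctionInfo::kEntryLength) holding the
// owning native context, the cached code object, the literals array, and the
// OSR AST id, at the SharedFunctionInfo::k*Offset constants passed to
// LoadFromOptimizedCodeMap().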

void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    HValue* code_object,
    HValue* literals) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link the function into the native context's list of optimized
  // functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);
}

void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}

HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant],
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry =
      Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
  return field_entry;
}
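
// Note: callers load several fields of the same entry (context, code,
// literals, osr ast id) against the same iterator; expressing each index as
// [iterator + constant], and skipping the add entirely when field_offset is
// 0, keeps those loads in a shape the optimizer can common up and hoist, per
// the comment above.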

void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  // The optimized code map is Smi zero when there is no optimized code.
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of entries, each holding
    // (native context, optimized code, literals, osr ast id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double check
      // the first entry. After the loop, if no matching optimized code was
      // found, install unoptimized code.
      //   for (i = map.length() - SharedFunctionInfo::kEntryLength;
      //        i > SharedFunctionInfo::kEntriesStart;
      //        i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop once a matching entry has been installed.
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, no context-dependent
      // code was found; try context-independent code next.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        IfBuilder shared_code_check(this);
        HValue* shared_code = Add<HLoadNamedField>(
            optimized_map, nullptr,
            HObjectAccess::ForOptimizedCodeMapSharedCode());
        shared_code_check.IfNot<HCompareObjectEqAndBranch>(
            shared_code, graph()->GetConstantUndefined());
        shared_code_check.Then();
        {
          // Store the context-independent optimized code.
          HValue* literals = Add<HConstant>(factory->empty_fixed_array());
          BuildInstallOptimizedCode(js_function, native_context, shared_code,
                                    literals);
        }
        shared_code_check.Else();
        {
          // Store the unoptimized code.
          BuildInstallCode(js_function, shared_info);
        }
      }
    }
  }
}
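
// To summarize BuildInstallFromOptimizedCodeMap()'s search order: the first
// code map entry is tried, then the remaining entries are scanned backwards,
// then the shared (context-independent) code is tried, and finally the
// unoptimized code from the SharedFunctionInfo is installed as the fallback.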

template <>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);
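
  // Illustrative sizing (not from the original source): with, say, 4 stub
  // slots on a 64-bit target (kPointerSize == 8), this allocates
  // (4 + Context::MIN_CONTEXT_SLOTS) * 8 bytes of slot storage plus
  // FixedArray::kHeaderSize bytes of header in a single new-space chunk.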

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                             casted_stub()->language_mode());
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}
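
// The comparison above works because Map::ElementsKindBits occupies the most
// significant bits of bit_field2, so numerically comparing the raw byte
// against encode(kind + 1) is equivalent to "elements kind <= kind". Callers
// therefore chain these checks from the lowest elements kind upwards.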

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}

HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and a unique-string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();
    {
      // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
      BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);

      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                               casted_stub()->language_mode()));
    }
    kind_if.Else();
    {
      // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
      STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                    SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
      BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
      // Non-strict elements are not handled.
      Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                       Deoptimizer::EAGER);
      Push(graph()->GetConstant0());
    }
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Add<HCheckInstanceType>(key,
                                  HCheckInstanceType::IS_INTERNALIZED_STRING);

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(
          receiver, properties, key, hash, casted_stub()->language_mode());
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.

      // Key is string, properties are fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);
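
      // Each cache entry holds a (map, key) pair, so the bucket's base index
      // is hash * 2 (KeyedLookupCache::kEntryLength); kMapIndex and kKeyIndex
      // below select the word within an entry, and each probe advances by a
      // whole-entry stride.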

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());

      IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
      for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
           ++probe) {
        IfBuilder* lookup_if = &lookup_ifs[probe];
        lookup_if->Initialize(this);
        int probe_base = probe * KeyedLookupCache::kEntryLength;
        HValue* map_index = AddUncasted<HAdd>(
            base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
        map_index->ClearFlag(HValue::kCanOverflow);
        HValue* key_index = AddUncasted<HAdd>(
            base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
        key_index->ClearFlag(HValue::kCanOverflow);
        HValue* map_to_check =
            Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
                            NEVER_RETURN_HOLE, 0);
        lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
        lookup_if->And();
        HValue* key_to_check =
            Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
                            NEVER_RETURN_HOLE, 0);
        lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
        lookup_if->Then();
        {
          ExternalReference cache_field_offsets_ref =
              ExternalReference::keyed_lookup_cache_field_offsets(isolate());
          HValue* cache_field_offsets =
              Add<HConstant>(cache_field_offsets_ref);
          HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
          index->ClearFlag(HValue::kCanOverflow);
          HValue* property_index =
              Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
                              EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
          Push(property_index);
        }
        lookup_if->Else();
      }
      for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
        lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(is_strong(casted_stub()->language_mode())
                                       ? Runtime::kKeyedGetPropertyStrong
                                       : Runtime::kKeyedGetProperty),
            2));  // Matches the two arguments pushed above.
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}

Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal
}  // namespace v8