// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {

static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
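  // Note: the three scopes above assert that optimizing the graph neither
  // allocates on the V8 heap nor creates or dereferences handles; with no
  // way to recover at this point, any failure below is simply fatal.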

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}

class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph().
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object, HValue* literals);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};

bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count + 1 really fits in
      // a smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}

template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};

Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}

template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}

template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}

// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// Possible optimizations: put the type string into the oddballs.
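// For reference, the dispatch below follows the spec table, e.g.:
//   typeof 1        == "number"     typeof "a"       == "string"
//   typeof true     == "boolean"    typeof undefined == "undefined"
//   typeof null     == "object"     typeof {}        == "object"
//   typeof Symbol() == "symbol"     typeof isNaN     == "function"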
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HConstant* undefined_string = Add<HConstant>(factory->undefined_string());
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          IfBuilder is_true_or_false(this);
          is_true_or_false.If<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantTrue());
          is_true_or_false.OrIf<HCompareObjectEqAndBranch>(
              object, graph()->GetConstantFalse());
          is_true_or_false.Then();
          { Push(Add<HConstant>(factory->boolean_string())); }
          is_true_or_false.Else();
          {
            IfBuilder is_null(this);
            is_null.If<HCompareObjectEqAndBranch>(object,
                                                  graph()->GetConstantNull());
            is_null.Then();
            { Push(object_string); }
            is_null.Else();
            { Push(undefined_string); }
          }
          is_true_or_false.End();
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            IfBuilder is_function(this);
            HConstant* js_function = Add<HConstant>(JS_FUNCTION_TYPE);
            HConstant* js_function_proxy =
                Add<HConstant>(JS_FUNCTION_PROXY_TYPE);
            is_function.If<HCompareNumericAndBranch>(instance_type, js_function,
                                                     Token::EQ);
            is_function.OrIf<HCompareNumericAndBranch>(
                instance_type, js_function_proxy, Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
              IfBuilder is_float32x4(this);
              is_float32x4.If<HCompareNumericAndBranch>(
                  instance_type, Add<HConstant>(FLOAT32X4_TYPE), Token::EQ);
              is_float32x4.Then();
              { Push(Add<HConstant>(factory->float32x4_string())); }
              is_float32x4.Else();
              {
                // Is it an undetectable object?
                IfBuilder is_undetectable(this);
                is_undetectable.If<HIsUndetectableAndBranch>(object);
                is_undetectable.Then();
                {
                  // typeof an undetectable object is 'undefined'.
                  Push(undefined_string);
                }
                is_undetectable.Else();
                {
                  // For any kind of object not handled above, the spec rule
                  // for host objects gives that it is okay to return "object".
                  Push(object_string);
                }
                is_undetectable.End();
              }
              is_float32x4.End();
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();
  return environment()->Pop();
}

Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map.
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind).
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
      ? CONVERT_HOLE_TO_UNDEFINED
      : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}

template <>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }

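
// UnmappedCase handles keys that are not context-mapped arguments: such
// values are held as regular indexed properties in the backing store at
// elements[1]. A NULL |value| makes this a load; otherwise |value| is stored
// at |key|. Keys outside the backing store's length deoptimize.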
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                               NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}

HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
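  //
  // For example (illustrative): in sloppy mode,
  //
  //   function f(a) { arguments[0] = 42; return a; }  // f(1) returns 42
  //
  // because index 0 is mapped to the same context slot that backs |a|.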

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                         FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}

template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}

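
// BuildStoreNamedField stores |value| into an in-object field or into the
// out-of-object properties backing store. If |transition_to_field| is set,
// a double field gets a freshly allocated mutable HeapNumber box; otherwise
// the existing box is updated in place (unless in-object unboxed doubles are
// enabled, in which case the double is stored directly).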
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}

Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.
    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}

HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}

HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}

HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}

template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if
          // they are the maps that were originally in the cell or not. If
          // optimized code will deopt when a cell has an unstable map and if
          // it has a dependency on a stable map, it will deopt if the map
          // destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadGlobalViaContextStub>::BuildCodeStub() {
  LoadGlobalViaContextStub* stub = casted_stub();
  int depth_value = stub->depth();
  HValue* depth = GetParameter(0);
  HValue* slot_index = GetParameter(1);
  HValue* name = GetParameter(2);

  // Choose between dynamic or static context script fetching versions.
  depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth
              ? static_cast<HValue*>(Add<HConstant>(depth_value))
              : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
  slot_index =
      AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());

  HValue* script_context = BuildGetParentContext(depth, depth_value);
  HValue* cell =
      Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);

  HValue* value = Add<HLoadNamedField>(cell, nullptr,
                                       HObjectAccess::ForPropertyCellValue());

  IfBuilder builder(this);
  HValue* hole_value = graph()->GetConstantHole();
  builder.IfNot<HCompareObjectEqAndBranch, HValue*>(value, hole_value);
  builder.Then();
  { Push(value); }
  builder.Else();
  {
    Add<HPushArguments>(script_context, slot_index, name);
    Push(Add<HCallRuntime>(
        isolate()->factory()->empty_string(),
        Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3));
  }
  builder.End();
  return Pop();
}


Handle<Code> LoadGlobalViaContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<StoreGlobalViaContextStub>::BuildCodeStub() {
  StoreGlobalViaContextStub* stub = casted_stub();
  int depth_value = stub->depth();
  HValue* depth = GetParameter(0);
  HValue* slot_index = GetParameter(1);
  HValue* name = GetParameter(2);
  HValue* value = GetParameter(3);

  // Choose between dynamic or static context script fetching versions.
  depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth
              ? static_cast<HValue*>(Add<HConstant>(depth_value))
              : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
  slot_index =
      AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());

  HValue* script_context = BuildGetParentContext(depth, depth_value);
  HValue* cell =
      Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);

  // Fast case that requires storing to cell.
  HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(),
                                             graph()->CreateBasicBlock());

  // Fast case that does not require storing to cell.
  HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  // This stub does the same as StoreGlobalStub but in a dynamic manner.

  HValue* cell_contents = Add<HLoadNamedField>(
      cell, nullptr, HObjectAccess::ForPropertyCellValue());

  IfBuilder if_hole(this);
  HValue* hole_value = graph()->GetConstantHole();
  if_hole.IfNot<HCompareObjectEqAndBranch, HValue*>(cell_contents, hole_value);
  if_hole.Then();
  {
    HValue* details = Add<HLoadNamedField>(
        cell, nullptr, HObjectAccess::ForPropertyCellDetails());
    HValue* cell_type =
        BuildDecodeField<PropertyDetails::PropertyCellTypeField>(details);

    // The code below relies on this.
    STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant);
    STATIC_ASSERT(PropertyCellType::kConstant <
                  PropertyCellType::kConstantType);
    STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable);
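    // (In particular, the single Token::GT compare against kConstant below
    // separates {kUndefined, kConstant} from {kConstantType, kMutable}.)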

    // Handle all cell type cases.
    IfBuilder if_not_const(this);

    int cell_type_constant = static_cast<int>(PropertyCellType::kConstant);
    if_not_const.If<HCompareNumericAndBranch, HValue*>(
        cell_type, Add<HConstant>(cell_type_constant), Token::GT);
    if_not_const.Then();
    {
      // kConstantType or kMutable.
      IfBuilder if_const_type(this);
      int cell_type_constant_type =
          static_cast<int>(PropertyCellType::kConstantType);
      if_const_type.If<HCompareNumericAndBranch, HValue*>(
          cell_type, Add<HConstant>(cell_type_constant_type), Token::EQ);
      if_const_type.Then();
      {
        // Check that either both value and cell_contents are smi or
        // both have the same map.
        IfBuilder if_cell_is_smi(this);
        if_cell_is_smi.If<HIsSmiAndBranch>(cell_contents);
        if_cell_is_smi.Then();
        {
          IfBuilder if_value_is_smi(this);
          if_value_is_smi.If<HIsSmiAndBranch>(value);
          if_value_is_smi.Then();
          {
            // Both cell_contents and value are smis, do store.
          }
          if_value_is_smi.Else();  // Slow case.
          if_value_is_smi.JoinContinuation(&if_fast_store_continuation);
        }
        if_cell_is_smi.Else();
        {
          IfBuilder if_value_is_heap_object(this);
          if_value_is_heap_object.IfNot<HIsSmiAndBranch>(value);
          if_value_is_heap_object.Then();
          {
            // Both cell_contents and value are heap objects, do store.
            HValue* expected_map = Add<HLoadNamedField>(
                cell_contents, nullptr, HObjectAccess::ForMap());
            HValue* map =
                Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
            IfBuilder map_check(this);
            map_check.If<HCompareObjectEqAndBranch>(expected_map, map);
            map_check.Then();
            map_check.Else();  // Slow case.
            map_check.JoinContinuation(&if_fast_store_continuation);

            // The accessor case is handled by the map check above, since
            // the value must not have an AccessorPair map.
          }
          if_value_is_heap_object.Else();  // Slow case.
          if_value_is_heap_object.JoinContinuation(
              &if_fast_store_continuation);
        }
        if_cell_is_smi.EndUnreachable();
      }
      if_const_type.Else();
      {
        // Check that the property kind is kData.
        HValue* kind = BuildDecodeField<PropertyDetails::KindField>(details);
        HValue* data_kind_value = Add<HConstant>(kData);

        IfBuilder builder(this);
        builder.If<HCompareNumericAndBranch, HValue*>(kind, data_kind_value,
                                                      Token::EQ);
        builder.Then();
        builder.Else();  // Slow case.
        builder.JoinContinuation(&if_fast_store_continuation);
      }
      if_const_type.EndUnreachable();
    }
    if_not_const.Else();
    {
      // kUndefined or kConstant, just check that the value matches.
      IfBuilder builder(this);
      builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
      builder.Then();
      builder.Else();  // Slow case.
      builder.JoinContinuation(&if_fast_no_store_continuation);
    }
    if_not_const.EndUnreachable();
  }
  if_hole.Else();  // Slow case.
  if_hole.JoinContinuation(&if_fast_store_continuation);

  // Do store for fast case.
  IfBuilder if_fast_store(this, &if_fast_store_continuation);
  if_fast_store.Then();
  {
    // All checks are done, store the value to the cell.
    Add<HStoreNamedField>(cell, HObjectAccess::ForPropertyCellValue(), value);
  }
  if_fast_store.Else();
  if_fast_store.JoinContinuation(&if_fast_no_store_continuation);

  // Bailout to runtime call for slow case.
  IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation);
  if_no_fast_store.Then();
  {
    // Nothing else to do.
  }
  if_no_fast_store.Else();
  {
    // Slow case, call runtime.
    HInstruction* lang_mode = Add<HConstant>(casted_stub()->language_mode());
    Add<HPushArguments>(script_context, slot_index, name, value);
    Add<HPushArguments>(lang_mode);
    Add<HCallRuntime>(isolate()->factory()->empty_string(),
                      Runtime::FunctionForId(Runtime::kStoreGlobalViaContext),
                      5);
  }
  if_no_fast_store.End();
  return value;
}

Handle<Code> StoreGlobalViaContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

1831 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
1832 HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
1833 HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
1834 HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
1835 HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);
  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}

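// Note: BuildTransitionElementsKind above rewrites the object's backing
// store to to_kind() first, so the monomorphic store that follows is
// emitted against the post-transition elements kind.
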
Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}

void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context, context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();

  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  HValue* literals = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kLiteralsOffset);

  BuildInstallOptimizedCode(js_function, native_context, code_object,
                            literals);

  // The builder continues in the "then" after this function.
}

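// Sketch of the optimized code map layout assumed by the loads above
// (offsets are entry-relative; the actual constants are defined on
// SharedFunctionInfo and may differ between V8 versions):
//
//   optimized_map: fixed array
//     [ ...header..., entry 0, entry 1, ..., entry N ]
//   entry: [ +kContextOffset    -> native context the code was made for
//            +kCachedCodeOffset -> optimized Code object
//            +kLiteralsOffset   -> literals array
//            +kOsrAstIdOffset   -> OSR AST id, BailoutId::None() if none ]
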
void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function,
                                                         HValue* native_context,
                                                         HValue* code_object,
                                                         HValue* literals) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link the function into the native context's list of optimized
  // functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(
      native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);
}

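// (The stores into js_function above can skip the write barrier because the
// closure is freshly allocated in new space, as in FastNewClosureStub below;
// the final store writes js_function into the old-space native context,
// which is the reference the GC actually needs recorded.)
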
void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}

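// Installing unoptimized code also resets the next-function link to
// undefined, detaching the closure from any optimized-functions list before
// its code entry is pointed at the shared (unoptimized) code.
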
HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry =
      Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
  return field_entry;
}

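// Illustration (not literal output): for field_offset == kCachedCodeOffset
// the emitted load is keyed_load(optimized_map, iterator + constant), i.e.
// exactly the [<hvalue> + constant] shape named above. Keeping the add in
// this canonical form is what lets GVN/LICM hoist the loads out of the
// scan loop in BuildInstallFromOptimizedCodeMap below.
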
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  // An absent optimized code map is represented by the Smi zero.
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of entries of the form
    // (native context, optimized code, literals, osr ast id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double-check
      // the first entry. After the loop, if no matching optimized code was
      // found, install unoptimized code.
      //   for (i = map.length() - SharedFunctionInfo::kEntryLength;
      //        i > SharedFunctionInfo::kEntriesStart;
      //        i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check, optimized_map,
                                          slot_iterator);
        // Fall out of the loop on a match.
        loop_builder.Break();
      }
      loop_builder.EndBody();
      // If slot_iterator equals the first entry index, we failed to find
      // context-dependent code; try context-independent code next.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        IfBuilder shared_code_check(this);
        HValue* shared_code = Add<HLoadNamedField>(
            optimized_map, nullptr,
            HObjectAccess::ForOptimizedCodeMapSharedCode());
        shared_code_check.IfNot<HCompareObjectEqAndBranch>(
            shared_code, graph()->GetConstantUndefined());
        shared_code_check.Then();
        {
          // Store the context-independent optimized code.
          HValue* literals = Add<HConstant>(factory->empty_fixed_array());
          BuildInstallOptimizedCode(js_function, native_context, shared_code,
                                    literals);
        }
        shared_code_check.Else();
        {
          // Store the unoptimized code.
          BuildInstallCode(js_function, shared_info);
        }
      }
    }
  }
}

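// Summary of the install decision tree built above:
//   optimized code map absent (Smi 0)       -> install unoptimized code
//   first entry matches this native context -> install that entry's code
//   otherwise scan remaining entries        -> install on first match
//   no match, shared code present           -> install context-independent
//   no match, no shared code                -> install unoptimized code
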
template <>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);
  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForFunctionContextPointer(), context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

  return js_function;
}

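// Note the ordering above: every field of js_function is initialized before
// BuildInstallFromOptimizedCodeMap runs, so the closure is fully formed by
// the time installing optimized code can link it into the native context's
// optimized-functions list.
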
Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);
  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());
  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}

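// Worked size example (illustrative only; the real values come from Context
// and FixedArray): with Context::MIN_CONTEXT_SLOTS == 4 and a stub compiled
// for 2 user slots, length == 6, so the allocation above is
// 6 * kPointerSize + FixedArray::kHeaderSize bytes.
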
Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                             casted_stub()->language_mode());
}

Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}

Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}

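// The comparison above can use the raw bit_field2 byte because
// Map::ElementsKindBits occupies its uppermost bits (in this era of the
// code), so bit_field2 < encode(next_kind) holds exactly when the map's
// elements kind is <= kind, regardless of the low flag bits.
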
void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}

void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}

HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }

    kind_if.Else();
    {
      // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
      BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);

      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                               casted_stub()->language_mode()));
    }

    kind_if.Else();
    {
      // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
      STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                    SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
      BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
      // Non-strict elements are not handled.
      Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                       Deoptimizer::EAGER);
      Push(graph()->GetConstant0());
    }
    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string, properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(
          receiver, properties, key, hash, casted_stub()->language_mode());
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.

      // Key is a string, properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);
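      // base_index indexes the cache's (map, key) pair array: each entry
      // spans two slots (presumably KeyedLookupCache::kEntryLength), hence
      // hash * 2; the probes below then step over whole entries via
      // probe_base.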
      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
                                EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }
      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load the property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(is_strong(casted_stub()->language_mode())
                                       ? Runtime::kKeyedGetPropertyStrong
                                       : Runtime::kKeyedGetProperty),
            2));  // Argument count: receiver and key.
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}

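// Every branch of both splits above pushes exactly one value onto the
// environment, so the single Pop() at the end yields the loaded property
// no matter which path was taken.
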
Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal